Compare commits: 1.7.x...1.9.3.RELEASE (195 commits)

Commit SHAs, in the order listed:

557a528690, 762569c826, ad7d82f521, 04deaacbec, ec443f2b5e, f1b04ff354,
62dd7d070a, 5df92a86a3, 2ca3df1ff4, a8751249fd, 19abff826e, 5f199cf81f,
eb26b78a19, 3d0053c61a, 7b15d246e8, b75f4a2834, d6ac4c6df5, 02f56c88f5,
cd35b9ed2a, e8944a6c3a, 6fcbc225eb, f06eda488c, 0ef910445d, b22ee9d27c,
859a0e83c8, b35f151b80, 17afb07e45, b407963344, 8b31ba1836, 0cf6edae43,
0824105377, dc936a5b7b, c8fe02e48e, 32547db306, 41902154ca, 2354ced1bf,
791cc3a1b8, 021c03fbbf, e4a59f29d0, 64d4880983, 47c348e03a, dea86535c1,
eee6b62589, 771ca8d84c, 8f5b334951, 0dc6169282, abe78f0428, 9930ec2d19,
83d7f4477e, 18c3704c2e, bef581caa5, 2f0abe0604, 4235b44c47, f318185ad0,
43b496287c, 9d0c8ecdc3, 5a78d99af0, 693f5ddf6e, ece655f67d, 119692c979,
6068f3243a, a7cda2e793, 2687cb85f0, b2ce1700d2, 0b634f8340, 9a078b743f,
65b6576cfc, 78e99e6df2, bb0a42733d, a2ae08e263, eaa9d6c7e6, 8900695153,
bfe548d573, 7ab4002771, 6eace856aa, f10e5a19c5, 90a4a63776, 0f14e35ba3,
ad0c4207d6, 97da43645a, 42b7c42617, bd81e25e6b, debe6aa649, 6f433902f0,
ba902e7f8e, 7e8ec21684, b7131b7efc, ace99c3464, 83fc5bc113, 160de0adf6,
b4753f3a83, bce6e2c78c, b5ea0eccd2, 87865b9761, 13fa4703c0, 5a21e00322,
3feed2bc5a, 501b9501e0, 727271e68c, 63a619dddf, 113566a6ab, 7862841b48,
fe6cbaa03d, 9ef1fc7304, cf3a9d3ced, 1d1c80db7b, eeb37e9104, 18bf0daee7,
1e9189aee7, 95f6dfafdd, bedaae8a90, 7bfa3fe7fd, 143b0b73b9, cbfc46270e,
b31efb46ec, ef3477098f, 9dce117555, e66e1e0502, 19e1e9daeb, ec8a948f3f,
38fc7641a0, ddc3925659, f8416edf8f, 4f94f37ce8, 528de58418, e6ea34aed8,
f171938b00, 7b27368d2d, f754df51bc, 77dce53c7a, 73f268e7c4, 075d7d8131,
206337044a, 55b44ff7aa, ae48639ae9, 6b5e78f810, 3e485e0a88, 335c78f908,
b103e4eaf6, c4a6c63d23, 4a4f10f97b, a5712daab7, 28cb1ef106, 0d99a3e527,
9da43263ce, 784e199068, 1ffee802c0, 6f0ac7f0c2, 941d4d8985, 44c76d8ffb,
df9a9f5fb6, bebd0fa0e6, 594e90789d, f2ab42cb80, 3224fa8ce7, ce156c1344,
434e553022, de5b5ee4b0, 60636bf56d, 1ca71f93e9, 63ff39bed6, cb0b9604d4,
1dbe3b62d7, 5c0707d221, c4ffc37dd5, aaf93b0f6f, 23eab1e84f, 218f32e552,
62fbe4d08c, 41ffd00619, 98b9a604cf, 01468b640a, 4d96b036a2, 2d1ac15e24,
2c27e8576f, 67f638d953, ea5bd5f7d3, 394f695416, e4db466ab9, ee04c014c9,
ea84f08de8, 7d8a2b2d56, 995d1e5aac, 3b918492ae, 66b419163c, 52bff39c22,
d151a13e87, 5e7e7d3598, 356248bd05, 73a60153f6, 67cf0e62a7, 21fbcc3e67,
0d63ff92a0, 983645e222, d2805bfa47
9  .github/PULL_REQUEST_TEMPLATE.md (new file)

@@ -0,0 +1,9 @@
Thank you for proposing a pull request. This template will guide you through the essential steps necessary for a pull request.
Make sure that:

- [ ] You have read the [Spring Data contribution guidelines](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc).
- [ ] There is a ticket in the bug tracker for the project in our [JIRA](https://jira.spring.io/browse/DATAMONGO).
- [ ] You use the code formatters provided [here](https://github.com/spring-projects/spring-data-build/tree/master/etc/ide) and have them applied to your changes. Don’t submit any formatting related changes.
- [ ] You submit test cases (unit or integration tests) that back your changes.
- [ ] You added yourself as author in the headers of the classes you touched. Amend the date range in the Apache license header if needed. For new types, add the license header (copy from another file and set the current year only).
- [ ] You provide your full name and an email address registered with your GitHub account. If you’re a first-time submitter, make sure you have completed the [Contributor’s License Agreement form](https://support.springsource.com/spring_committer_signup).
19  .travis.yml

@@ -3,13 +3,28 @@ language: java
jdk:
  - oraclejdk8

services:
  - mongodb
before_script:
  - mongod --version

env:
  matrix:
    - PROFILE=ci
    - PROFILE=mongo-next
    - PROFILE=mongo3
    - PROFILE=mongo3-next
    - PROFILE=mongo31
    - PROFILE=mongo32
    - PROFILE=mongo33

# Current MongoDB version is 2.4.2 as of 2016-04, see https://github.com/travis-ci/travis-ci/issues/3694
# apt-get starts a MongoDB instance so it's not started using before_script
addons:
  apt:
    sources:
      - mongodb-3.2-precise
    packages:
      - mongodb-org-server
      - mongodb-org-shell

sudo: false
27  CODE_OF_CONDUCT.adoc (new file)

@@ -0,0 +1,27 @@
= Contributor Code of Conduct

As contributors and maintainers of this project, and in the interest of fostering an open and welcoming community, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities.

We are committed to making participation in this project a harassment-free experience for everyone, regardless of level of experience, gender, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, or nationality.

Examples of unacceptable behavior by participants include:

* The use of sexualized language or imagery
* Personal attacks
* Trolling or insulting/derogatory comments
* Public or private harassment
* Publishing other's private information, such as physical or electronic addresses,
without explicit permission
* Other unethical or unprofessional conduct

Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.

By adopting this Code of Conduct, project maintainers commit themselves to fairly and consistently applying these principles to every aspect of managing this project. Project maintainers who do not follow or enforce the Code of Conduct may be permanently removed from the project team.

This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community.

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting a project maintainer at spring-code-of-conduct@pivotal.io.
All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances.
Maintainers are obligated to maintain confidentiality with regard to the reporter of an incident.

This Code of Conduct is adapted from the http://contributor-covenant.org[Contributor Covenant], version 1.3.0, available at http://contributor-covenant.org/version/1/3/0/[contributor-covenant.org/version/1/3/0/].

@@ -1 +0,0 @@
-You find the contribution guidelines for Spring Data projects [here](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.md).

3  CONTRIBUTING.adoc (new file)

@@ -0,0 +1,3 @@
= Spring Data contribution guidelines

You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc[here].
66  pom.xml

@@ -5,7 +5,7 @@
   <groupId>org.springframework.data</groupId>
   <artifactId>spring-data-mongodb-parent</artifactId>
-  <version>1.7.3.BUILD-SNAPSHOT</version>
+  <version>1.9.3.RELEASE</version>
   <packaging>pom</packaging>

   <name>Spring Data MongoDB</name>

@@ -15,7 +15,7 @@
   <parent>
     <groupId>org.springframework.data.build</groupId>
     <artifactId>spring-data-parent</artifactId>
-    <version>1.6.3.BUILD-SNAPSHOT</version>
+    <version>1.8.3.RELEASE</version>
   </parent>

   <modules>

@@ -28,8 +28,8 @@
   <properties>
     <project.type>multi</project.type>
     <dist.id>spring-data-mongodb</dist.id>
-    <springdata.commons>1.10.3.BUILD-SNAPSHOT</springdata.commons>
-    <mongo>2.13.0</mongo>
+    <springdata.commons>1.12.3.RELEASE</springdata.commons>
+    <mongo>2.14.3</mongo>
     <mongo.osgi>2.13.0</mongo.osgi>
   </properties>

@@ -107,7 +107,7 @@
     <id>mongo-next</id>
     <properties>
-      <mongo>2.14.0-SNAPSHOT</mongo>
+      <mongo>2.15.0-SNAPSHOT</mongo>
     </properties>

     <repositories>

@@ -123,7 +123,7 @@
     <id>mongo3</id>
     <properties>
-      <mongo>3.0.2</mongo>
+      <mongo>3.0.4</mongo>
     </properties>

   </profile>

@@ -132,7 +132,7 @@
     <id>mongo3-next</id>
     <properties>
-      <mongo>3.0.0-SNAPSHOT</mongo>
+      <mongo>3.0.5-SNAPSHOT</mongo>
     </properties>

     <repositories>

@@ -143,6 +143,54 @@
     </repositories>

   </profile>
+
+  <profile>
+    <id>mongo31</id>
+    <properties>
+      <mongo>3.1.1</mongo>
+    </properties>
+  </profile>
+
+  <profile>
+    <id>mongo32</id>
+    <properties>
+      <mongo>3.2.2</mongo>
+    </properties>
+  </profile>
+
+  <profile>
+    <id>mongo33</id>
+    <properties>
+      <mongo>3.3.0</mongo>
+    </properties>
+    <repositories>
+      <repository>
+        <id>mongo-snapshots</id>
+        <url>https://oss.sonatype.org/content/repositories/snapshots</url>
+      </repository>
+    </repositories>
+  </profile>
+
+  <profile>
+    <id>release</id>
+    <build>
+      <plugins>
+        <plugin>
+          <groupId>org.jfrog.buildinfo</groupId>
+          <artifactId>artifactory-maven-plugin</artifactId>
+          <inherited>false</inherited>
+        </plugin>
+      </plugins>
+    </build>
+  </profile>

   </profiles>

   <dependencies>

@@ -156,8 +204,8 @@
   <repositories>
     <repository>
-      <id>spring-libs-snapshot</id>
-      <url>https://repo.spring.io/libs-snapshot</url>
+      <id>spring-libs-release</id>
+      <url>https://repo.spring.io/libs-release</url>
     </repository>
   </repositories>
@@ -6,7 +6,7 @@
   <parent>
     <groupId>org.springframework.data</groupId>
     <artifactId>spring-data-mongodb-parent</artifactId>
-    <version>1.7.3.BUILD-SNAPSHOT</version>
+    <version>1.9.3.RELEASE</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

@@ -48,7 +48,7 @@
   <dependency>
     <groupId>org.springframework.data</groupId>
     <artifactId>spring-data-mongodb</artifactId>
-    <version>1.7.3.BUILD-SNAPSHOT</version>
+    <version>1.9.3.RELEASE</version>
   </dependency>

   <dependency>

@@ -13,7 +13,7 @@
   <parent>
     <groupId>org.springframework.data</groupId>
     <artifactId>spring-data-mongodb-parent</artifactId>
-    <version>1.7.3.BUILD-SNAPSHOT</version>
+    <version>1.9.3.RELEASE</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

@@ -1,11 +1,11 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>

   <parent>
     <groupId>org.springframework.data</groupId>
     <artifactId>spring-data-mongodb-parent</artifactId>
-    <version>1.7.3.BUILD-SNAPSHOT</version>
+    <version>1.9.3.RELEASE</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
@@ -1,5 +1,5 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<context version="7.1.10.209">
|
||||
<context version="7.2.2.230">
|
||||
<scope type="Project" name="spring-data-mongodb">
|
||||
<element type="TypeFilterReferenceOverridden" name="Filter">
|
||||
<element type="IncludeTypePattern" name="org.springframework.data.mongodb.**"/>
|
||||
@@ -35,6 +35,12 @@
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|API" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|Implementation" type="AllowedDependency"/>
|
||||
</element>
|
||||
<element type="Subsystem" name="CDI">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.cdi.**"/>
|
||||
</element>
|
||||
<stereotype name="Unrestricted"/>
|
||||
</element>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Config" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core" type="AllowedDependency"/>
|
||||
</element>
|
||||
@@ -76,6 +82,11 @@
|
||||
</element>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Geospatial" type="AllowedDependency"/>
|
||||
</element>
|
||||
<element type="Subsystem" name="Script">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.script.**"/>
|
||||
</element>
|
||||
</element>
|
||||
<element type="Subsystem" name="Conversion">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.core.convert.**"/>
|
||||
@@ -83,6 +94,7 @@
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Geospatial" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Script" type="AllowedDependency"/>
|
||||
</element>
|
||||
<element type="Subsystem" name="SpEL">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
@@ -105,6 +117,11 @@
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query" type="AllowedDependency"/>
|
||||
</element>
|
||||
<element type="Subsystem" name="MapReduce">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.mapreduce.**"/>
|
||||
</element>
|
||||
</element>
|
||||
<element type="Subsystem" name="Core">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="WeakTypePattern" name="**.core.**"/>
|
||||
@@ -113,8 +130,10 @@
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Conversion" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Geospatial" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Index" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|MapReduce" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Script" type="AllowedDependency"/>
|
||||
</element>
|
||||
<element type="Subsystem" name="Util">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
@@ -169,7 +188,32 @@
|
||||
</element>
|
||||
<element type="Subsystem" name="Querydsl">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="com.mysema.query.**"/>
|
||||
<element type="IncludeTypePattern" name="com.querydsl.**"/>
|
||||
</element>
|
||||
</element>
|
||||
<element type="Subsystem" name="Slf4j">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="org.slf4j.**"/>
|
||||
</element>
|
||||
</element>
|
||||
<element type="Subsystem" name="Jackson">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="com.fasterxml.jackson.**"/>
|
||||
</element>
|
||||
</element>
|
||||
<element type="Subsystem" name="DOM">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="org.w3c.dom.**"/>
|
||||
</element>
|
||||
</element>
|
||||
<element type="Subsystem" name="AOP Alliance">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="org.aopalliance.**"/>
|
||||
</element>
|
||||
</element>
|
||||
<element type="Subsystem" name="Guava">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="com.google.common.**"/>
|
||||
</element>
|
||||
</element>
|
||||
</architecture>
|
||||
|
||||
@@ -11,7 +11,7 @@
   <parent>
     <groupId>org.springframework.data</groupId>
     <artifactId>spring-data-mongodb-parent</artifactId>
-    <version>1.7.3.BUILD-SNAPSHOT</version>
+    <version>1.9.3.RELEASE</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

@@ -59,14 +59,14 @@
   </dependency>

   <dependency>
-    <groupId>com.mysema.querydsl</groupId>
+    <groupId>com.querydsl</groupId>
     <artifactId>querydsl-mongodb</artifactId>
     <version>${querydsl}</version>
     <optional>true</optional>
   </dependency>

   <dependency>
-    <groupId>com.mysema.querydsl</groupId>
+    <groupId>com.querydsl</groupId>
     <artifactId>querydsl-apt</artifactId>
     <version>${querydsl}</version>
     <scope>provided</scope>

@@ -183,7 +183,7 @@
   <version>${apt}</version>
   <dependencies>
     <dependency>
-      <groupId>com.mysema.querydsl</groupId>
+      <groupId>com.querydsl</groupId>
       <artifactId>querydsl-apt</artifactId>
       <version>${querydsl}</version>
     </dependency>
@@ -0,0 +1,61 @@
/*
 * Copyright 2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb;

import java.util.List;

import org.springframework.dao.DataAccessException;

import com.mongodb.BulkWriteError;
import com.mongodb.BulkWriteException;
import com.mongodb.BulkWriteResult;

/**
 * Is thrown when errors occur during bulk operations.
 *
 * @author Tobias Trelle
 * @author Oliver Gierke
 * @since 1.9
 */
public class BulkOperationException extends DataAccessException {

	private static final long serialVersionUID = 73929601661154421L;

	private final List<BulkWriteError> errors;
	private final BulkWriteResult result;

	/**
	 * Creates a new {@link BulkOperationException} with the given message and source {@link BulkWriteException}.
	 *
	 * @param message must not be {@literal null}.
	 * @param source must not be {@literal null}.
	 */
	public BulkOperationException(String message, BulkWriteException source) {

		super(message, source);

		this.errors = source.getWriteErrors();
		this.result = source.getWriteResult();
	}

	public List<BulkWriteError> getErrors() {
		return errors;
	}

	public BulkWriteResult getResult() {
		return result;
	}
}
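For orientation, here is a minimal sketch of how calling code might react to a failed bulk write. It assumes a `MongoOperations` instance supplied by the caller and an illustrative collection name `"people"`; only `BulkOperationException` and its accessors come from the class added above.

```java
import java.util.List;

import org.springframework.data.mongodb.BulkOperationException;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.MongoOperations;

import com.mongodb.BulkWriteError;
import com.mongodb.DBObject;

public class BulkErrorHandlingExample {

	// 'template' and the documents are assumed to be provided by the caller; "people" is illustrative.
	public void insertAll(MongoOperations template, List<DBObject> documents) {
		try {
			// UNORDERED keeps processing the remaining writes after an individual failure.
			template.bulkOps(BulkMode.UNORDERED, "people").insert(documents).execute();
		} catch (BulkOperationException e) {
			// Each BulkWriteError carries the index of the failed write plus the server error code and message.
			for (BulkWriteError error : e.getErrors()) {
				System.err.println("write " + error.getIndex() + " failed with code " + error.getCode());
			}
		}
	}
}
```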
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2014 by the original author(s).
|
||||
* Copyright 2011-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -18,6 +18,10 @@ package org.springframework.data.mongodb.config;
|
||||
import static org.springframework.data.config.ParsingUtils.*;
|
||||
import static org.springframework.data.mongodb.config.MongoParsingUtils.*;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.beans.factory.BeanDefinitionStoreException;
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.beans.factory.parsing.BeanComponentDefinition;
|
||||
@@ -34,6 +38,7 @@ import org.springframework.util.StringUtils;
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoClientURI;
|
||||
import com.mongodb.MongoURI;
|
||||
|
||||
/**
|
||||
@@ -42,9 +47,22 @@ import com.mongodb.MongoURI;
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Viktor Khoroshko
|
||||
*/
|
||||
public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
|
||||
private static final Set<String> MONGO_URI_ALLOWED_ADDITIONAL_ATTRIBUTES;
|
||||
|
||||
static {
|
||||
|
||||
Set<String> mongoUriAllowedAdditionalAttributes = new HashSet<String>();
|
||||
mongoUriAllowedAdditionalAttributes.add("id");
|
||||
mongoUriAllowedAdditionalAttributes.add("write-concern");
|
||||
|
||||
MONGO_URI_ALLOWED_ADDITIONAL_ATTRIBUTES = Collections.unmodifiableSet(mongoUriAllowedAdditionalAttributes);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext)
|
||||
@@ -64,29 +82,25 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
@Override
|
||||
protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {
|
||||
|
||||
Object source = parserContext.extractSource(element);
|
||||
|
||||
BeanComponentDefinitionBuilder helper = new BeanComponentDefinitionBuilder(element, parserContext);
|
||||
|
||||
String uri = element.getAttribute("uri");
|
||||
String mongoRef = element.getAttribute("mongo-ref");
|
||||
String dbname = element.getAttribute("dbname");
|
||||
|
||||
BeanDefinition userCredentials = getUserCredentialsBeanDefinition(element, parserContext);
|
||||
|
||||
// Common setup
|
||||
BeanDefinitionBuilder dbFactoryBuilder = BeanDefinitionBuilder.genericBeanDefinition(SimpleMongoDbFactory.class);
|
||||
setPropertyValue(dbFactoryBuilder, element, "write-concern", "writeConcern");
|
||||
|
||||
if (StringUtils.hasText(uri)) {
|
||||
if (StringUtils.hasText(mongoRef) || StringUtils.hasText(dbname) || userCredentials != null) {
|
||||
parserContext.getReaderContext().error("Configure either Mongo URI or details individually!", source);
|
||||
}
|
||||
BeanDefinition mongoUri = getMongoUri(element, parserContext);
|
||||
|
||||
dbFactoryBuilder.addConstructorArgValue(getMongoUri(uri));
|
||||
if (mongoUri != null) {
|
||||
|
||||
dbFactoryBuilder.addConstructorArgValue(mongoUri);
|
||||
return getSourceBeanDefinition(dbFactoryBuilder, parserContext, element);
|
||||
}
|
||||
|
||||
BeanComponentDefinitionBuilder helper = new BeanComponentDefinitionBuilder(element, parserContext);
|
||||
|
||||
String mongoRef = element.getAttribute("mongo-ref");
|
||||
String dbname = element.getAttribute("dbname");
|
||||
|
||||
BeanDefinition userCredentials = getUserCredentialsBeanDefinition(element, parserContext);
|
||||
|
||||
// Defaulting
|
||||
if (StringUtils.hasText(mongoRef)) {
|
||||
dbFactoryBuilder.addConstructorArgReference(mongoRef);
|
||||
@@ -147,14 +161,42 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link BeanDefinition} for a {@link MongoURI}.
|
||||
* Creates a {@link BeanDefinition} for a {@link MongoURI} or {@link MongoClientURI} depending on configured
|
||||
* attributes. <br />
|
||||
* Errors when configured element contains {@literal uri} or {@literal client-uri} along with other attributes except
|
||||
* {@literal write-concern} and/or {@literal id}.
|
||||
*
|
||||
* @param uri
|
||||
* @return
|
||||
* @param element must not be {@literal null}.
|
||||
* @param parserContext
|
||||
* @return {@literal null} in case no client-/uri defined.
|
||||
*/
|
||||
private BeanDefinition getMongoUri(String uri) {
|
||||
private BeanDefinition getMongoUri(Element element, ParserContext parserContext) {
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(MongoURI.class);
|
||||
boolean hasClientUri = element.hasAttribute("client-uri");
|
||||
|
||||
if (!hasClientUri && !element.hasAttribute("uri")) {
|
||||
return null;
|
||||
}
|
||||
|
||||
int allowedAttributesCount = 1;
|
||||
for (String attribute : MONGO_URI_ALLOWED_ADDITIONAL_ATTRIBUTES) {
|
||||
|
||||
if (element.hasAttribute(attribute)) {
|
||||
allowedAttributesCount++;
|
||||
}
|
||||
}
|
||||
|
||||
if (element.getAttributes().getLength() > allowedAttributesCount) {
|
||||
|
||||
parserContext.getReaderContext().error(
|
||||
"Configure either " + (hasClientUri ? "Mongo Client URI" : "Mongo URI") + " or details individually!",
|
||||
parserContext.extractSource(element));
|
||||
}
|
||||
|
||||
Class<?> type = hasClientUri ? MongoClientURI.class : MongoURI.class;
|
||||
String uri = hasClientUri ? element.getAttribute("client-uri") : element.getAttribute("uri");
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(type);
|
||||
builder.addConstructorArgValue(uri);
|
||||
|
||||
return builder.getBeanDefinition();
|
||||
|
||||
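The parser above only wires up bean definitions from XML. As a rough Java equivalent of the `client-uri` path (a sketch only, assuming a locally running MongoDB and a database name of your choosing), it boils down to constructing a `SimpleMongoDbFactory` from a `MongoClientURI`:

```java
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;

import com.mongodb.MongoClientURI;

public class MongoDbFactoryConfigExample {

	public MongoTemplate mongoTemplate() throws Exception {
		// Counterpart of <mongo:db-factory client-uri="..."/>: the URI carries host, credentials and database.
		SimpleMongoDbFactory factory =
				new SimpleMongoDbFactory(new MongoClientURI("mongodb://localhost:27017/database"));
		return new MongoTemplate(factory);
	}
}
```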
@@ -0,0 +1,145 @@
|
||||
/*
|
||||
* Copyright 2015-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.util.Pair;
|
||||
|
||||
import com.mongodb.BulkWriteResult;
|
||||
|
||||
/**
|
||||
* Bulk operations for insert/update/remove actions on a collection. These bulks operation are available since MongoDB
|
||||
* 2.6 and make use of low level bulk commands on the protocol level. This interface defines a fluent API to add
|
||||
* multiple single operations or list of similar operations in sequence which can then eventually be executed by calling
|
||||
* {@link #execute()}.
|
||||
*
|
||||
* @author Tobias Trelle
|
||||
* @author Oliver Gierke
|
||||
* @since 1.9
|
||||
*/
|
||||
public interface BulkOperations {
|
||||
|
||||
/**
|
||||
* Mode for bulk operation.
|
||||
**/
|
||||
public enum BulkMode {
|
||||
|
||||
/** Perform bulk operations in sequence. The first error will cancel processing. */
|
||||
ORDERED,
|
||||
|
||||
/** Perform bulk operations in parallel. Processing will continue on errors. */
|
||||
UNORDERED
|
||||
};
|
||||
|
||||
/**
|
||||
* Add a single insert to the bulk operation.
|
||||
*
|
||||
* @param documents the document to insert, must not be {@literal null}.
|
||||
* @return the current {@link BulkOperations} instance with the insert added, will never be {@literal null}.
|
||||
*/
|
||||
BulkOperations insert(Object documents);
|
||||
|
||||
/**
|
||||
* Add a list of inserts to the bulk operation.
|
||||
*
|
||||
* @param documents List of documents to insert, must not be {@literal null}.
|
||||
* @return the current {@link BulkOperations} instance with the insert added, will never be {@literal null}.
|
||||
*/
|
||||
BulkOperations insert(List<? extends Object> documents);
|
||||
|
||||
/**
|
||||
* Add a single update to the bulk operation. For the update request, only the first matching document is updated.
|
||||
*
|
||||
* @param query update criteria, must not be {@literal null}.
|
||||
* @param update {@link Update} operation to perform, must not be {@literal null}.
|
||||
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
|
||||
*/
|
||||
BulkOperations updateOne(Query query, Update update);
|
||||
|
||||
/**
|
||||
* Add a list of updates to the bulk operation. For each update request, only the first matching document is updated.
|
||||
*
|
||||
* @param updates Update operations to perform.
|
||||
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
|
||||
*/
|
||||
BulkOperations updateOne(List<Pair<Query, Update>> updates);
|
||||
|
||||
/**
|
||||
* Add a single update to the bulk operation. For the update request, all matching documents are updated.
|
||||
*
|
||||
* @param query Update criteria.
|
||||
* @param update Update operation to perform.
|
||||
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
|
||||
*/
|
||||
BulkOperations updateMulti(Query query, Update update);
|
||||
|
||||
/**
|
||||
* Add a list of updates to the bulk operation. For each update request, all matching documents are updated.
|
||||
*
|
||||
* @param updates Update operations to perform.
|
||||
* @return The bulk operation.
|
||||
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
|
||||
*/
|
||||
BulkOperations updateMulti(List<Pair<Query, Update>> updates);
|
||||
|
||||
/**
|
||||
* Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty,
|
||||
* else an insert.
|
||||
*
|
||||
* @param query Update criteria.
|
||||
* @param update Update operation to perform.
|
||||
* @return The bulk operation.
|
||||
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
|
||||
*/
|
||||
BulkOperations upsert(Query query, Update update);
|
||||
|
||||
/**
|
||||
* Add a list of upserts to the bulk operation. An upsert is an update if the set of matching documents is not empty,
|
||||
* else an insert.
|
||||
*
|
||||
* @param updates Updates/insert operations to perform.
|
||||
* @return The bulk operation.
|
||||
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
|
||||
*/
|
||||
BulkOperations upsert(List<Pair<Query, Update>> updates);
|
||||
|
||||
/**
|
||||
* Add a single remove operation to the bulk operation.
|
||||
*
|
||||
* @param remove the {@link Query} to select the documents to be removed, must not be {@literal null}.
|
||||
* @return the current {@link BulkOperations} instance with the removal added, will never be {@literal null}.
|
||||
*/
|
||||
BulkOperations remove(Query remove);
|
||||
|
||||
/**
|
||||
* Add a list of remove operations to the bulk operation.
|
||||
*
|
||||
* @param removes the remove operations to perform, must not be {@literal null}.
|
||||
* @return the current {@link BulkOperations} instance with the removal added, will never be {@literal null}.
|
||||
*/
|
||||
BulkOperations remove(List<Query> removes);
|
||||
|
||||
/**
|
||||
* Execute all bulk operations using the default write concern.
|
||||
*
|
||||
* @return Result of the bulk operation providing counters for inserts/updates etc.
|
||||
* @throws {@link BulkOperationException} if an error occurred during bulk processing.
|
||||
*/
|
||||
BulkWriteResult execute();
|
||||
}
|
||||
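As a usage sketch of the interface above (assuming a `MongoOperations` instance, a collection called `"people"`, and field names chosen purely for illustration), several operations can be queued and then sent to the server in a single `execute()` call:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.BulkOperations;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Update;

import com.mongodb.BasicDBObject;
import com.mongodb.BulkWriteResult;

public class BulkOperationsUsageExample {

	public void bulkUpdate(MongoOperations template) {

		BulkOperations bulk = template.bulkOps(BulkMode.ORDERED, "people");

		bulk.insert(new BasicDBObject("firstname", "Dave").append("lastname", "Matthews"));
		bulk.updateOne(query(where("lastname").is("Matthews")), new Update().set("active", true));
		bulk.remove(query(where("active").is(false)));

		// Nothing is sent to the server until execute() is called.
		BulkWriteResult result = bulk.execute();
		System.out.println(result.getInsertedCount() + " inserted, " + result.getMatchedCount() + " matched");
	}
}
```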
@@ -0,0 +1,327 @@
|
||||
/*
|
||||
* Copyright 2015-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.util.Pair;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BulkWriteException;
|
||||
import com.mongodb.BulkWriteOperation;
|
||||
import com.mongodb.BulkWriteRequestBuilder;
|
||||
import com.mongodb.BulkWriteResult;
|
||||
import com.mongodb.DBCollection;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.WriteConcern;
|
||||
|
||||
/**
|
||||
* Default implementation for {@link BulkOperations}.
|
||||
*
|
||||
* @author Tobias Trelle
|
||||
* @author Oliver Gierke
|
||||
* @since 1.9
|
||||
*/
|
||||
class DefaultBulkOperations implements BulkOperations {
|
||||
|
||||
private final MongoOperations mongoOperations;
|
||||
private final BulkMode bulkMode;
|
||||
private final String collectionName;
|
||||
private final Class<?> entityType;
|
||||
|
||||
private PersistenceExceptionTranslator exceptionTranslator;
|
||||
private WriteConcernResolver writeConcernResolver;
|
||||
private WriteConcern defaultWriteConcern;
|
||||
|
||||
private BulkWriteOperation bulk;
|
||||
|
||||
/**
|
||||
* Creates a new {@link DefaultBulkOperations} for the given {@link MongoOperations}, {@link BulkMode}, collection
|
||||
* name and {@link WriteConcern}.
|
||||
*
|
||||
* @param mongoOperations The underlying {@link MongoOperations}, must not be {@literal null}.
|
||||
* @param bulkMode must not be {@literal null}.
|
||||
* @param collectionName Name of the collection to work on, must not be {@literal null} or empty.
|
||||
* @param entityType the entity type, can be {@literal null}.
|
||||
*/
|
||||
DefaultBulkOperations(MongoOperations mongoOperations, BulkMode bulkMode, String collectionName,
|
||||
Class<?> entityType) {
|
||||
|
||||
Assert.notNull(mongoOperations, "MongoOperations must not be null!");
|
||||
Assert.notNull(bulkMode, "BulkMode must not be null!");
|
||||
Assert.hasText(collectionName, "Collection name must not be null or empty!");
|
||||
|
||||
this.mongoOperations = mongoOperations;
|
||||
this.bulkMode = bulkMode;
|
||||
this.collectionName = collectionName;
|
||||
this.entityType = entityType;
|
||||
|
||||
this.exceptionTranslator = new MongoExceptionTranslator();
|
||||
this.writeConcernResolver = DefaultWriteConcernResolver.INSTANCE;
|
||||
|
||||
this.bulk = initBulkOperation();
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link PersistenceExceptionTranslator} to be used. Defaults to {@link MongoExceptionTranslator}.
|
||||
*
|
||||
* @param exceptionTranslator can be {@literal null}.
|
||||
*/
|
||||
public void setExceptionTranslator(PersistenceExceptionTranslator exceptionTranslator) {
|
||||
this.exceptionTranslator = exceptionTranslator == null ? new MongoExceptionTranslator() : exceptionTranslator;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link WriteConcernResolver} to be used. Defaults to {@link DefaultWriteConcernResolver}.
|
||||
*
|
||||
* @param writeConcernResolver can be {@literal null}.
|
||||
*/
|
||||
public void setWriteConcernResolver(WriteConcernResolver writeConcernResolver) {
|
||||
this.writeConcernResolver = writeConcernResolver == null ? DefaultWriteConcernResolver.INSTANCE
|
||||
: writeConcernResolver;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the default {@link WriteConcern} to be used. Defaults to {@literal null}.
|
||||
*
|
||||
* @param defaultWriteConcern can be {@literal null}.
|
||||
*/
|
||||
public void setDefaultWriteConcern(WriteConcern defaultWriteConcern) {
|
||||
this.defaultWriteConcern = defaultWriteConcern;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#insert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations insert(Object document) {
|
||||
|
||||
Assert.notNull(document, "Document must not be null!");
|
||||
|
||||
bulk.insert((DBObject) mongoOperations.getConverter().convertToMongoType(document));
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#insert(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations insert(List<? extends Object> documents) {
|
||||
|
||||
Assert.notNull(documents, "Documents must not be null!");
|
||||
|
||||
for (Object document : documents) {
|
||||
insert(document);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#updateOne(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public BulkOperations updateOne(Query query, Update update) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.notNull(update, "Update must not be null!");
|
||||
|
||||
return updateOne(Arrays.asList(Pair.of(query, update)));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#updateOne(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations updateOne(List<Pair<Query, Update>> updates) {
|
||||
|
||||
Assert.notNull(updates, "Updates must not be null!");
|
||||
|
||||
for (Pair<Query, Update> update : updates) {
|
||||
update(update.getFirst(), update.getSecond(), false, false);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public BulkOperations updateMulti(Query query, Update update) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.notNull(update, "Update must not be null!");
|
||||
|
||||
return updateMulti(Arrays.asList(Pair.of(query, update)));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#updateMulti(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations updateMulti(List<Pair<Query, Update>> updates) {
|
||||
|
||||
Assert.notNull(updates, "Updates must not be null!");
|
||||
|
||||
for (Pair<Query, Update> update : updates) {
|
||||
update(update.getFirst(), update.getSecond(), false, true);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations upsert(Query query, Update update) {
|
||||
return update(query, update, true, true);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#upsert(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations upsert(List<Pair<Query, Update>> updates) {
|
||||
|
||||
for (Pair<Query, Update> update : updates) {
|
||||
upsert(update.getFirst(), update.getSecond());
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#remove(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations remove(Query query) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
|
||||
bulk.find(query.getQueryObject()).remove();
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#remove(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations remove(List<Query> removes) {
|
||||
|
||||
Assert.notNull(removes, "Removals must not be null!");
|
||||
|
||||
for (Query query : removes) {
|
||||
remove(query);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#executeBulk()
|
||||
*/
|
||||
@Override
|
||||
public BulkWriteResult execute() {
|
||||
|
||||
MongoAction action = new MongoAction(defaultWriteConcern, MongoActionOperation.BULK, collectionName, entityType,
|
||||
null, null);
|
||||
WriteConcern writeConcern = writeConcernResolver.resolve(action);
|
||||
|
||||
try {
|
||||
|
||||
return writeConcern == null ? bulk.execute() : bulk.execute(writeConcern);
|
||||
|
||||
} catch (BulkWriteException o_O) {
|
||||
|
||||
DataAccessException toThrow = exceptionTranslator.translateExceptionIfPossible(o_O);
|
||||
throw toThrow == null ? o_O : toThrow;
|
||||
|
||||
} finally {
|
||||
this.bulk = initBulkOperation();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs update and upsert bulk operations.
|
||||
*
|
||||
* @param query the {@link Query} to determine documents to update.
|
||||
* @param update the {@link Update} to perform, must not be {@literal null}.
|
||||
* @param upsert whether to upsert.
|
||||
* @param multi whether to issue a multi-update.
|
||||
* @return the {@link BulkOperations} with the update registered.
|
||||
*/
|
||||
private BulkOperations update(Query query, Update update, boolean upsert, boolean multi) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.notNull(update, "Update must not be null!");
|
||||
|
||||
BulkWriteRequestBuilder builder = bulk.find(query.getQueryObject());
|
||||
|
||||
if (upsert) {
|
||||
|
||||
if (multi) {
|
||||
builder.upsert().update(update.getUpdateObject());
|
||||
} else {
|
||||
builder.upsert().updateOne(update.getUpdateObject());
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
if (multi) {
|
||||
builder.update(update.getUpdateObject());
|
||||
} else {
|
||||
builder.updateOne(update.getUpdateObject());
|
||||
}
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
private final BulkWriteOperation initBulkOperation() {
|
||||
|
||||
DBCollection collection = mongoOperations.getCollection(collectionName);
|
||||
|
||||
switch (bulkMode) {
|
||||
case ORDERED:
|
||||
return collection.initializeOrderedBulkOperation();
|
||||
case UNORDERED:
|
||||
return collection.initializeUnorderedBulkOperation();
|
||||
}
|
||||
|
||||
throw new IllegalStateException("BulkMode was null!");
|
||||
}
|
||||
}
|
||||
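One detail of the implementation above worth noting: `execute()` re-creates the underlying `BulkWriteOperation` in its `finally` block, so the same `BulkOperations` instance can be reused for a subsequent batch. A sketch (collection and field names are illustrative only):

```java
import org.springframework.data.mongodb.core.BulkOperations;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.MongoOperations;

import com.mongodb.BasicDBObject;

public class BulkReuseExample {

	public void twoBatches(MongoOperations template) {

		BulkOperations bulk = template.bulkOps(BulkMode.UNORDERED, "people");

		bulk.insert(new BasicDBObject("firstname", "Carter"));
		bulk.execute(); // first round trip; the bulk is re-initialized afterwards

		bulk.insert(new BasicDBObject("firstname", "Boyd"));
		bulk.execute(); // second, independent round trip
	}
}
```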
@@ -1,5 +1,5 @@
 /*
- * Copyright 2014-2015 the original author or authors.
+ * Copyright 2014-2016 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.

@@ -98,7 +98,7 @@ class DefaultScriptOperations implements ScriptOperations {

 			@Override
 			public Object doInDB(DB db) throws MongoException, DataAccessException {
-				return db.eval(script.getCode(), convertScriptArgs(args));
+				return db.eval(script.getCode(), convertScriptArgs(false, args));
 			}
 		});
 	}

@@ -155,7 +155,7 @@ class DefaultScriptOperations implements ScriptOperations {
 		return scriptNames;
 	}

-	private Object[] convertScriptArgs(Object... args) {
+	private Object[] convertScriptArgs(boolean quote, Object... args) {

 		if (ObjectUtils.isEmpty(args)) {
 			return args;

@@ -164,15 +164,15 @@ class DefaultScriptOperations implements ScriptOperations {
 		List<Object> convertedValues = new ArrayList<Object>(args.length);

 		for (Object arg : args) {
-			convertedValues.add(arg instanceof String ? String.format("'%s'", arg) : this.mongoOperations.getConverter()
-					.convertToMongoType(arg));
+			convertedValues.add(arg instanceof String && quote ? String.format("'%s'", arg)
+					: this.mongoOperations.getConverter().convertToMongoType(arg));
 		}

 		return convertedValues.toArray();
 	}

 	private String convertAndJoinScriptArgs(Object... args) {
-		return ObjectUtils.isEmpty(args) ? "" : StringUtils.arrayToCommaDelimitedString(convertScriptArgs(args));
+		return ObjectUtils.isEmpty(args) ? "" : StringUtils.arrayToCommaDelimitedString(convertScriptArgs(true, args));
 	}

 	/**
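The change above stops quoting String arguments when they are passed directly to `db.eval`; quoting is now only applied when arguments are joined into a call string for named scripts. A usage sketch, assuming a `MongoOperations` instance and a trivial server-side function:

```java
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.ScriptOperations;
import org.springframework.data.mongodb.core.script.ExecutableMongoScript;

public class ScriptOperationsExample {

	public Object echo(MongoOperations template) {

		ScriptOperations scripts = template.scriptOps();

		// The String argument reaches the function as-is, without extra surrounding quotes.
		return scripts.execute(new ExecutableMongoScript("function(x) { return x; }"), "directly passed");
	}
}
```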
@@ -0,0 +1,32 @@
/*
 * Copyright 2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import com.mongodb.WriteConcern;

/**
 * Default {@link WriteConcernResolver} resolving the {@link WriteConcern} from the given {@link MongoAction}.
 *
 * @author Oliver Gierke
 */
enum DefaultWriteConcernResolver implements WriteConcernResolver {

	INSTANCE;

	public WriteConcern resolve(MongoAction action) {
		return action.getDefaultWriteConcern();
	}
}
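`DefaultWriteConcernResolver` simply falls back to the action's default write concern. A custom resolver can branch on the action instead; in the sketch below the `ACKNOWLEDGED` choice for bulk writes is purely illustrative, and such a resolver would typically be registered on a `MongoTemplate` via `setWriteConcernResolver(...)`.

```java
import org.springframework.data.mongodb.core.MongoAction;
import org.springframework.data.mongodb.core.MongoActionOperation;
import org.springframework.data.mongodb.core.WriteConcernResolver;

import com.mongodb.WriteConcern;

public class BulkAwareWriteConcernResolver implements WriteConcernResolver {

	@Override
	public WriteConcern resolve(MongoAction action) {

		// Use an acknowledged write concern for bulk operations, fall back to the action's default otherwise.
		if (MongoActionOperation.BULK.equals(action.getMongoActionOperation())) {
			return WriteConcern.ACKNOWLEDGED;
		}

		return action.getDefaultWriteConcern();
	}
}
```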
@@ -1,5 +1,5 @@
 /*
- * Copyright 2011-2012 the original author or authors.
+ * Copyright 2011-2015 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.

@@ -25,5 +25,5 @@ package org.springframework.data.mongodb.core;
  */
 public enum MongoActionOperation {

-	REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE
+	REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE, BULK;
 }
@@ -25,10 +25,14 @@ import org.springframework.dao.DataIntegrityViolationException;
|
||||
import org.springframework.dao.DuplicateKeyException;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.dao.InvalidDataAccessResourceUsageException;
|
||||
import org.springframework.dao.PermissionDeniedDataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.BulkOperationException;
|
||||
import org.springframework.data.mongodb.UncategorizedMongoDbException;
|
||||
import org.springframework.data.mongodb.util.MongoDbErrorCodes;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.BulkWriteException;
|
||||
import com.mongodb.MongoException;
|
||||
|
||||
/**
|
||||
@@ -42,12 +46,12 @@ import com.mongodb.MongoException;
|
||||
*/
|
||||
public class MongoExceptionTranslator implements PersistenceExceptionTranslator {
|
||||
|
||||
private static final Set<String> DULICATE_KEY_EXCEPTIONS = new HashSet<String>(Arrays.asList(
|
||||
"MongoException.DuplicateKey", "DuplicateKeyException"));
|
||||
private static final Set<String> DULICATE_KEY_EXCEPTIONS = new HashSet<String>(
|
||||
Arrays.asList("MongoException.DuplicateKey", "DuplicateKeyException"));
|
||||
|
||||
private static final Set<String> RESOURCE_FAILURE_EXCEPTIONS = new HashSet<String>(Arrays.asList(
|
||||
"MongoException.Network", "MongoSocketException", "MongoException.CursorNotFound",
|
||||
"MongoCursorNotFoundException", "MongoServerSelectionException", "MongoTimeoutException"));
|
||||
private static final Set<String> RESOURCE_FAILURE_EXCEPTIONS = new HashSet<String>(
|
||||
Arrays.asList("MongoException.Network", "MongoSocketException", "MongoException.CursorNotFound",
|
||||
"MongoCursorNotFoundException", "MongoServerSelectionException", "MongoTimeoutException"));
|
||||
|
||||
private static final Set<String> RESOURCE_USAGE_EXCEPTIONS = new HashSet<String>(
|
||||
Arrays.asList("MongoInternalException"));
|
||||
@@ -81,17 +85,24 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
|
||||
return new DataIntegrityViolationException(ex.getMessage(), ex);
|
||||
}
|
||||
|
||||
if (ex instanceof BulkWriteException) {
|
||||
return new BulkOperationException(ex.getMessage(), (BulkWriteException) ex);
|
||||
}
|
||||
|
||||
// All other MongoExceptions
|
||||
if (ex instanceof MongoException) {
|
||||
|
||||
int code = ((MongoException) ex).getCode();
|
||||
|
||||
if (code == 11000 || code == 11001) {
|
||||
if (MongoDbErrorCodes.isDuplicateKeyCode(code)) {
|
||||
throw new DuplicateKeyException(ex.getMessage(), ex);
|
||||
} else if (code == 12000 || code == 13440) {
|
||||
} else if (MongoDbErrorCodes.isDataAccessResourceFailureCode(code)) {
|
||||
throw new DataAccessResourceFailureException(ex.getMessage(), ex);
|
||||
} else if (code == 10003 || code == 12001 || code == 12010 || code == 12011 || code == 12012) {
|
||||
} else if (MongoDbErrorCodes.isInvalidDataAccessApiUsageCode(code) || code == 10003 || code == 12001
|
||||
|| code == 12010 || code == 12011 || code == 12012) {
|
||||
throw new InvalidDataAccessApiUsageException(ex.getMessage(), ex);
|
||||
} else if (MongoDbErrorCodes.isPermissionDeniedCode(code)) {
|
||||
throw new PermissionDeniedDataAccessException(ex.getMessage(), ex);
|
||||
}
|
||||
return new UncategorizedMongoDbException(ex.getMessage(), ex);
|
||||
}
|
||||
|
||||
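In practice the translation above means callers deal with Spring's `DataAccessException` hierarchy rather than raw driver exceptions. A sketch of reacting to a translated duplicate-key error; the `User` class, field, and unique index are assumptions made only for this example:

```java
import org.springframework.dao.DuplicateKeyException;
import org.springframework.data.mongodb.core.MongoOperations;

public class DuplicateKeyHandlingExample {

	// Minimal illustrative document class; assumes a unique index on "email" in its collection.
	static class User {
		String email;
		User(String email) { this.email = email; }
	}

	public void saveUser(MongoOperations template, String email) {
		try {
			template.insert(new User(email));
		} catch (DuplicateKeyException e) {
			// Driver error codes 11000/11001 are translated to Spring's DuplicateKeyException.
			System.err.println("A user with email " + email + " already exists.");
		}
	}
}
```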
@@ -20,6 +20,7 @@ import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.data.geo.GeoResults;
|
||||
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
@@ -292,6 +293,34 @@ public interface MongoOperations {
|
||||
*/
|
||||
ScriptOperations scriptOps();
|
||||
|
||||
/**
|
||||
* Returns a new {@link BulkOperations} for the given collection.
|
||||
*
|
||||
* @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}.
|
||||
* @param collectionName the name of the collection to work on, must not be {@literal null} or empty.
|
||||
* @return {@link BulkOperations} on the named collection
|
||||
*/
|
||||
BulkOperations bulkOps(BulkMode mode, String collectionName);
|
||||
|
||||
/**
|
||||
* Returns a new {@link BulkOperations} for the given entity type.
|
||||
*
|
||||
* @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}.
|
||||
* @param entityType the name of the entity class, must not be {@literal null}.
|
||||
* @return {@link BulkOperations} on the named collection associated of the given entity class.
|
||||
*/
|
||||
BulkOperations bulkOps(BulkMode mode, Class<?> entityType);
|
||||
|
||||
/**
|
||||
* Returns a new {@link BulkOperations} for the given entity type and collection name.
|
||||
*
|
||||
* @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}.
|
||||
* @param entityClass the name of the entity class, must not be {@literal null}.
|
||||
* @param collectionName the name of the collection to work on, must not be {@literal null} or empty.
|
||||
* @return {@link BulkOperations} on the named collection associated with the given entity class.
|
||||
*/
|
||||
BulkOperations bulkOps(BulkMode mode, Class<?> entityType, String collectionName);
|
||||
|
||||
/**
|
||||
* Query for a list of objects of type T from the collection used by the entity class.
|
||||
* <p/>
|
||||
@@ -600,8 +629,8 @@ public interface MongoOperations {
|
||||
<T> T findById(Object id, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify
|
||||
* <a/> to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification.
|
||||
@@ -612,8 +641,8 @@ public interface MongoOperations {
|
||||
<T> T findAndModify(Query query, Update update, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify
|
||||
* <a/> to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification.
|
||||
@@ -625,8 +654,8 @@ public interface MongoOperations {
|
||||
<T> T findAndModify(Query query, Update update, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify
|
||||
* <a/> to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* {@link FindAndModifyOptions} into account.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
@@ -639,8 +668,8 @@ public interface MongoOperations {
|
||||
<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify
|
||||
* <a/> to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* {@link FindAndModifyOptions} into account.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
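For illustration, a sketch of the findAndModify variant documented above. The Person class and its fields are assumptions; returnNew(true) makes the template return the document state after the update rather than before it.

import org.springframework.data.mongodb.core.FindAndModifyOptions;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

class FindAndModifySketch {

	Person incrementVisits(MongoOperations operations, String name) {

		Query query = Query.query(Criteria.where("name").is(name));
		Update update = new Update().inc("visits", 1);

		// Atomically applies the increment and returns the modified document.
		return operations.findAndModify(query, update, FindAndModifyOptions.options().returnNew(true), Person.class);
	}

	static class Person {
		String name;
		int visits;
	}
}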
@@ -728,9 +757,9 @@ public interface MongoOperations {
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert"
|
||||
* >Spring's Type Conversion"</a> for more details.
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert" >
|
||||
* Spring's Type Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
@@ -785,9 +814,9 @@ public interface MongoOperations {
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert"
|
||||
* >Spring's Type Conversion"</a> for more details.
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert" >
|
||||
* Spring's Type Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection
|
||||
*/
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2016 the original author or authors.
|
||||
* Copyright 2010-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -60,6 +60,7 @@ import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.ConvertingPropertyAccessor;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
|
||||
@@ -137,7 +138,6 @@ import com.mongodb.util.JSONParseException;
|
||||
* @author Chuong Ngo
|
||||
* @author Christoph Strobl
|
||||
* @author Doménique Tilleuil
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
@@ -339,7 +339,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
DBCursor cursor = collection.find(mappedQuery, mappedFields);
|
||||
QueryCursorPreparer cursorPreparer = new QueryCursorPreparer(query, entityType);
|
||||
|
||||
ReadDbObjectCallback<T> readCallback = new ReadDbObjectCallback<T>(mongoConverter, entityType);
|
||||
ReadDbObjectCallback<T> readCallback = new ReadDbObjectCallback<T>(mongoConverter, entityType,
|
||||
collection.getName());
|
||||
|
||||
return new CloseableIterableCursorAdapter<T>(cursorPreparer.prepare(cursor), exceptionTranslator, readCallback);
|
||||
}
|
||||
@@ -372,8 +373,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
*/
|
||||
@Deprecated
|
||||
public CommandResult executeCommand(final DBObject command, final int options) {
|
||||
return executeCommand(command, (options & Bytes.QUERYOPTION_SLAVEOK) != 0 ? ReadPreference.secondaryPreferred()
|
||||
: ReadPreference.primary());
|
||||
return executeCommand(command,
|
||||
(options & Bytes.QUERYOPTION_SLAVEOK) != 0 ? ReadPreference.secondaryPreferred() : ReadPreference.primary());
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -419,7 +420,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
* @param preparer allows for customization of the {@link DBCursor} used when iterating over the result set, (apply
|
||||
* limits, skips and so on).
|
||||
*/
|
||||
protected void executeQuery(Query query, String collectionName, DocumentCallbackHandler dch, CursorPreparer preparer) {
|
||||
protected void executeQuery(Query query, String collectionName, DocumentCallbackHandler dch,
|
||||
CursorPreparer preparer) {
|
||||
|
||||
Assert.notNull(query);
|
||||
|
||||
@@ -542,6 +544,28 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
return new DefaultIndexOperations(this, determineCollectionName(entityClass));
}

public BulkOperations bulkOps(BulkMode bulkMode, String collectionName) {
return bulkOps(bulkMode, null, collectionName);
}

public BulkOperations bulkOps(BulkMode bulkMode, Class<?> entityClass) {
return bulkOps(bulkMode, entityClass, determineCollectionName(entityClass));
}

public BulkOperations bulkOps(BulkMode mode, Class<?> entityType, String collectionName) {

Assert.notNull(mode, "BulkMode must not be null!");
Assert.hasText(collectionName, "Collection name must not be null or empty!");

DefaultBulkOperations operations = new DefaultBulkOperations(this, mode, collectionName, entityType);

operations.setExceptionTranslator(exceptionTranslator);
operations.setWriteConcernResolver(writeConcernResolver);
operations.setDefaultWriteConcern(writeConcern);

return operations;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.MongoOperations#scriptOps()
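As the wiring above shows, bulk operations reuse the template's exception translator, write-concern resolver, and default write concern. A sketch of registering a custom resolver so it also applies to bulk writes; the "audit" collection name is an assumption.

import com.mongodb.WriteConcern;

import org.springframework.data.mongodb.core.MongoAction;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.WriteConcernResolver;

class WriteConcernResolverSketch {

	void configure(MongoTemplate template) {

		// Consulted per MongoAction; the same resolver instance is handed to DefaultBulkOperations.
		template.setWriteConcernResolver(new WriteConcernResolver() {

			public WriteConcern resolve(MongoAction action) {
				// Stricter write concern for the (hypothetical) "audit" collection, default otherwise.
				return "audit".equals(action.getCollectionName()) ? WriteConcern.MAJORITY
						: action.getDefaultWriteConcern();
			}
		});
	}
}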
@@ -647,8 +671,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
List<Object> results = (List<Object>) commandResult.get("results");
|
||||
results = results == null ? Collections.emptyList() : results;
|
||||
|
||||
DbObjectCallback<GeoResult<T>> callback = new GeoNearResultDbObjectCallback<T>(new ReadDbObjectCallback<T>(
|
||||
mongoConverter, entityClass), near.getMetric());
|
||||
DbObjectCallback<GeoResult<T>> callback = new GeoNearResultDbObjectCallback<T>(
|
||||
new ReadDbObjectCallback<T>(mongoConverter, entityClass, collectionName), near.getMetric());
|
||||
List<GeoResult<T>> result = new ArrayList<GeoResult<T>>(results.size());
|
||||
|
||||
int index = 0;
|
||||
@@ -724,8 +748,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
public long count(Query query, Class<?> entityClass, String collectionName) {
|
||||
|
||||
Assert.hasText(collectionName);
|
||||
final DBObject dbObject = query == null ? null : queryMapper.getMappedObject(query.getQueryObject(),
|
||||
entityClass == null ? null : mappingContext.getPersistentEntity(entityClass));
|
||||
final DBObject dbObject = query == null ? null
|
||||
: queryMapper.getMappedObject(query.getQueryObject(),
|
||||
entityClass == null ? null : mappingContext.getPersistentEntity(entityClass));
|
||||
|
||||
return execute(collectionName, new CollectionCallback<Long>() {
|
||||
public Long doInCollection(DBCollection collection) throws MongoException, DataAccessException {
|
||||
@@ -805,15 +830,15 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
initializeVersionProperty(objectToSave);
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(objectToSave));
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(objectToSave, collectionName));
|
||||
|
||||
DBObject dbDoc = toDbObject(objectToSave, writer);
|
||||
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(objectToSave, dbDoc));
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(objectToSave, dbDoc, collectionName));
|
||||
Object id = insertDBObject(collectionName, dbDoc, objectToSave.getClass());
|
||||
|
||||
populateIdIfNecessary(objectToSave, id);
|
||||
maybeEmitEvent(new AfterSaveEvent<T>(objectToSave, dbDoc));
|
||||
maybeEmitEvent(new AfterSaveEvent<T>(objectToSave, dbDoc, collectionName));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -901,10 +926,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
initializeVersionProperty(o);
|
||||
BasicDBObject dbDoc = new BasicDBObject();
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(o));
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(o, collectionName));
|
||||
writer.write(o, dbDoc);
|
||||
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(o, dbDoc));
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(o, dbDoc, collectionName));
|
||||
dbObjectList.add(dbDoc);
|
||||
}
|
||||
List<ObjectId> ids = insertDBObjectList(collectionName, dbObjectList);
|
||||
@@ -912,7 +937,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
for (T obj : batchToSave) {
|
||||
if (i < ids.size()) {
|
||||
populateIdIfNecessary(obj, ids.get(i));
|
||||
maybeEmitEvent(new AfterSaveEvent<T>(obj, dbObjectList.get(i)));
|
||||
maybeEmitEvent(new AfterSaveEvent<T>(obj, dbObjectList.get(i), collectionName));
|
||||
}
|
||||
i++;
|
||||
}
|
||||
@@ -967,14 +992,14 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
BasicDBObject dbObject = new BasicDBObject();
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(objectToSave));
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(objectToSave, collectionName));
|
||||
this.mongoConverter.write(objectToSave, dbObject);
|
||||
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(objectToSave, dbObject));
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(objectToSave, dbObject, collectionName));
|
||||
Update update = Update.fromDBObject(dbObject, ID_FIELD);
|
||||
|
||||
doUpdate(collectionName, query, update, objectToSave.getClass(), false, false);
|
||||
maybeEmitEvent(new AfterSaveEvent<T>(objectToSave, dbObject));
|
||||
maybeEmitEvent(new AfterSaveEvent<T>(objectToSave, dbObject, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -982,15 +1007,15 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
assertUpdateableIdIfNotSet(objectToSave);
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(objectToSave));
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(objectToSave, collectionName));
|
||||
|
||||
DBObject dbDoc = toDbObject(objectToSave, writer);
|
||||
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(objectToSave, dbDoc));
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(objectToSave, dbDoc, collectionName));
|
||||
Object id = saveDBObject(collectionName, dbDoc, objectToSave.getClass());
|
||||
|
||||
populateIdIfNecessary(objectToSave, id);
|
||||
maybeEmitEvent(new AfterSaveEvent<T>(objectToSave, dbDoc));
|
||||
maybeEmitEvent(new AfterSaveEvent<T>(objectToSave, dbDoc, collectionName));
|
||||
}
|
||||
|
||||
protected Object insertDBObject(final String collectionName, final DBObject dbDoc, final Class<?> entityClass) {
|
||||
@@ -1004,8 +1029,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT, collectionName,
|
||||
entityClass, dbDoc, null);
|
||||
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
|
||||
WriteResult writeResult = writeConcernToUse == null ? collection.insert(dbDoc) : collection.insert(dbDoc,
|
||||
writeConcernToUse);
|
||||
WriteResult writeResult = writeConcernToUse == null ? collection.insert(dbDoc)
|
||||
: collection.insert(dbDoc, writeConcernToUse);
|
||||
handleAnyWriteResultErrors(writeResult, dbDoc, MongoActionOperation.INSERT);
|
||||
return dbDoc.get(ID_FIELD);
|
||||
}
|
||||
@@ -1026,8 +1051,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT_LIST, collectionName, null,
|
||||
null, null);
|
||||
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
|
||||
WriteResult writeResult = writeConcernToUse == null ? collection.insert(dbDocList) : collection.insert(
|
||||
dbDocList.toArray((DBObject[]) new BasicDBObject[dbDocList.size()]), writeConcernToUse);
|
||||
WriteResult writeResult = writeConcernToUse == null ? collection.insert(dbDocList)
|
||||
: collection.insert(dbDocList.toArray((DBObject[]) new BasicDBObject[dbDocList.size()]), writeConcernToUse);
|
||||
handleAnyWriteResultErrors(writeResult, null, MongoActionOperation.INSERT_LIST);
|
||||
return null;
|
||||
}
|
||||
@@ -1057,8 +1082,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.SAVE, collectionName, entityClass,
|
||||
dbDoc, null);
|
||||
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
|
||||
WriteResult writeResult = writeConcernToUse == null ? collection.save(dbDoc) : collection.save(dbDoc,
|
||||
writeConcernToUse);
|
||||
WriteResult writeResult = writeConcernToUse == null ? collection.save(dbDoc)
|
||||
: collection.save(dbDoc, writeConcernToUse);
|
||||
handleAnyWriteResultErrors(writeResult, dbDoc, MongoActionOperation.SAVE);
|
||||
return dbDoc.get(ID_FIELD);
|
||||
}
|
||||
@@ -1111,10 +1136,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
increaseVersionForUpdateIfNecessary(entity, update);
|
||||
|
||||
DBObject queryObj = query == null ? new BasicDBObject() : queryMapper.getMappedObject(query.getQueryObject(),
|
||||
entity);
|
||||
DBObject updateObj = update == null ? new BasicDBObject() : updateMapper.getMappedObject(
|
||||
update.getUpdateObject(), entity);
|
||||
DBObject queryObj = query == null ? new BasicDBObject()
|
||||
: queryMapper.getMappedObject(query.getQueryObject(), entity);
|
||||
DBObject updateObj = update == null ? new BasicDBObject()
|
||||
: updateMapper.getMappedObject(update.getUpdateObject(), entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Calling update using query: {} and update: {} in collection: {}",
|
||||
@@ -1255,9 +1280,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
Object idValue = persistentEntity.getPropertyAccessor(entity).getProperty(idProperty);
|
||||
|
||||
if (idValue == null && !MongoSimpleTypes.AUTOGENERATED_ID_TYPES.contains(idProperty.getType())) {
|
||||
throw new InvalidDataAccessApiUsageException(String.format(
|
||||
"Cannot autogenerate id of type %s for entity of type %s!", idProperty.getType().getName(), entity.getClass()
|
||||
.getName()));
|
||||
throw new InvalidDataAccessApiUsageException(
|
||||
String.format("Cannot autogenerate id of type %s for entity of type %s!", idProperty.getType().getName(),
|
||||
entity.getClass().getName()));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1287,7 +1312,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
return execute(collectionName, new CollectionCallback<WriteResult>() {
|
||||
public WriteResult doInCollection(DBCollection collection) throws MongoException, DataAccessException {
|
||||
|
||||
maybeEmitEvent(new BeforeDeleteEvent<T>(queryObject, entityClass));
|
||||
maybeEmitEvent(new BeforeDeleteEvent<T>(queryObject, entityClass, collectionName));
|
||||
|
||||
DBObject dboq = queryMapper.getMappedObject(queryObject, entity);
|
||||
|
||||
@@ -1296,16 +1321,16 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Remove using query: {} in collection: {}.", new Object[] { serializeToJsonSafely(dboq),
|
||||
collection.getName() });
|
||||
LOGGER.debug("Remove using query: {} in collection: {}.",
|
||||
new Object[] { serializeToJsonSafely(dboq), collection.getName() });
|
||||
}
|
||||
|
||||
WriteResult wr = writeConcernToUse == null ? collection.remove(dboq) : collection.remove(dboq,
|
||||
writeConcernToUse);
|
||||
WriteResult wr = writeConcernToUse == null ? collection.remove(dboq)
|
||||
: collection.remove(dboq, writeConcernToUse);
|
||||
|
||||
handleAnyWriteResultErrors(wr, dboq, MongoActionOperation.REMOVE);
|
||||
|
||||
maybeEmitEvent(new AfterDeleteEvent<T>(queryObject, entityClass));
|
||||
maybeEmitEvent(new AfterDeleteEvent<T>(queryObject, entityClass, collectionName));
|
||||
|
||||
return wr;
|
||||
}
|
||||
@@ -1313,13 +1338,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
public <T> List<T> findAll(Class<T> entityClass) {
|
||||
return executeFindMultiInternal(new FindCallback(null), null, new ReadDbObjectCallback<T>(mongoConverter,
|
||||
entityClass), determineCollectionName(entityClass));
|
||||
return findAll(entityClass, determineCollectionName(entityClass));
|
||||
}
|
||||
|
||||
public <T> List<T> findAll(Class<T> entityClass, String collectionName) {
|
||||
return executeFindMultiInternal(new FindCallback(null), null, new ReadDbObjectCallback<T>(mongoConverter,
|
||||
entityClass), collectionName);
|
||||
return executeFindMultiInternal(new FindCallback(null), null,
|
||||
new ReadDbObjectCallback<T>(mongoConverter, entityClass, collectionName), collectionName);
|
||||
}
|
||||
|
||||
public <T> MapReduceResults<T> mapReduce(String inputCollectionName, String mapFunction, String reduceFunction,
|
||||
@@ -1335,8 +1359,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
public <T> MapReduceResults<T> mapReduce(Query query, String inputCollectionName, String mapFunction,
|
||||
String reduceFunction, Class<T> entityClass) {
|
||||
return mapReduce(query, inputCollectionName, mapFunction, reduceFunction,
|
||||
new MapReduceOptions().outputTypeInline(), entityClass);
|
||||
return mapReduce(query, inputCollectionName, mapFunction, reduceFunction, new MapReduceOptions().outputTypeInline(),
|
||||
entityClass);
|
||||
}
|
||||
|
||||
public <T> MapReduceResults<T> mapReduce(Query query, String inputCollectionName, String mapFunction,
|
||||
@@ -1347,8 +1371,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
DBCollection inputCollection = getCollection(inputCollectionName);
|
||||
|
||||
MapReduceCommand command = new MapReduceCommand(inputCollection, mapFunc, reduceFunc,
|
||||
mapReduceOptions.getOutputCollection(), mapReduceOptions.getOutputType(), query == null
|
||||
|| query.getQueryObject() == null ? null : queryMapper.getMappedObject(query.getQueryObject(), null));
|
||||
mapReduceOptions.getOutputCollection(), mapReduceOptions.getOutputType(),
|
||||
query == null || query.getQueryObject() == null ? null
|
||||
: queryMapper.getMappedObject(query.getQueryObject(), null));
|
||||
|
||||
copyMapReduceOptionsToCommand(query, mapReduceOptions, command);
|
||||
|
||||
@@ -1364,7 +1389,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
List<T> mappedResults = new ArrayList<T>();
|
||||
DbObjectCallback<T> callback = new ReadDbObjectCallback<T>(mongoConverter, entityClass);
|
||||
DbObjectCallback<T> callback = new ReadDbObjectCallback<T>(mongoConverter, entityClass, inputCollectionName);
|
||||
|
||||
for (DBObject dbObject : mapReduceOutput.results()) {
|
||||
mappedResults.add(callback.doWith(dbObject));
|
||||
@@ -1425,7 +1450,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
@SuppressWarnings("unchecked")
|
||||
Iterable<DBObject> resultSet = (Iterable<DBObject>) commandResult.get("retval");
|
||||
List<T> mappedResults = new ArrayList<T>();
|
||||
DbObjectCallback<T> callback = new ReadDbObjectCallback<T>(mongoConverter, entityClass);
|
||||
DbObjectCallback<T> callback = new ReadDbObjectCallback<T>(mongoConverter, entityClass, inputCollectionName);
|
||||
|
||||
for (DBObject dbObject : resultSet) {
|
||||
mappedResults.add(callback.doWith(dbObject));
|
||||
@@ -1527,7 +1552,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
CommandResult commandResult = executeCommand(command, this.readPreference);
|
||||
handleCommandError(commandResult, command);
|
||||
|
||||
return new AggregationResults<O>(returnPotentiallyMappedResults(outputType, commandResult), commandResult);
|
||||
return new AggregationResults<O>(returnPotentiallyMappedResults(outputType, commandResult, collectionName),
|
||||
commandResult);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1537,7 +1563,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
* @param commandResult
|
||||
* @return
|
||||
*/
|
||||
private <O> List<O> returnPotentiallyMappedResults(Class<O> outputType, CommandResult commandResult) {
|
||||
private <O> List<O> returnPotentiallyMappedResults(Class<O> outputType, CommandResult commandResult,
|
||||
String collectionName) {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
Iterable<DBObject> resultSet = (Iterable<DBObject>) commandResult.get("result");
|
||||
@@ -1545,7 +1572,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
DbObjectCallback<O> callback = new UnwrapAndReadDbObjectCallback<O>(mongoConverter, outputType);
|
||||
DbObjectCallback<O> callback = new UnwrapAndReadDbObjectCallback<O>(mongoConverter, outputType, collectionName);
|
||||
|
||||
List<O> mappedResults = new ArrayList<O>();
|
||||
for (DBObject dbObject : resultSet) {
|
||||
@@ -1682,8 +1709,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
mappedFields, entityClass, collectionName);
|
||||
}
|
||||
|
||||
return executeFindOneInternal(new FindOneCallback(mappedQuery, mappedFields), new ReadDbObjectCallback<T>(
|
||||
this.mongoConverter, entityClass), collectionName);
|
||||
return executeFindOneInternal(new FindOneCallback(mappedQuery, mappedFields),
|
||||
new ReadDbObjectCallback<T>(this.mongoConverter, entityClass, collectionName), collectionName);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1697,8 +1724,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
* @return the List of converted objects.
|
||||
*/
|
||||
protected <T> List<T> doFind(String collectionName, DBObject query, DBObject fields, Class<T> entityClass) {
|
||||
return doFind(collectionName, query, fields, entityClass, null, new ReadDbObjectCallback<T>(this.mongoConverter,
|
||||
entityClass));
|
||||
return doFind(collectionName, query, fields, entityClass, null,
|
||||
new ReadDbObjectCallback<T>(this.mongoConverter, entityClass, collectionName));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1716,8 +1743,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
*/
|
||||
protected <T> List<T> doFind(String collectionName, DBObject query, DBObject fields, Class<T> entityClass,
|
||||
CursorPreparer preparer) {
|
||||
return doFind(collectionName, query, fields, entityClass, preparer, new ReadDbObjectCallback<T>(mongoConverter,
|
||||
entityClass));
|
||||
return doFind(collectionName, query, fields, entityClass, preparer,
|
||||
new ReadDbObjectCallback<T>(mongoConverter, entityClass, collectionName));
|
||||
}
|
||||
|
||||
protected <S, T> List<T> doFind(String collectionName, DBObject query, DBObject fields, Class<S> entityClass,
|
||||
@@ -1777,7 +1804,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
|
||||
return executeFindOneInternal(new FindAndRemoveCallback(queryMapper.getMappedObject(query, entity), fields, sort),
|
||||
new ReadDbObjectCallback<T>(readerToUse, entityClass), collectionName);
|
||||
new ReadDbObjectCallback<T>(readerToUse, entityClass, collectionName), collectionName);
|
||||
}
|
||||
|
||||
protected <T> T doFindAndModify(String collectionName, DBObject query, DBObject fields, DBObject sort,
|
||||
@@ -1804,7 +1831,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
return executeFindOneInternal(new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate, options),
|
||||
new ReadDbObjectCallback<T>(readerToUse, entityClass), collectionName);
|
||||
new ReadDbObjectCallback<T>(readerToUse, entityClass, collectionName), collectionName);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1870,8 +1897,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
DbObjectCallback<T> objectCallback, String collectionName) {
|
||||
|
||||
try {
|
||||
T result = objectCallback.doWith(collectionCallback.doInCollection(getAndPrepareCollection(getDb(),
|
||||
collectionName)));
|
||||
T result = objectCallback
|
||||
.doWith(collectionCallback.doInCollection(getAndPrepareCollection(getDb(), collectionName)));
|
||||
return result;
|
||||
} catch (RuntimeException e) {
|
||||
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
|
||||
@@ -1896,8 +1923,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
* @param collectionName the collection to be queried
|
||||
* @return
|
||||
*/
|
||||
private <T> List<T> executeFindMultiInternal(CollectionCallback<DBCursor> collectionCallback,
|
||||
CursorPreparer preparer, DbObjectCallback<T> objectCallback, String collectionName) {
|
||||
private <T> List<T> executeFindMultiInternal(CollectionCallback<DBCursor> collectionCallback, CursorPreparer preparer,
|
||||
DbObjectCallback<T> objectCallback, String collectionName) {
|
||||
|
||||
try {
|
||||
|
||||
@@ -1987,8 +2014,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
if (entity == null) {
|
||||
throw new InvalidDataAccessApiUsageException("No Persitent Entity information found for the class "
|
||||
+ entityClass.getName());
|
||||
throw new InvalidDataAccessApiUsageException(
|
||||
"No Persistent Entity information found for the class " + entityClass.getName());
|
||||
}
|
||||
return entity.getCollection();
|
||||
}
|
||||
@@ -2052,8 +2079,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
String error = result.getErrorMessage();
|
||||
error = error == null ? "NO MESSAGE" : error;
|
||||
|
||||
throw new InvalidDataAccessApiUsageException("Command execution failed: Error [" + error + "], Command = "
|
||||
+ source, ex);
|
||||
throw new InvalidDataAccessApiUsageException(
|
||||
"Command execution failed: Error [" + error + "], Command = " + source, ex);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2218,26 +2245,30 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
* {@link MongoReader}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
private class ReadDbObjectCallback<T> implements DbObjectCallback<T> {
|
||||
|
||||
private final EntityReader<? super T, DBObject> reader;
|
||||
private final Class<T> type;
|
||||
private final String collectionName;
|
||||
|
||||
public ReadDbObjectCallback(EntityReader<? super T, DBObject> reader, Class<T> type, String collectionName) {
|
||||
|
||||
public ReadDbObjectCallback(EntityReader<? super T, DBObject> reader, Class<T> type) {
|
||||
Assert.notNull(reader);
|
||||
Assert.notNull(type);
|
||||
this.reader = reader;
|
||||
this.type = type;
|
||||
this.collectionName = collectionName;
|
||||
}
|
||||
|
||||
public T doWith(DBObject object) {
|
||||
if (null != object) {
|
||||
maybeEmitEvent(new AfterLoadEvent<T>(object, type));
|
||||
maybeEmitEvent(new AfterLoadEvent<T>(object, type, collectionName));
|
||||
}
|
||||
T source = reader.read(type, object);
|
||||
if (null != source) {
|
||||
maybeEmitEvent(new AfterConvertEvent<T>(object, source));
|
||||
maybeEmitEvent(new AfterConvertEvent<T>(object, source, collectionName));
|
||||
}
|
||||
return source;
|
||||
}
|
||||
@@ -2245,8 +2276,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
class UnwrapAndReadDbObjectCallback<T> extends ReadDbObjectCallback<T> {
|
||||
|
||||
public UnwrapAndReadDbObjectCallback(EntityReader<? super T, DBObject> reader, Class<T> type) {
|
||||
super(reader, type);
|
||||
public UnwrapAndReadDbObjectCallback(EntityReader<? super T, DBObject> reader, Class<T> type,
|
||||
String collectionName) {
|
||||
super(reader, type, collectionName);
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -2272,15 +2304,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
}
|
||||
|
||||
private enum DefaultWriteConcernResolver implements WriteConcernResolver {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
public WriteConcern resolve(MongoAction action) {
|
||||
return action.getDefaultWriteConcern();
|
||||
}
|
||||
}
|
||||
|
||||
class QueryCursorPreparer implements CursorPreparer {
|
||||
|
||||
private final Query query;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
* Copyright 2013-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -26,6 +26,7 @@ import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.data.mongodb.core.aggregation.Fields.AggregationField;
|
||||
import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.SerializationUtils;
|
||||
@@ -37,10 +38,12 @@ import com.mongodb.DBObject;
|
||||
/**
|
||||
* An {@code Aggregation} is a representation of a list of aggregation steps to be performed by the MongoDB Aggregation
|
||||
* Framework.
|
||||
*
|
||||
*
|
||||
* @author Tobias Trelle
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
* @author Alessio Fachechi
|
||||
* @since 1.3
|
||||
*/
|
||||
public class Aggregation {
|
||||
@@ -65,7 +68,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
*
|
||||
* @param operations must not be {@literal null} or empty.
|
||||
*/
|
||||
public static Aggregation newAggregation(List<? extends AggregationOperation> operations) {
|
||||
@@ -74,7 +77,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
*
|
||||
* @param operations must not be {@literal null} or empty.
|
||||
*/
|
||||
public static Aggregation newAggregation(AggregationOperation... operations) {
|
||||
@@ -84,7 +87,7 @@ public class Aggregation {
|
||||
/**
|
||||
* Returns a copy of this {@link Aggregation} with the given {@link AggregationOptions} set. Note that options are
|
||||
* supported in MongoDB version 2.6+.
|
||||
*
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return
|
||||
* @since 1.6
|
||||
@@ -97,7 +100,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link TypedAggregation} for the given type and {@link AggregationOperation}s.
|
||||
*
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @param operations must not be {@literal null} or empty.
|
||||
*/
|
||||
@@ -107,7 +110,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link TypedAggregation} for the given type and {@link AggregationOperation}s.
|
||||
*
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @param operations must not be {@literal null} or empty.
|
||||
*/
|
||||
@@ -117,7 +120,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
*
|
||||
* @param aggregationOperations must not be {@literal null} or empty.
|
||||
*/
|
||||
protected Aggregation(AggregationOperation... aggregationOperations) {
|
||||
@@ -137,7 +140,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
*
|
||||
* @param aggregationOperations must not be {@literal null} or empty.
|
||||
*/
|
||||
protected Aggregation(List<AggregationOperation> aggregationOperations) {
|
||||
@@ -146,7 +149,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
*
|
||||
* @param aggregationOperations must not be {@literal null} or empty.
|
||||
* @param options must not be {@literal null} or empty.
|
||||
*/
|
||||
@@ -162,7 +165,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* A pointer to the previous {@link AggregationOperation}.
|
||||
*
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public static String previousOperation() {
|
||||
@@ -171,7 +174,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperation} including the given fields.
|
||||
*
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@@ -181,7 +184,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperation} including the given {@link Fields}.
|
||||
*
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@@ -191,7 +194,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Factory method to create a new {@link UnwindOperation} for the field with the given name.
|
||||
*
|
||||
*
|
||||
* @param fieldName must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
@@ -201,7 +204,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link GroupOperation} for the given fields.
|
||||
*
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@@ -211,7 +214,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link GroupOperation} for the given {@link Fields}.
|
||||
*
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@@ -221,7 +224,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Factory method to create a new {@link SortOperation} for the given {@link Sort}.
|
||||
*
|
||||
*
|
||||
* @param sort must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@@ -231,7 +234,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Factory method to create a new {@link SortOperation} for the given sort {@link Direction} and {@code fields}.
|
||||
*
|
||||
*
|
||||
* @param direction must not be {@literal null}.
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
@@ -242,7 +245,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link SkipOperation} skipping the given number of elements.
|
||||
*
|
||||
*
|
||||
* @param elementsToSkip must not be less than zero.
|
||||
* @return
|
||||
*/
|
||||
@@ -252,7 +255,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link LimitOperation} limiting the result to the given number of elements.
|
||||
*
|
||||
*
|
||||
* @param maxElements must not be less than zero.
|
||||
* @return
|
||||
*/
|
||||
@@ -262,7 +265,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link MatchOperation} using the given {@link Criteria}.
|
||||
*
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@@ -270,12 +273,40 @@ return new MatchOperation(criteria);
}

/**
* Creates a new {@link LookupOperation}.
*
* @param from must not be {@literal null}.
* @param localField must not be {@literal null}.
* @param foreignField must not be {@literal null}.
* @param as must not be {@literal null}.
* @return never {@literal null}.
* @since 1.9
*/
public static LookupOperation lookup(String from, String localField, String foreignField, String as) {
return lookup(field(from), field(localField), field(foreignField), field(as));
}

/**
* Creates a new {@link LookupOperation} for the given {@link Fields}.
*
* @param from must not be {@literal null}.
* @param localField must not be {@literal null}.
* @param foreignField must not be {@literal null}.
* @param as must not be {@literal null}.
* @return never {@literal null}.
* @since 1.9
*/
public static LookupOperation lookup(Field from, Field localField, Field foreignField, Field as) {
return new LookupOperation(from, localField, foreignField, as);
}

/**
* Creates a new {@link Fields} instance for the given field names.
*
* @see Fields#fields(String...)
*
* @param fields must not be {@literal null}.
* @return
* @see Fields#fields(String...)
*/
public static Fields fields(String... fields) {
return Fields.fields(fields);
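For illustration, a sketch of using the new lookup(...) factory inside a pipeline. The "orders" and "customers" collections, the field names, and the result types are assumptions.

import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;

class LookupAggregationSketch {

	// Result shape after the join and unwind; field names are illustrative only.
	static class OrderWithCustomer {
		String customerId;
		Customer customer;
	}

	static class Customer {
		String name;
	}

	AggregationResults<OrderWithCustomer> ordersWithCustomer(MongoOperations operations) {

		// $lookup joins the "customers" collection into each order under "customer",
		// $unwind then flattens the resulting array to a single embedded document.
		Aggregation aggregation = newAggregation(
				lookup("customers", "customerId", "_id", "customer"),
				unwind("customer"));

		return operations.aggregate(aggregation, "orders", OrderWithCustomer.class);
	}
}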
@@ -283,7 +314,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link Fields} instance from the given field name and target reference.
|
||||
*
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
* @param target must not be {@literal null} or empty.
|
||||
* @return
|
||||
@@ -295,7 +326,7 @@ public class Aggregation {
|
||||
/**
|
||||
* Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the {@code distanceField}. The
* {@code distanceField} defines the output field that contains the calculated distance.
|
||||
*
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @param distanceField must not be {@literal null} or empty.
|
||||
* @return
|
||||
@@ -307,7 +338,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Returns a new {@link AggregationOptions.Builder}.
|
||||
*
|
||||
*
|
||||
* @return
|
||||
* @since 1.6
|
||||
*/
|
||||
@@ -317,7 +348,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Converts this {@link Aggregation} specification to a {@link DBObject}.
|
||||
*
|
||||
*
|
||||
* @param inputCollectionName the name of the input collection
|
||||
* @return the {@code DBObject} representing this aggregation
|
||||
*/
|
||||
@@ -331,8 +362,14 @@ public class Aggregation {
|
||||
operationDocuments.add(operation.toDBObject(context));
|
||||
|
||||
if (operation instanceof FieldsExposingAggregationOperation) {
|
||||
|
||||
FieldsExposingAggregationOperation exposedFieldsOperation = (FieldsExposingAggregationOperation) operation;
|
||||
context = new ExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields(), rootContext);
|
||||
|
||||
if (operation instanceof InheritsFieldsAggregationOperation) {
|
||||
context = new InheritingExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields(), context);
|
||||
} else {
|
||||
context = new ExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields(), context);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -356,7 +393,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Simple {@link AggregationOperationContext} that just returns {@link FieldReference}s as is.
|
||||
*
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class NoOpAggregationOperationContext implements AggregationOperationContext {
|
||||
@@ -391,7 +428,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Describes the system variables available in MongoDB aggregation framework pipeline expressions.
|
||||
*
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @see http://docs.mongodb.org/manual/reference/aggregation-variables
|
||||
*/
|
||||
@@ -404,7 +441,7 @@ public class Aggregation {
|
||||
/**
|
||||
* Return {@literal true} if the given {@code fieldRef} denotes a well-known system variable, {@literal false}
|
||||
* otherwise.
|
||||
*
|
||||
*
|
||||
* @param fieldRef may be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2015 the original author or authors.
|
||||
* Copyright 2015-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -20,16 +20,17 @@ import com.mongodb.DBObject;
|
||||
/**
|
||||
* An {@link AggregationExpression} can be used with field expressions in aggregation pipeline stages like
|
||||
* {@code project} and {@code group}.
|
||||
*
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
interface AggregationExpression {
|
||||
public interface AggregationExpression {
|
||||
|
||||
/**
|
||||
* Turns the {@link AggregationExpression} into a {@link DBObject} within the given
|
||||
* {@link AggregationOperationContext}.
|
||||
*
|
||||
*
|
||||
* @param context
|
||||
* @return
|
||||
*/
|
||||
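Since AggregationExpression is public after this change, custom expressions can now be implemented outside the framework package. A trivial sketch; the $literal operator is only an example of something an expression might render.

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

import org.springframework.data.mongodb.core.aggregation.AggregationExpression;
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;

// Renders to { "$literal" : 1 } wherever the expression is used.
class LiteralOneExpression implements AggregationExpression {

	public DBObject toDBObject(AggregationOperationContext context) {
		return new BasicDBObject("$literal", 1);
	}
}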
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
* Copyright 2013-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -24,9 +24,10 @@ import com.mongodb.DBObject;
|
||||
/**
|
||||
* {@link AggregationOperationContext} that combines the available field references from a given
|
||||
* {@code AggregationOperationContext} and an {@link FieldsExposingAggregationOperation}.
|
||||
*
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
* @since 1.4
|
||||
*/
|
||||
class ExposedFieldsAggregationOperationContext implements AggregationOperationContext {
|
||||
@@ -37,11 +38,12 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo
|
||||
/**
|
||||
* Creates a new {@link ExposedFieldsAggregationOperationContext} from the given {@link ExposedFields}. Uses the given
|
||||
* {@link AggregationOperationContext} to perform a mapping to mongo types if necessary.
|
||||
*
|
||||
*
|
||||
* @param exposedFields must not be {@literal null}.
|
||||
* @param rootContext must not be {@literal null}.
|
||||
*/
|
||||
public ExposedFieldsAggregationOperationContext(ExposedFields exposedFields, AggregationOperationContext rootContext) {
|
||||
public ExposedFieldsAggregationOperationContext(ExposedFields exposedFields,
|
||||
AggregationOperationContext rootContext) {
|
||||
|
||||
Assert.notNull(exposedFields, "ExposedFields must not be null!");
|
||||
Assert.notNull(rootContext, "RootContext must not be null!");
|
||||
@@ -79,7 +81,7 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo
|
||||
|
||||
/**
|
||||
* Returns a {@link FieldReference} to the given {@link Field} with the given {@code name}.
|
||||
*
|
||||
*
|
||||
* @param field may be {@literal null}
|
||||
* @param name must not be {@literal null}
|
||||
* @return
|
||||
@@ -88,6 +90,22 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo
|
||||
|
||||
Assert.notNull(name, "Name must not be null!");
|
||||
|
||||
FieldReference exposedField = resolveExposedField(field, name);
|
||||
if (exposedField != null) {
|
||||
return exposedField;
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Invalid reference '%s'!", name));
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves a {@link Field}/{@code name} combination to a {@link FieldReference} if possible.
|
||||
*
|
||||
* @param field may be {@literal null}
|
||||
* @param name must not be {@literal null}
|
||||
* @return the resolved reference or {@literal null}
|
||||
*/
|
||||
protected FieldReference resolveExposedField(Field field, String name) {
|
||||
ExposedField exposedField = exposedFields.getField(name);
|
||||
|
||||
if (exposedField != null) {
|
||||
@@ -111,7 +129,6 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo
|
||||
return new FieldReference(new ExposedField(name, true));
|
||||
}
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Invalid reference '%s'!", name));
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,17 +16,28 @@
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
/**
|
||||
* {@link AggregationOperation} that exposes new {@link ExposedFields} that can be used for later aggregation pipeline
|
||||
* {@code AggregationOperation}s.
|
||||
*
|
||||
* {@link AggregationOperation} that exposes {@link ExposedFields} that can be used for later aggregation pipeline
|
||||
* {@code AggregationOperation}s. A {@link FieldsExposingAggregationOperation} implementing the
|
||||
* {@link InheritsFieldsAggregationOperation} will expose fields from its parent operations. Not implementing
|
||||
* {@link InheritsFieldsAggregationOperation} will replace existing exposed fields.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public interface FieldsExposingAggregationOperation extends AggregationOperation {
|
||||
|
||||
/**
|
||||
* Returns the fields exposed by the {@link AggregationOperation}.
|
||||
*
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
ExposedFields getFields();
|
||||
|
||||
/**
|
||||
* Marker interface for {@link AggregationOperation} that inherits fields from previous operations.
|
||||
*/
|
||||
static interface InheritsFieldsAggregationOperation extends FieldsExposingAggregationOperation {
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -0,0 +1,65 @@
|
||||
/*
|
||||
* Copyright 2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link ExposedFieldsAggregationOperationContext} that inherits fields from its parent
|
||||
* {@link AggregationOperationContext}.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
class InheritingExposedFieldsAggregationOperationContext extends ExposedFieldsAggregationOperationContext {
|
||||
|
||||
private final AggregationOperationContext previousContext;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFieldsAggregationOperationContext} from the given {@link ExposedFields}. Uses the given
|
||||
* {@link AggregationOperationContext} to perform a mapping to mongo types if necessary.
|
||||
*
|
||||
* @param exposedFields must not be {@literal null}.
|
||||
* @param previousContext must not be {@literal null}.
|
||||
*/
|
||||
public InheritingExposedFieldsAggregationOperationContext(ExposedFields exposedFields,
|
||||
AggregationOperationContext previousContext) {
|
||||
|
||||
super(exposedFields, previousContext);
|
||||
Assert.notNull(previousContext, "PreviousContext must not be null!");
|
||||
this.previousContext = previousContext;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ExposedFieldsAggregationOperationContext#resolveExposedField(org.springframework.data.mongodb.core.aggregation.Field, java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
protected FieldReference resolveExposedField(Field field, String name) {
|
||||
|
||||
FieldReference fieldReference = super.resolveExposedField(field, name);
|
||||
if (fieldReference != null) {
|
||||
return fieldReference;
|
||||
}
|
||||
|
||||
if (field != null) {
|
||||
return previousContext.getReference(field);
|
||||
}
|
||||
|
||||
return previousContext.getReference(name);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,196 @@
|
||||
/*
|
||||
* Copyright 2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Encapsulates the aggregation framework {@code $lookup}-operation. We recommend to use the static factory method
|
||||
* {@link Aggregation#lookup(String, String, String, String)} instead of creating instances of this class directly.
|
||||
*
|
||||
* @author Alessio Fachechi
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @see http://docs.mongodb.org/manual/reference/aggregation/lookup/#stage._S_lookup
|
||||
* @since 1.9
|
||||
*/
|
||||
public class LookupOperation implements FieldsExposingAggregationOperation, InheritsFieldsAggregationOperation {
|
||||
|
||||
private Field from;
|
||||
private Field localField;
|
||||
private Field foreignField;
|
||||
private ExposedField as;
|
||||
|
||||
/**
|
||||
* Creates a new {@link LookupOperation} for the given {@link Field}s.
|
||||
*
|
||||
* @param from must not be {@literal null}.
|
||||
* @param localField must not be {@literal null}.
|
||||
* @param foreignField must not be {@literal null}.
|
||||
* @param as must not be {@literal null}.
|
||||
*/
|
||||
public LookupOperation(Field from, Field localField, Field foreignField, Field as) {
|
||||
|
||||
Assert.notNull(from, "From must not be null!");
|
||||
Assert.notNull(localField, "LocalField must not be null!");
|
||||
Assert.notNull(foreignField, "ForeignField must not be null!");
|
||||
Assert.notNull(as, "As must not be null!");
|
||||
|
||||
this.from = from;
|
||||
this.localField = localField;
|
||||
this.foreignField = foreignField;
|
||||
this.as = new ExposedField(as, true);
|
||||
}
|
||||
|
||||
private LookupOperation() {
|
||||
// used by builder
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields()
|
||||
*/
|
||||
@Override
|
||||
public ExposedFields getFields() {
|
||||
return ExposedFields.from(as);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
|
||||
BasicDBObject lookupObject = new BasicDBObject();
|
||||
|
||||
lookupObject.append("from", from.getTarget());
|
||||
lookupObject.append("localField", localField.getTarget());
|
||||
lookupObject.append("foreignField", foreignField.getTarget());
|
||||
lookupObject.append("as", as.getTarget());
|
||||
|
||||
return new BasicDBObject("$lookup", lookupObject);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a builder that allows creation of {@link LookupOperation}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public static FromBuilder newLookup() {
|
||||
return new LookupOperationBuilder();
|
||||
}
|
||||
|
||||
public static interface FromBuilder {
|
||||
|
||||
/**
|
||||
* @param name the collection in the same database to perform the join with, must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
LocalFieldBuilder from(String name);
|
||||
}
|
||||
|
||||
public static interface LocalFieldBuilder {
|
||||
|
||||
/**
|
||||
* @param name the field from the documents input to the {@code $lookup} stage, must not be {@literal null} or
|
||||
* empty.
|
||||
* @return
|
||||
*/
|
||||
ForeignFieldBuilder localField(String name);
|
||||
}
|
||||
|
||||
public static interface ForeignFieldBuilder {
|
||||
|
||||
/**
|
||||
* @param name the field from the documents in the {@code from} collection, must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
AsBuilder foreignField(String name);
|
||||
}
|
||||
|
||||
public static interface AsBuilder {
|
||||
|
||||
/**
|
||||
* @param name the name of the new array field to add to the input documents, must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
LookupOperation as(String name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder for fluent {@link LookupOperation} creation.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.9
|
||||
*/
|
||||
public static final class LookupOperationBuilder
|
||||
implements FromBuilder, LocalFieldBuilder, ForeignFieldBuilder, AsBuilder {
|
||||
|
||||
private final LookupOperation lookupOperation;
|
||||
|
||||
private LookupOperationBuilder() {
|
||||
this.lookupOperation = new LookupOperation();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new builder for {@link LookupOperation}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public static FromBuilder newBuilder() {
|
||||
return new LookupOperationBuilder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public LocalFieldBuilder from(String name) {
|
||||
|
||||
Assert.hasText(name, "'From' must not be null or empty!");
|
||||
lookupOperation.from = Fields.field(name);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public LookupOperation as(String name) {
|
||||
|
||||
Assert.hasText(name, "'As' must not be null or empty!");
|
||||
lookupOperation.as = new ExposedField(Fields.field(name), true);
|
||||
return new LookupOperation(lookupOperation.from, lookupOperation.localField, lookupOperation.foreignField,
|
||||
lookupOperation.as);
|
||||
}
|
||||
|
||||
@Override
|
||||
public AsBuilder foreignField(String name) {
|
||||
|
||||
Assert.hasText(name, "'ForeignField' must not be null or empty!");
|
||||
lookupOperation.foreignField = Fields.field(name);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ForeignFieldBuilder localField(String name) {
|
||||
|
||||
Assert.hasText(name, "'LocalField' must not be null or empty!");
|
||||
lookupOperation.localField = Fields.field(name);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
}
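For reference, a minimal usage sketch of the builder above; the collection and field names are illustrative and not taken from this change:

LookupOperation lookup = LookupOperation.newLookup()
        .from("warehouses")              // collection to join with
        .localField("item")              // field of the input documents
        .foreignField("stockItem")       // field of the documents in "warehouses"
        .as("inventoryDocs");            // name of the resulting array field

// equivalent shortcut, as recommended by the class-level Javadoc
Aggregation aggregation = Aggregation.newAggregation(
        Aggregation.lookup("warehouses", "item", "stockItem", "inventoryDocs"));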
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
* Copyright 2011-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -42,16 +42,6 @@ import org.springframework.data.convert.ReadingConverter;
|
||||
import org.springframework.data.convert.ThreeTenBackPortConverters;
|
||||
import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.mapping.model.SimpleTypeHolder;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.BigDecimalToStringConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.BigIntegerToStringConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.DBObjectToNamedMongoScriptCoverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.DBObjectToStringConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.NamedMongoScriptToDBObjectConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigDecimalConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigIntegerConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToURLConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.TermToStringConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.URLToStringConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
|
||||
import org.springframework.data.util.CacheValue;
|
||||
import org.springframework.util.Assert;
|
||||
@@ -112,17 +102,7 @@ public class CustomConversions {
|
||||
// Add user provided converters to make sure they can override the defaults
|
||||
toRegister.addAll(converters);
|
||||
toRegister.add(CustomToStringConverter.INSTANCE);
|
||||
toRegister.add(BigDecimalToStringConverter.INSTANCE);
|
||||
toRegister.add(StringToBigDecimalConverter.INSTANCE);
|
||||
toRegister.add(BigIntegerToStringConverter.INSTANCE);
|
||||
toRegister.add(StringToBigIntegerConverter.INSTANCE);
|
||||
toRegister.add(URLToStringConverter.INSTANCE);
|
||||
toRegister.add(StringToURLConverter.INSTANCE);
|
||||
toRegister.add(DBObjectToStringConverter.INSTANCE);
|
||||
toRegister.add(TermToStringConverter.INSTANCE);
|
||||
toRegister.add(NamedMongoScriptToDBObjectConverter.INSTANCE);
|
||||
toRegister.add(DBObjectToNamedMongoScriptCoverter.INSTANCE);
|
||||
|
||||
toRegister.addAll(MongoConverters.getConvertersToRegister());
|
||||
toRegister.addAll(JodaTimeConverters.getConvertersToRegister());
|
||||
toRegister.addAll(GeoConverters.getConvertersToRegister());
|
||||
toRegister.addAll(Jsr310Converters.getConvertersToRegister());
|
||||
@@ -186,7 +166,8 @@ public class CustomConversions {
|
||||
}
|
||||
|
||||
if (!added) {
|
||||
throw new IllegalArgumentException("Given set contains element that is neither Converter nor ConverterFactory!");
|
||||
throw new IllegalArgumentException(
|
||||
"Given set contains element that is neither Converter nor ConverterFactory!");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -416,6 +397,10 @@ public class CustomConversions {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.GenericConverter#getConvertibleTypes()
|
||||
*/
|
||||
public Set<ConvertiblePair> getConvertibleTypes() {
|
||||
|
||||
ConvertiblePair localeToString = new ConvertiblePair(Locale.class, String.class);
|
||||
@@ -424,6 +409,10 @@ public class CustomConversions {
|
||||
return new HashSet<ConvertiblePair>(Arrays.asList(localeToString, booleanToString));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.GenericConverter#convert(java.lang.Object, org.springframework.core.convert.TypeDescriptor, org.springframework.core.convert.TypeDescriptor)
|
||||
*/
|
||||
public Object convert(Object source, TypeDescriptor sourceType, TypeDescriptor targetType) {
|
||||
return source.toString();
|
||||
}
|
||||
|
||||
@@ -114,6 +114,40 @@ class DBObjectAccessor {
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the underlying {@link DBObject} has a value ({@literal null} or non-{@literal null}) for the given
|
||||
* {@link MongoPersistentProperty}.
|
||||
*
|
||||
* @param property must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public boolean hasValue(MongoPersistentProperty property) {
|
||||
|
||||
Assert.notNull(property, "Property must not be null!");
|
||||
|
||||
String fieldName = property.getFieldName();
|
||||
|
||||
if (!fieldName.contains(".")) {
|
||||
return this.dbObject.containsField(fieldName);
|
||||
}
|
||||
|
||||
String[] parts = fieldName.split("\\.");
|
||||
Map<String, Object> source = this.dbObject;
|
||||
Object result = null;
|
||||
|
||||
for (int i = 1; i < parts.length; i++) {
|
||||
|
||||
result = source.get(parts[i - 1]);
|
||||
source = getAsMap(result);
|
||||
|
||||
if (source == null) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return source.containsKey(parts[parts.length - 1]);
|
||||
}
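A worked example of the dotted-path handling above; the document shape is hypothetical, and hasValue(..) takes the MongoPersistentProperty mapped to the field names shown in the comments:

DBObject dbo = new BasicDBObject("address", new BasicDBObject("city", null));

// "address"      -> true  (top-level key present)
// "address.city" -> true  (nested key present, even though its value is null)
// "address.zip"  -> false (nested key missing)
// "name"         -> false (top-level key missing)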
|
||||
|
||||
/**
|
||||
	 * Returns the given source object as a map, i.e. {@link BasicDBObject}s and maps as-is, or {@literal null} otherwise.
|
||||
*
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
* Copyright 2013-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -180,8 +180,8 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
static class LazyLoadingInterceptor implements MethodInterceptor, org.springframework.cglib.proxy.MethodInterceptor,
|
||||
Serializable {
|
||||
static class LazyLoadingInterceptor
|
||||
implements MethodInterceptor, org.springframework.cglib.proxy.MethodInterceptor, Serializable {
|
||||
|
||||
private static final Method INITIALIZE_METHOD, TO_DBREF_METHOD, FINALIZE_METHOD;
|
||||
|
||||
@@ -387,7 +387,8 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
} catch (RuntimeException ex) {
|
||||
|
||||
DataAccessException translatedException = this.exceptionTranslator.translateExceptionIfPossible(ex);
|
||||
throw new LazyLoadingException("Unable to lazily resolve DBRef!", translatedException);
|
||||
throw new LazyLoadingException("Unable to lazily resolve DBRef!",
|
||||
translatedException != null ? translatedException : ex);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -45,9 +45,9 @@ import com.mongodb.DBObject;
|
||||
public class DefaultMongoTypeMapper extends DefaultTypeMapper<DBObject> implements MongoTypeMapper {
|
||||
|
||||
public static final String DEFAULT_TYPE_KEY = "_class";
|
||||
@SuppressWarnings("rawtypes")//
|
||||
@SuppressWarnings("rawtypes") //
|
||||
private static final TypeInformation<List> LIST_TYPE_INFO = ClassTypeInformation.from(List.class);
|
||||
@SuppressWarnings("rawtypes")//
|
||||
@SuppressWarnings("rawtypes") //
|
||||
private static final TypeInformation<Map> MAP_TYPE_INFO = ClassTypeInformation.from(Map.class);
|
||||
|
||||
private final TypeAliasAccessor<DBObject> accessor;
|
||||
@@ -58,12 +58,12 @@ public class DefaultMongoTypeMapper extends DefaultTypeMapper<DBObject> implemen
|
||||
}
|
||||
|
||||
public DefaultMongoTypeMapper(String typeKey) {
|
||||
this(typeKey, Arrays.asList(SimpleTypeInformationMapper.INSTANCE));
|
||||
this(typeKey, Arrays.asList(new SimpleTypeInformationMapper()));
|
||||
}
|
||||
|
||||
public DefaultMongoTypeMapper(String typeKey, MappingContext<? extends PersistentEntity<?, ?>, ?> mappingContext) {
|
||||
this(typeKey, new DBObjectTypeAliasAccessor(typeKey), mappingContext, Arrays
|
||||
.asList(SimpleTypeInformationMapper.INSTANCE));
|
||||
this(typeKey, new DBObjectTypeAliasAccessor(typeKey), mappingContext,
|
||||
Arrays.asList(new SimpleTypeInformationMapper()));
|
||||
}
|
||||
|
||||
public DefaultMongoTypeMapper(String typeKey, List<? extends TypeInformationMapper> mappers) {
|
||||
@@ -71,7 +71,8 @@ public class DefaultMongoTypeMapper extends DefaultTypeMapper<DBObject> implemen
|
||||
}
|
||||
|
||||
private DefaultMongoTypeMapper(String typeKey, TypeAliasAccessor<DBObject> accessor,
|
||||
MappingContext<? extends PersistentEntity<?, ?>, ?> mappingContext, List<? extends TypeInformationMapper> mappers) {
|
||||
MappingContext<? extends PersistentEntity<?, ?>, ?> mappingContext,
|
||||
List<? extends TypeInformationMapper> mappers) {
|
||||
|
||||
super(accessor, mappingContext, mappers);
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2014-2015 the original author or authors.
|
||||
* Copyright 2014-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -117,6 +117,10 @@ abstract class GeoConverters {
|
||||
|
||||
Assert.isTrue(source.keySet().size() == 2, "Source must contain 2 elements");
|
||||
|
||||
if (source.containsField("type")) {
|
||||
return DbObjectToGeoJsonPointConverter.INSTANCE.convert(source);
|
||||
}
|
||||
|
||||
return new Point((Double) source.get("x"), (Double) source.get("y"));
|
||||
}
|
||||
}
|
||||
@@ -595,7 +599,7 @@ abstract class GeoConverters {
|
||||
Assert.isTrue(ObjectUtils.nullSafeEquals(source.get("type"), "Point"),
|
||||
String.format("Cannot convert type '%s' to Point.", source.get("type")));
|
||||
|
||||
List<Double> dbl = (List<Double>) source.get("coordinates");
|
||||
List<Number> dbl = (List<Number>) source.get("coordinates");
|
||||
return new GeoJsonPoint(dbl.get(0).doubleValue(), dbl.get(1).doubleValue());
|
||||
}
|
||||
}
|
||||
@@ -828,7 +832,7 @@ abstract class GeoConverters {
|
||||
|
||||
Assert.isInstanceOf(List.class, point);
|
||||
|
||||
List<Double> coordinatesList = (List<Double>) point;
|
||||
List<Number> coordinatesList = (List<Number>) point;
|
||||
|
||||
points.add(new GeoJsonPoint(coordinatesList.get(0).doubleValue(), coordinatesList.get(1).doubleValue()));
|
||||
}
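The switch from List<Double> to List<Number> above means GeoJSON coordinates that arrive as plain integers are now read via Number.doubleValue() instead of failing on the Double cast. A small illustration with a hypothetical input document (the converter is the package-private one referenced earlier in this class):

DBObject geoJson = new BasicDBObject("type", "Point")
        .append("coordinates", Arrays.asList(40, -73.9667)); // mixed Integer/Double coordinates

// previously a ClassCastException for the Integer element, now converts cleanly
GeoJsonPoint point = DbObjectToGeoJsonPointConverter.INSTANCE.convert(geoJson);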
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2015 by the original author(s).
|
||||
* Copyright 2011-2016 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -20,6 +20,7 @@ import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
@@ -29,10 +30,10 @@ import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.BeansException;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.ApplicationContextAware;
|
||||
import org.springframework.core.CollectionFactory;
|
||||
import org.springframework.core.convert.ConversionException;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.support.DefaultConversionService;
|
||||
import org.springframework.data.convert.CollectionFactory;
|
||||
import org.springframework.data.convert.EntityInstantiator;
|
||||
import org.springframework.data.convert.TypeMapper;
|
||||
import org.springframework.data.mapping.Association;
|
||||
@@ -259,7 +260,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
// make sure id property is set before all other properties
|
||||
Object idValue = null;
|
||||
|
||||
if (idProperty != null) {
|
||||
if (idProperty != null && new DBObjectAccessor(dbo).hasValue(idProperty)) {
|
||||
idValue = getValueInternal(idProperty, dbo, evaluator, path);
|
||||
accessor.setProperty(idProperty, idValue);
|
||||
}
|
||||
@@ -896,9 +897,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
Collection<Object> items = targetType.getType().isArray() ? new ArrayList<Object>()
|
||||
: CollectionFactory.createCollection(collectionType, rawComponentType, sourceValue.size());
|
||||
|
||||
for (int i = 0; i < sourceValue.size(); i++) {
|
||||
|
||||
Object dbObjItem = sourceValue.get(i);
|
||||
for (Object dbObjItem : sourceValue) {
|
||||
|
||||
if (dbObjItem instanceof DBRef) {
|
||||
items.add(
|
||||
@@ -992,20 +991,32 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
}
|
||||
|
||||
if (obj instanceof DBObject) {
|
||||
|
||||
DBObject newValueDbo = new BasicDBObject();
|
||||
|
||||
for (String vk : ((DBObject) obj).keySet()) {
|
||||
|
||||
Object o = ((DBObject) obj).get(vk);
|
||||
newValueDbo.put(vk, convertToMongoType(o, typeHint));
|
||||
}
|
||||
|
||||
return newValueDbo;
|
||||
}
|
||||
|
||||
if (obj instanceof Map) {
|
||||
DBObject result = new BasicDBObject();
|
||||
for (Map.Entry<Object, Object> entry : ((Map<Object, Object>) obj).entrySet()) {
|
||||
result.put(entry.getKey().toString(), convertToMongoType(entry.getValue(), typeHint));
|
||||
|
||||
Map<Object, Object> converted = new LinkedHashMap<Object, Object>();
|
||||
|
||||
for (Entry<Object, Object> entry : ((Map<Object, Object>) obj).entrySet()) {
|
||||
|
||||
TypeInformation<? extends Object> valueTypeHint = typeHint != null && typeHint.getMapValueType() != null
|
||||
? typeHint.getMapValueType() : typeHint;
|
||||
|
||||
converted.put(getPotentiallyConvertedSimpleWrite(entry.getKey()).toString(),
|
||||
convertToMongoType(entry.getValue(), valueTypeHint));
|
||||
}
|
||||
return result;
|
||||
|
||||
return new BasicDBObject(converted);
|
||||
}
|
||||
|
||||
if (obj.getClass().isArray()) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
* Copyright 2011-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -19,16 +19,26 @@ import java.math.BigDecimal;
|
||||
import java.math.BigInteger;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Currency;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
||||
import org.bson.types.Code;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.springframework.core.convert.ConversionFailedException;
|
||||
import org.springframework.core.convert.TypeDescriptor;
|
||||
import org.springframework.core.convert.converter.ConditionalConverter;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.core.convert.converter.ConverterFactory;
|
||||
import org.springframework.data.convert.ReadingConverter;
|
||||
import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.mongodb.core.query.Term;
|
||||
import org.springframework.data.mongodb.core.script.NamedMongoScript;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.NumberUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
@@ -49,6 +59,36 @@ abstract class MongoConverters {
|
||||
*/
|
||||
private MongoConverters() {}
|
||||
|
||||
/**
|
||||
* Returns the converters to be registered.
|
||||
*
|
||||
* @return
|
||||
* @since 1.9
|
||||
*/
|
||||
public static Collection<Object> getConvertersToRegister() {
|
||||
|
||||
List<Object> converters = new ArrayList<Object>();
|
||||
|
||||
converters.add(BigDecimalToStringConverter.INSTANCE);
|
||||
converters.add(StringToBigDecimalConverter.INSTANCE);
|
||||
converters.add(BigIntegerToStringConverter.INSTANCE);
|
||||
converters.add(StringToBigIntegerConverter.INSTANCE);
|
||||
converters.add(URLToStringConverter.INSTANCE);
|
||||
converters.add(StringToURLConverter.INSTANCE);
|
||||
converters.add(DBObjectToStringConverter.INSTANCE);
|
||||
converters.add(TermToStringConverter.INSTANCE);
|
||||
converters.add(NamedMongoScriptToDBObjectConverter.INSTANCE);
|
||||
converters.add(DBObjectToNamedMongoScriptCoverter.INSTANCE);
|
||||
converters.add(CurrencyToStringConverter.INSTANCE);
|
||||
converters.add(StringToCurrencyConverter.INSTANCE);
|
||||
converters.add(AtomicIntegerToIntegerConverter.INSTANCE);
|
||||
converters.add(AtomicLongToLongConverter.INSTANCE);
|
||||
converters.add(LongToAtomicLongConverter.INSTANCE);
|
||||
converters.add(IntegerToAtomicIntegerConverter.INSTANCE);
|
||||
|
||||
return converters;
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple singleton to convert {@link ObjectId}s to their {@link String} representation.
|
||||
*
|
||||
@@ -228,4 +268,177 @@ abstract class MongoConverters {
|
||||
return builder.get();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Converter} implementation converting {@link Currency} into its ISO 4217 {@link String} representation.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.9
|
||||
*/
|
||||
@WritingConverter
|
||||
public static enum CurrencyToStringConverter implements Converter<Currency, String> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public String convert(Currency source) {
|
||||
return source == null ? null : source.getCurrencyCode();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Converter} implementation converting ISO 4217 {@link String} into {@link Currency}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.9
|
||||
*/
|
||||
@ReadingConverter
|
||||
public static enum StringToCurrencyConverter implements Converter<String, Currency> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public Currency convert(String source) {
|
||||
return StringUtils.hasText(source) ? Currency.getInstance(source) : null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ConverterFactory} implementation using {@link NumberUtils} for number conversion and parsing. Additionally
|
||||
* deals with {@link AtomicInteger} and {@link AtomicLong} by calling {@code get()} before performing the actual
|
||||
* conversion.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.9
|
||||
*/
|
||||
@WritingConverter
|
||||
	public static enum NumberToNumberConverterFactory implements ConverterFactory<Number, Number>, ConditionalConverter {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.ConverterFactory#getConverter(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public <T extends Number> Converter<Number, T> getConverter(Class<T> targetType) {
|
||||
return new NumberToNumberConverter<T>(targetType);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.ConditionalConverter#matches(org.springframework.core.convert.TypeDescriptor, org.springframework.core.convert.TypeDescriptor)
|
||||
*/
|
||||
@Override
|
||||
public boolean matches(TypeDescriptor sourceType, TypeDescriptor targetType) {
|
||||
return !sourceType.equals(targetType);
|
||||
}
|
||||
|
||||
private final static class NumberToNumberConverter<T extends Number> implements Converter<Number, T> {
|
||||
|
||||
private final Class<T> targetType;
|
||||
|
||||
/**
|
||||
* Creates a new {@link NumberToNumberConverter} for the given target type.
|
||||
*
|
||||
* @param targetType must not be {@literal null}.
|
||||
*/
|
||||
public NumberToNumberConverter(Class<T> targetType) {
|
||||
|
||||
Assert.notNull(targetType, "Target type must not be null!");
|
||||
|
||||
this.targetType = targetType;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public T convert(Number source) {
|
||||
|
||||
if (source instanceof AtomicInteger) {
|
||||
return NumberUtils.convertNumberToTargetClass(((AtomicInteger) source).get(), this.targetType);
|
||||
}
|
||||
|
||||
if (source instanceof AtomicLong) {
|
||||
return NumberUtils.convertNumberToTargetClass(((AtomicLong) source).get(), this.targetType);
|
||||
}
|
||||
|
||||
return NumberUtils.convertNumberToTargetClass(source, this.targetType);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
	 * {@link Converter} implementation converting {@link AtomicLong} into {@link Long}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.10
|
||||
*/
|
||||
@WritingConverter
|
||||
public static enum AtomicLongToLongConverter implements Converter<AtomicLong, Long> {
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public Long convert(AtomicLong source) {
|
||||
return NumberUtils.convertNumberToTargetClass(source, Long.class);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
	 * {@link Converter} implementation converting {@link AtomicInteger} into {@link Integer}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.10
|
||||
*/
|
||||
@WritingConverter
|
||||
public static enum AtomicIntegerToIntegerConverter implements Converter<AtomicInteger, Integer> {
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public Integer convert(AtomicInteger source) {
|
||||
return NumberUtils.convertNumberToTargetClass(source, Integer.class);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
	 * {@link Converter} implementation converting {@link Long} into {@link AtomicLong}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.10
|
||||
*/
|
||||
@ReadingConverter
|
||||
public static enum LongToAtomicLongConverter implements Converter<Long, AtomicLong> {
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public AtomicLong convert(Long source) {
|
||||
return source != null ? new AtomicLong(source) : null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
	 * {@link Converter} implementation converting {@link Integer} into {@link AtomicInteger}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.10
|
||||
*/
|
||||
@ReadingConverter
|
||||
public static enum IntegerToAtomicIntegerConverter implements Converter<Integer, AtomicInteger> {
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public AtomicInteger convert(Integer source) {
|
||||
return source != null ? new AtomicInteger(source) : null;
|
||||
}
|
||||
}
|
||||
}
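A brief sketch of what the converters registered above do. The enums are nested in a package-private class, so direct calls like these only work from within the same package; in application code the mapping layer applies them transparently:

String iso = CurrencyToStringConverter.INSTANCE.convert(Currency.getInstance("EUR")); // "EUR"
Currency eur = StringToCurrencyConverter.INSTANCE.convert("EUR");

Integer plain = AtomicIntegerToIntegerConverter.INSTANCE.convert(new AtomicInteger(5)); // 5
AtomicLong counter = LongToAtomicLongConverter.INSTANCE.convert(42L);                   // AtomicLong holding 42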
@@ -0,0 +1,262 @@
|
||||
/*
|
||||
* Copyright 2015-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Set;
|
||||
import java.util.Stack;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.springframework.data.domain.Example;
|
||||
import org.springframework.data.domain.ExampleMatcher.NullHandler;
|
||||
import org.springframework.data.domain.ExampleMatcher.PropertyValueTransformer;
|
||||
import org.springframework.data.domain.ExampleMatcher.StringMatcher;
|
||||
import org.springframework.data.mapping.PropertyHandler;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.query.MongoRegexCreator;
|
||||
import org.springframework.data.mongodb.core.query.SerializationUtils;
|
||||
import org.springframework.data.repository.core.support.ExampleMatcherAccessor;
|
||||
import org.springframework.data.repository.query.parser.Part.Type;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 1.8
|
||||
*/
|
||||
public class MongoExampleMapper {
|
||||
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final MongoConverter converter;
|
||||
private final Map<StringMatcher, Type> stringMatcherPartMapping = new HashMap<StringMatcher, Type>();
|
||||
|
||||
public MongoExampleMapper(MongoConverter converter) {
|
||||
|
||||
this.converter = converter;
|
||||
this.mappingContext = converter.getMappingContext();
|
||||
|
||||
stringMatcherPartMapping.put(StringMatcher.EXACT, Type.SIMPLE_PROPERTY);
|
||||
stringMatcherPartMapping.put(StringMatcher.CONTAINING, Type.CONTAINING);
|
||||
stringMatcherPartMapping.put(StringMatcher.STARTING, Type.STARTING_WITH);
|
||||
stringMatcherPartMapping.put(StringMatcher.ENDING, Type.ENDING_WITH);
|
||||
stringMatcherPartMapping.put(StringMatcher.REGEX, Type.REGEX);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the given {@link Example} as {@link DBObject} holding matching values extracted from
|
||||
* {@link Example#getProbe()}.
|
||||
*
|
||||
* @param example must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public DBObject getMappedExample(Example<?> example) {
|
||||
|
||||
Assert.notNull(example, "Example must not be null!");
|
||||
|
||||
return getMappedExample(example, mappingContext.getPersistentEntity(example.getProbeType()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the given {@link Example} as {@link DBObject} holding matching values extracted from
|
||||
* {@link Example#getProbe()}.
|
||||
*
|
||||
* @param example must not be {@literal null}.
|
||||
* @param entity must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
public DBObject getMappedExample(Example<?> example, MongoPersistentEntity<?> entity) {
|
||||
|
||||
Assert.notNull(example, "Example must not be null!");
|
||||
Assert.notNull(entity, "MongoPersistentEntity must not be null!");
|
||||
|
||||
DBObject reference = (DBObject) converter.convertToMongoType(example.getProbe());
|
||||
|
||||
if (entity.hasIdProperty() && entity.getIdentifierAccessor(example.getProbe()).getIdentifier() == null) {
|
||||
reference.removeField(entity.getIdProperty().getFieldName());
|
||||
}
|
||||
|
||||
ExampleMatcherAccessor matcherAccessor = new ExampleMatcherAccessor(example.getMatcher());
|
||||
|
||||
applyPropertySpecs("", reference, example.getProbeType(), matcherAccessor);
|
||||
|
||||
this.converter.getTypeMapper().writeTypeRestrictions(reference, getTypesToMatch(example));
|
||||
|
||||
return ObjectUtils.nullSafeEquals(NullHandler.INCLUDE, matcherAccessor.getNullHandler()) ? reference
|
||||
: new BasicDBObject(SerializationUtils.flattenMap(reference));
|
||||
}
|
||||
|
||||
private Set<Class<?>> getTypesToMatch(Example<?> example) {
|
||||
|
||||
Set<Class<?>> types = new HashSet<Class<?>>();
|
||||
|
||||
for (TypeInformation<?> reference : mappingContext.getManagedTypes()) {
|
||||
if (example.getProbeType().isAssignableFrom(reference.getType())) {
|
||||
types.add(reference.getType());
|
||||
}
|
||||
}
|
||||
|
||||
return types;
|
||||
}
|
||||
|
||||
private String getMappedPropertyPath(String path, Class<?> probeType) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(probeType);
|
||||
|
||||
Iterator<String> parts = Arrays.asList(path.split("\\.")).iterator();
|
||||
|
||||
final Stack<MongoPersistentProperty> stack = new Stack<MongoPersistentProperty>();
|
||||
|
||||
List<String> resultParts = new ArrayList<String>();
|
||||
|
||||
while (parts.hasNext()) {
|
||||
|
||||
final String part = parts.next();
|
||||
MongoPersistentProperty prop = entity.getPersistentProperty(part);
|
||||
|
||||
if (prop == null) {
|
||||
|
||||
entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
|
||||
|
||||
@Override
|
||||
public void doWithPersistentProperty(MongoPersistentProperty property) {
|
||||
|
||||
if (property.getFieldName().equals(part)) {
|
||||
stack.push(property);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (stack.isEmpty()) {
|
||||
return "";
|
||||
}
|
||||
prop = stack.pop();
|
||||
}
|
||||
|
||||
resultParts.add(prop.getName());
|
||||
|
||||
if (prop.isEntity() && mappingContext.hasPersistentEntityFor(prop.getActualType())) {
|
||||
entity = mappingContext.getPersistentEntity(prop.getActualType());
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return StringUtils.collectionToDelimitedString(resultParts, ".");
|
||||
|
||||
}
|
||||
|
||||
private void applyPropertySpecs(String path, DBObject source, Class<?> probeType,
|
||||
ExampleMatcherAccessor exampleSpecAccessor) {
|
||||
|
||||
if (!(source instanceof BasicDBObject)) {
|
||||
return;
|
||||
}
|
||||
|
||||
Iterator<Map.Entry<String, Object>> iter = ((BasicDBObject) source).entrySet().iterator();
|
||||
|
||||
while (iter.hasNext()) {
|
||||
|
||||
Map.Entry<String, Object> entry = iter.next();
|
||||
String propertyPath = StringUtils.hasText(path) ? path + "." + entry.getKey() : entry.getKey();
|
||||
String mappedPropertyPath = getMappedPropertyPath(propertyPath, probeType);
|
||||
|
||||
if (isEmptyIdProperty(entry)) {
|
||||
iter.remove();
|
||||
continue;
|
||||
}
|
||||
|
||||
if (exampleSpecAccessor.isIgnoredPath(propertyPath) || exampleSpecAccessor.isIgnoredPath(mappedPropertyPath)) {
|
||||
iter.remove();
|
||||
continue;
|
||||
}
|
||||
|
||||
StringMatcher stringMatcher = exampleSpecAccessor.getDefaultStringMatcher();
|
||||
Object value = entry.getValue();
|
||||
boolean ignoreCase = exampleSpecAccessor.isIgnoreCaseEnabled();
|
||||
|
||||
if (exampleSpecAccessor.hasPropertySpecifiers()) {
|
||||
|
||||
mappedPropertyPath = exampleSpecAccessor.hasPropertySpecifier(propertyPath) ? propertyPath
|
||||
: getMappedPropertyPath(propertyPath, probeType);
|
||||
|
||||
stringMatcher = exampleSpecAccessor.getStringMatcherForPath(mappedPropertyPath);
|
||||
ignoreCase = exampleSpecAccessor.isIgnoreCaseForPath(mappedPropertyPath);
|
||||
}
|
||||
|
||||
				// TODO: should a PropertySpecifier override the string matching applied later on?
|
||||
if (exampleSpecAccessor.hasPropertySpecifier(mappedPropertyPath)) {
|
||||
|
||||
PropertyValueTransformer valueTransformer = exampleSpecAccessor.getValueTransformerForPath(mappedPropertyPath);
|
||||
value = valueTransformer.convert(value);
|
||||
if (value == null) {
|
||||
iter.remove();
|
||||
continue;
|
||||
}
|
||||
|
||||
entry.setValue(value);
|
||||
}
|
||||
|
||||
if (entry.getValue() instanceof String) {
|
||||
applyStringMatcher(entry, stringMatcher, ignoreCase);
|
||||
} else if (entry.getValue() instanceof BasicDBObject) {
|
||||
applyPropertySpecs(propertyPath, (BasicDBObject) entry.getValue(), probeType, exampleSpecAccessor);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private boolean isEmptyIdProperty(Entry<String, Object> entry) {
|
||||
return entry.getKey().equals("_id") && entry.getValue() == null;
|
||||
}
|
||||
|
||||
private void applyStringMatcher(Map.Entry<String, Object> entry, StringMatcher stringMatcher, boolean ignoreCase) {
|
||||
|
||||
BasicDBObject dbo = new BasicDBObject();
|
||||
|
||||
if (ObjectUtils.nullSafeEquals(StringMatcher.DEFAULT, stringMatcher)) {
|
||||
|
||||
if (ignoreCase) {
|
||||
dbo.put("$regex", Pattern.quote((String) entry.getValue()));
|
||||
entry.setValue(dbo);
|
||||
}
|
||||
} else {
|
||||
|
||||
Type type = stringMatcherPartMapping.get(stringMatcher);
|
||||
String expression = MongoRegexCreator.INSTANCE.toRegularExpression((String) entry.getValue(), type);
|
||||
dbo.put("$regex", expression);
|
||||
entry.setValue(dbo);
|
||||
}
|
||||
|
||||
if (ignoreCase) {
|
||||
dbo.put("$options", "i");
|
||||
}
|
||||
}
|
||||
}
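A usage sketch for the mapper above. The Person probe type is illustrative, and converter stands for an existing MongoConverter; in practice QueryMapper delegates to this class when it encounters a $sample keyword:

MongoExampleMapper exampleMapper = new MongoExampleMapper(converter);

Person probe = new Person();
probe.setFirstname("Dave");

Example<Person> example = Example.of(probe,
        ExampleMatcher.matching().withStringMatcher(ExampleMatcher.StringMatcher.STARTING).withIgnoreCase());

DBObject mappedExample = exampleMapper.getMappedExample(example);
// roughly: { "firstname" : { "$regex" : "^Dave", "$options" : "i" }, "_class" : { "$in" : [ ... ] } }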
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
* Copyright 2011-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -27,10 +27,12 @@ import org.bson.types.ObjectId;
|
||||
import org.springframework.core.convert.ConversionException;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.domain.Example;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.PropertyReferenceException;
|
||||
import org.springframework.data.mapping.context.InvalidPersistentPropertyPath;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.PersistentPropertyPath;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
@@ -56,6 +58,7 @@ import com.mongodb.DBRef;
|
||||
* @author Patryk Wasik
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public class QueryMapper {
|
||||
|
||||
@@ -64,12 +67,13 @@ public class QueryMapper {
|
||||
static final ClassTypeInformation<?> NESTED_DOCUMENT = ClassTypeInformation.from(NestedDocument.class);
|
||||
|
||||
private enum MetaMapping {
|
||||
FORCE, WHEN_PRESENT, IGNORE;
|
||||
FORCE, WHEN_PRESENT, IGNORE
|
||||
}
|
||||
|
||||
private final ConversionService conversionService;
|
||||
private final MongoConverter converter;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final MongoExampleMapper exampleMapper;
|
||||
|
||||
/**
|
||||
* Creates a new {@link QueryMapper} with the given {@link MongoConverter}.
|
||||
@@ -83,6 +87,7 @@ public class QueryMapper {
|
||||
this.conversionService = converter.getConversionService();
|
||||
this.converter = converter;
|
||||
this.mappingContext = converter.getMappingContext();
|
||||
this.exampleMapper = new MongoExampleMapper(converter);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -119,10 +124,20 @@ public class QueryMapper {
|
||||
continue;
|
||||
}
|
||||
|
||||
Field field = createPropertyField(entity, key, mappingContext);
|
||||
Entry<String, Object> entry = getMappedObjectForField(field, query.get(key));
|
||||
try {
|
||||
|
||||
result.put(entry.getKey(), entry.getValue());
|
||||
Field field = createPropertyField(entity, key, mappingContext);
|
||||
Entry<String, Object> entry = getMappedObjectForField(field, query.get(key));
|
||||
result.put(entry.getKey(), entry.getValue());
|
||||
} catch (InvalidPersistentPropertyPath invalidPathException) {
|
||||
|
||||
// in case the object has not already been mapped
|
||||
if (!(query.get(key) instanceof DBObject)) {
|
||||
throw invalidPathException;
|
||||
}
|
||||
|
||||
result.put(key, query.get(key));
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
@@ -226,7 +241,7 @@ public class QueryMapper {
|
||||
protected DBObject getMappedKeyword(Keyword keyword, MongoPersistentEntity<?> entity) {
|
||||
|
||||
// $or/$nor
|
||||
if (keyword.isOrOrNor() || keyword.hasIterableValue()) {
|
||||
if (keyword.isOrOrNor() || (keyword.hasIterableValue() && !keyword.isGeometry())) {
|
||||
|
||||
Iterable<?> conditions = keyword.getValue();
|
||||
BasicDBList newConditions = new BasicDBList();
|
||||
@@ -239,6 +254,10 @@ public class QueryMapper {
|
||||
return new BasicDBObject(keyword.getKey(), newConditions);
|
||||
}
|
||||
|
||||
if (keyword.isSample()) {
|
||||
return exampleMapper.getMappedExample(keyword.<Example<?>> getValue(), entity);
|
||||
}
|
||||
|
||||
return new BasicDBObject(keyword.getKey(), convertSimpleOrDBObject(keyword.getValue(), entity));
|
||||
}
|
||||
|
||||
@@ -298,7 +317,7 @@ public class QueryMapper {
|
||||
}
|
||||
|
||||
if (isNestedKeyword(value)) {
|
||||
return getMappedKeyword(new Keyword((DBObject) value), null);
|
||||
return getMappedKeyword(new Keyword((DBObject) value), documentField.getPropertyEntity());
|
||||
}
|
||||
|
||||
if (isAssociationConversionNecessary(documentField, value)) {
|
||||
@@ -556,6 +575,26 @@ public class QueryMapper {
|
||||
return key.matches(N_OR_PATTERN);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the current keyword is the {@code $geometry} keyword.
|
||||
*
|
||||
* @return
|
||||
* @since 1.8
|
||||
*/
|
||||
public boolean isGeometry() {
|
||||
return "$geometry".equalsIgnoreCase(key);
|
||||
}
|
||||
|
||||
/**
|
||||
		 * Returns whether the current keyword indicates a sample object.
|
||||
*
|
||||
* @return
|
||||
* @since 1.8
|
||||
*/
|
||||
public boolean isSample() {
|
||||
return "$sample".equalsIgnoreCase(key);
|
||||
}
|
||||
|
||||
public boolean hasIterableValue() {
|
||||
return value instanceof Iterable;
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2015 the original author or authors.
|
||||
* Copyright 2010-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -27,8 +27,9 @@ import java.lang.annotation.Target;
|
||||
* @author Laurent Canet
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
@Target(ElementType.FIELD)
|
||||
@Target({ ElementType.FIELD, ElementType.ANNOTATION_TYPE })
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
public @interface GeoSpatialIndexed {
|
||||
|
||||
|
||||
@@ -29,8 +29,9 @@ import java.lang.annotation.Target;
|
||||
* @author Johno Crawford
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Jordi Llach
|
||||
*/
|
||||
@Target(ElementType.FIELD)
|
||||
@Target({ElementType.ANNOTATION_TYPE, ElementType.FIELD})
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
public @interface Indexed {
|
||||
|
||||
|
||||
@@ -21,6 +21,7 @@ import java.util.concurrent.ConcurrentHashMap;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.context.ApplicationListener;
|
||||
import org.springframework.dao.DataIntegrityViolationException;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.MappingContextEvent;
|
||||
@@ -29,7 +30,12 @@ import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexRes
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.util.MongoDbErrorCodes;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.MongoException;
|
||||
|
||||
/**
|
||||
* Component that inspects {@link MongoPersistentEntity} instances contained in the given {@link MongoMappingContext}
|
||||
@@ -129,9 +135,34 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma
|
||||
}
|
||||
}
|
||||
|
||||
private void createIndex(IndexDefinitionHolder indexDefinition) {
|
||||
mongoDbFactory.getDb().getCollection(indexDefinition.getCollection()).createIndex(indexDefinition.getIndexKeys(),
|
||||
indexDefinition.getIndexOptions());
|
||||
void createIndex(IndexDefinitionHolder indexDefinition) {
|
||||
|
||||
try {
|
||||
|
||||
mongoDbFactory.getDb().getCollection(indexDefinition.getCollection()).createIndex(indexDefinition.getIndexKeys(),
|
||||
indexDefinition.getIndexOptions());
|
||||
|
||||
} catch (MongoException ex) {
|
||||
|
||||
if (MongoDbErrorCodes.isDataIntegrityViolationCode(ex.getCode())) {
|
||||
|
||||
DBObject existingIndex = fetchIndexInformation(indexDefinition);
|
||||
String message = "Cannot create index for '%s' in collection '%s' with keys '%s' and options '%s'.";
|
||||
|
||||
if (existingIndex != null) {
|
||||
message += " Index already defined as '%s'.";
|
||||
}
|
||||
|
||||
throw new DataIntegrityViolationException(
|
||||
String.format(message, indexDefinition.getPath(), indexDefinition.getCollection(),
|
||||
indexDefinition.getIndexKeys(), indexDefinition.getIndexOptions(), existingIndex),
|
||||
ex);
|
||||
}
|
||||
|
||||
RuntimeException exceptionToThrow = mongoDbFactory.getExceptionTranslator().translateExceptionIfPossible(ex);
|
||||
|
||||
throw exceptionToThrow != null ? exceptionToThrow : ex;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -143,4 +174,28 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma
|
||||
public boolean isIndexCreatorFor(MappingContext<?, ?> context) {
|
||||
return this.mappingContext.equals(context);
|
||||
}
|
||||
|
||||
private DBObject fetchIndexInformation(IndexDefinitionHolder indexDefinition) {
|
||||
|
||||
if (indexDefinition == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
Object indexNameToLookUp = indexDefinition.getIndexOptions().get("name");
|
||||
|
||||
for (DBObject index : mongoDbFactory.getDb().getCollection(indexDefinition.getCollection()).getIndexInfo()) {
|
||||
if (ObjectUtils.nullSafeEquals(indexNameToLookUp, index.get("name"))) {
|
||||
return index;
|
||||
}
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
LOGGER.debug(
|
||||
String.format("Failed to load index information for collection '%s'.", indexDefinition.getCollection()), e);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
* Copyright 2014-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -26,10 +26,11 @@ import java.lang.annotation.Target;
|
||||
* all fields marked with {@link TextIndexed} are combined into one single index. <br />
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 1.6
|
||||
*/
|
||||
@Documented
|
||||
@Target({ ElementType.FIELD })
|
||||
@Target({ ElementType.FIELD, ElementType.ANNOTATION_TYPE })
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
public @interface TextIndexed {
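The @Target additions above (here and in the Indexed and GeoSpatialIndexed changes) allow the index annotations to be used as meta-annotations. A hypothetical composed annotation could look like this:

@TextIndexed
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
@Documented
public @interface SearchableField {
	// fields carrying this composed annotation are picked up as part of the text index
}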
|
||||
|
||||
|
||||
@@ -77,7 +77,7 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
Class<?> rawType = typeInformation.getType();
|
||||
String fallback = MongoCollectionUtils.getPreferredCollectionName(rawType);
|
||||
|
||||
Document document = rawType.getAnnotation(Document.class);
|
||||
Document document = this.findAnnotation(Document.class);
|
||||
|
||||
this.expression = detectExpression(document);
|
||||
this.context = new StandardEvaluationContext();
|
||||
|
||||
@@ -1,8 +1,25 @@
|
||||
/*
|
||||
* Copyright 2011-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.lang.annotation.Documented;
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
/**
|
||||
* Annotation to define custom metadata for document fields.
|
||||
@@ -11,6 +28,7 @@ import java.lang.annotation.RetentionPolicy;
|
||||
*/
|
||||
@Documented
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Target({ ElementType.FIELD, ElementType.METHOD, ElementType.ANNOTATION_TYPE })
|
||||
public @interface Field {
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 by the original author(s).
|
||||
* Copyright 2013-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -21,6 +21,7 @@ import com.mongodb.DBObject;
|
||||
* Base class for delete events.
|
||||
*
|
||||
* @author Martin Baumgartner
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public abstract class AbstractDeleteEvent<T> extends MongoMappingEvent<DBObject> {
|
||||
|
||||
@@ -31,11 +32,25 @@ public abstract class AbstractDeleteEvent<T> extends MongoMappingEvent<DBObject>
|
||||
* Creates a new {@link AbstractDeleteEvent} for the given {@link DBObject} and type.
|
||||
*
|
||||
* @param dbo must not be {@literal null}.
|
||||
* @param type , possibly be {@literal null}.
|
||||
* @param type can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #AbstractDeleteEvent(DBObject, Class, String)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public AbstractDeleteEvent(DBObject dbo, Class<T> type) {
|
||||
this(dbo, type, null);
|
||||
}
|
||||
|
||||
super(dbo, dbo);
|
||||
/**
|
||||
* Creates a new {@link AbstractDeleteEvent} for the given {@link DBObject} and type.
|
||||
*
|
||||
* @param dbo must not be {@literal null}.
|
||||
* @param type can be {@literal null}.
|
||||
* @param collectionName can be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public AbstractDeleteEvent(DBObject dbo, Class<T> type, String collectionName) {
|
||||
|
||||
super(dbo, dbo, collectionName);
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 by the original author(s).
|
||||
* Copyright 2011-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -28,6 +28,7 @@ import com.mongodb.DBObject;
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Martin Baumgartner
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public abstract class AbstractMongoEventListener<E> implements ApplicationListener<MongoMappingEvent<?>> {
|
||||
|
||||
@@ -46,14 +47,14 @@ public abstract class AbstractMongoEventListener<E> implements ApplicationListen
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.context.ApplicationListener#onApplicationEvent(org.springframework.context.ApplicationEvent)
|
||||
*/
|
||||
@SuppressWarnings("rawtypes")
|
||||
@SuppressWarnings({ "rawtypes", "unchecked" })
|
||||
public void onApplicationEvent(MongoMappingEvent<?> event) {
|
||||
|
||||
if (event instanceof AfterLoadEvent) {
|
||||
AfterLoadEvent<?> afterLoadEvent = (AfterLoadEvent<?>) event;
|
||||
|
||||
if (domainClass.isAssignableFrom(afterLoadEvent.getType())) {
|
||||
onAfterLoad(event.getDBObject());
|
||||
onAfterLoad((AfterLoadEvent<E>) event);
|
||||
}
|
||||
|
||||
return;
|
||||
@@ -65,18 +66,18 @@ public abstract class AbstractMongoEventListener<E> implements ApplicationListen
|
||||
|
||||
if (eventDomainType != null && domainClass.isAssignableFrom(eventDomainType)) {
|
||||
if (event instanceof BeforeDeleteEvent) {
|
||||
onBeforeDelete(event.getDBObject());
|
||||
onBeforeDelete((BeforeDeleteEvent<E>) event);
|
||||
}
|
||||
if (event instanceof AfterDeleteEvent) {
|
||||
onAfterDelete(event.getDBObject());
|
||||
onAfterDelete((AfterDeleteEvent<E>) event);
|
||||
}
|
||||
}
|
||||
|
||||
return;
|
||||
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
E source = (E) event.getSource();
|
||||
Object source = event.getSource();
|
||||
|
||||
// Check for matching domain type and invoke callbacks
|
||||
if (source != null && !domainClass.isAssignableFrom(source.getClass())) {
|
||||
@@ -84,55 +85,185 @@ public abstract class AbstractMongoEventListener<E> implements ApplicationListen
|
||||
}
|
||||
|
||||
if (event instanceof BeforeConvertEvent) {
|
||||
onBeforeConvert(source);
|
||||
onBeforeConvert((BeforeConvertEvent<E>) event);
|
||||
} else if (event instanceof BeforeSaveEvent) {
|
||||
onBeforeSave(source, event.getDBObject());
|
||||
onBeforeSave((BeforeSaveEvent<E>) event);
|
||||
} else if (event instanceof AfterSaveEvent) {
|
||||
onAfterSave(source, event.getDBObject());
|
||||
onAfterSave((AfterSaveEvent<E>) event);
|
||||
} else if (event instanceof AfterConvertEvent) {
|
||||
onAfterConvert(event.getDBObject(), source);
|
||||
onAfterConvert((AfterConvertEvent<E>) event);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures source element before conversion.
|
||||
*
|
||||
* @param source will never be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #onBeforeConvert(BeforeConvertEvent)}.
*/
@Deprecated
public void onBeforeConvert(E source) {

if (LOG.isDebugEnabled()) {
LOG.debug("onBeforeConvert({})", source);
}
}

/**
* Captures {@link BeforeConvertEvent}.
*
* @param event never {@literal null}.
* @since 1.8
*/
public void onBeforeConvert(BeforeConvertEvent<E> event) {
onBeforeConvert(event.getSource());
}

/**
* Captures source element and {@link com.mongodb.DBObject} representation before save.
*
* @param source will never be {@literal null}.
* @param dbo can be {@literal null}.
* @deprecated since 1.8. Please use {@link #onBeforeSave(BeforeSaveEvent)}.
*/
@Deprecated
public void onBeforeSave(E source, DBObject dbo) {

if (LOG.isDebugEnabled()) {
LOG.debug("onBeforeSave({}, {})", source, dbo);
}
}

/**
* Captures {@link BeforeSaveEvent}.
*
* @param event will never be {@literal null}.
* @since 1.8
*/
public void onBeforeSave(BeforeSaveEvent<E> event) {
onBeforeSave(event.getSource(), event.getDBObject());
}

/**
* Captures source element and {@link com.mongodb.DBObject} representation after save.
*
* @param source will never be {@literal null}.
* @param dbo can be {@literal null}.
* @deprecated since 1.8. Please use {@link #onAfterSave(AfterSaveEvent)}.
*/
@Deprecated
public void onAfterSave(E source, DBObject dbo) {

if (LOG.isDebugEnabled()) {
LOG.debug("onAfterSave({}, {})", source, dbo);
}
}

/**
* Captures {@link AfterSaveEvent}.
*
* @param event will never be {@literal null}.
* @since 1.8
*/
public void onAfterSave(AfterSaveEvent<E> event) {
onAfterSave(event.getSource(), event.getDBObject());
}

/**
* Captures raw {@link com.mongodb.DBObject} when read from MongoDB.
*
* @param dbo can be {@literal null}.
* @deprecated since 1.8. Please use {@link #onAfterLoad(AfterLoadEvent)}.
*/
@Deprecated
public void onAfterLoad(DBObject dbo) {

if (LOG.isDebugEnabled()) {
LOG.debug("onAfterLoad({})", dbo);
}
}

/**
* Captures {@link AfterLoadEvent}.
*
* @param event will never be {@literal null}.
* @since 1.8
*/
public void onAfterLoad(AfterLoadEvent<E> event) {
onAfterLoad(event.getDBObject());
}

/**
* Captures raw {@link com.mongodb.DBObject} and converted domain type after conversion.
*
* @param dbo can be {@literal null}.
* @param source will never be {@literal null}.
* @deprecated since 1.8. Please use {@link #onAfterConvert(AfterConvertEvent)}.
*/
@Deprecated
public void onAfterConvert(DBObject dbo, E source) {

if (LOG.isDebugEnabled()) {
LOG.debug("onAfterConvert({}, {})", dbo, source);
}
}

/**
* Captures {@link AfterConvertEvent}.
*
* @param event will never be {@literal null}.
* @since 1.8
*/
public void onAfterConvert(AfterConvertEvent<E> event) {
onAfterConvert(event.getDBObject(), event.getSource());

}

/**
* Captures {@link com.mongodb.DBObject} after delete.
*
* @param dbo can be {@literal null}.
* @deprecated since 1.8. Please use {@link #onAfterDelete(AfterDeleteEvent)}.
*/
@Deprecated
public void onAfterDelete(DBObject dbo) {

if (LOG.isDebugEnabled()) {
LOG.debug("onAfterDelete({})", dbo);
}
}

/**
* Captures {@link AfterDeleteEvent}.
*
* @param event will never be {@literal null}.
* @since 1.8
*/
public void onAfterDelete(AfterDeleteEvent<E> event) {
onAfterDelete(event.getDBObject());
}

/**
* Capture {@link com.mongodb.DBObject} before delete.
*
* @param dbo can be {@literal null}.
* @deprecated since 1.8. Please use {@link #onBeforeDelete(BeforeDeleteEvent)}.
*/
@Deprecated
public void onBeforeDelete(DBObject dbo) {

if (LOG.isDebugEnabled()) {
LOG.debug("onBeforeDelete({})", dbo);
}
}

/**
* Capture {@link BeforeDeleteEvent}.
*
* @param event will never be {@literal null}.
* @since 1.8
*/
public void onBeforeDelete(BeforeDeleteEvent<E> event) {
onBeforeDelete(event.getDBObject());
}
}

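For illustration only, a minimal listener built on the new event-based callbacks above might look like this; the Account domain type and the "accounts" collection name are hypothetical and not part of this change.

import org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener;
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;

// Hypothetical domain type used only for this sketch.
class Account {}

class AccountEventListener extends AbstractMongoEventListener<Account> {

	@Override
	public void onBeforeSave(BeforeSaveEvent<Account> event) {

		// Since 1.8 the event also exposes the collection it refers to.
		String collection = event.getCollectionName();

		// The DBObject can be null, so guard before mutating it.
		if ("accounts".equals(collection) && event.getDBObject() != null) {
			event.getDBObject().put("lastModified", System.currentTimeMillis());
		}
	}
}
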
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 by the original author(s).
* Copyright (c) 2011-2015 by the original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -13,20 +13,42 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.springframework.data.mongodb.core.mapping.event;

import com.mongodb.DBObject;

/**
* {@link MongoMappingEvent} thrown after convert of a document.
*
* @author Jon Brisbin <jbrisbin@vmware.com>
* @author Christoph Strobl
*/
public class AfterConvertEvent<E> extends MongoMappingEvent<E> {

private static final long serialVersionUID = 1L;

/**
* Creates new {@link AfterConvertEvent}.
*
* @param dbo can be {@literal null}.
* @param source must not be {@literal null}.
* @deprecated since 1.8. Please use {@link #AfterConvertEvent(DBObject, Object, String)}.
*/
@Deprecated
public AfterConvertEvent(DBObject dbo, E source) {
super(source, dbo);
this(dbo, source, null);
}

/**
* Creates new {@link AfterConvertEvent}.
*
* @param dbo can be {@literal null}.
* @param source must not be {@literal null}.
* @param collectionName can be {@literal null}.
* @since 1.8
*/
public AfterConvertEvent(DBObject dbo, E source, String collectionName) {
super(source, dbo, collectionName);
}

}

@@ -1,5 +1,5 @@
/*
* Copyright 2013 by the original author(s).
* Copyright 2013-2015 by the original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -22,6 +22,7 @@ import com.mongodb.DBObject;
* will be the query document <em>after</em> it has been mapped onto the domain type handled.
*
* @author Martin Baumgartner
* @author Christoph Strobl
*/
public class AfterDeleteEvent<T> extends AbstractDeleteEvent<T> {

@@ -32,8 +33,22 @@ public class AfterDeleteEvent<T> extends AbstractDeleteEvent<T> {
*
* @param dbo must not be {@literal null}.
* @param type can be {@literal null}.
* @deprecated since 1.8. Please use {@link #AfterDeleteEvent(DBObject, Class, String)}.
*/
@Deprecated
public AfterDeleteEvent(DBObject dbo, Class<T> type) {
super(dbo, type);
this(dbo, type, null);
}

/**
* Creates a new {@link AfterDeleteEvent} for the given {@link DBObject}, type and collectionName.
*
* @param dbo must not be {@literal null}.
* @param type can be {@literal null}.
* @param collectionName can be {@literal null}.
* @since 1.8
*/
public AfterDeleteEvent(DBObject dbo, Class<T> type, String collectionName) {
super(dbo, type, collectionName);
}
}

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 by the original author(s).
* Copyright (c) 2011-2015 by the original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -26,6 +26,7 @@ import com.mongodb.DBObject;
* @author Oliver Gierke
* @author Jon Brisbin
* @author Christoph Leiter
* @author Christoph Strobl
*/
public class AfterLoadEvent<T> extends MongoMappingEvent<DBObject> {

@@ -36,11 +37,25 @@ public class AfterLoadEvent<T> extends MongoMappingEvent<DBObject> {
* Creates a new {@link AfterLoadEvent} for the given {@link DBObject} and type.
*
* @param dbo must not be {@literal null}.
* @param type must not be {@literal null}.
* @param type can be {@literal null}.
* @deprecated since 1.8. Please use {@link #AfterLoadEvent(DBObject, Class, String)}.
*/
@Deprecated
public AfterLoadEvent(DBObject dbo, Class<T> type) {
this(dbo, type, null);
}

super(dbo, dbo);
/**
* Creates a new {@link AfterLoadEvent} for the given {@link DBObject}, type and collectionName.
*
* @param dbo must not be {@literal null}.
* @param type must not be {@literal null}.
* @param collectionName can be {@literal null}.
* @since 1.8
*/
public AfterLoadEvent(DBObject dbo, Class<T> type, String collectionName) {

super(dbo, dbo, collectionName);

Assert.notNull(type, "Type must not be null!");
this.type = type;

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 by the original author(s).
* Copyright (c) 2011-2015 by the original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -19,14 +19,37 @@ package org.springframework.data.mongodb.core.mapping.event;
import com.mongodb.DBObject;

/**
* {@link MongoMappingEvent} triggered after save of a document.
*
* @author Jon Brisbin <jbrisbin@vmware.com>
* @author Christoph Strobl
*/
public class AfterSaveEvent<E> extends MongoMappingEvent<E> {

private static final long serialVersionUID = 1L;

/**
* Creates new {@link AfterSaveEvent}
*
* @param source must not be {@literal null}.
* @param dbo can be {@literal null}.
* @deprecated since 1.8. Please use {@link #AfterSaveEvent(Object, DBObject, String)}.
*/
@Deprecated
public AfterSaveEvent(E source, DBObject dbo) {
super(source, dbo);
}

/**
* Creates new {@link AfterSaveEvent}.
*
* @param source must not be {@literal null}.
* @param dbo can be {@literal null}.
* @param collectionName can be {@literal null}.
* @since 1.8
*/
public AfterSaveEvent(E source, DBObject dbo, String collectionName) {
super(source, dbo, collectionName);
}

}

@@ -1,5 +1,5 @@
/*
* Copyright 2011-2012 the original author or authors.
* Copyright 2011-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -20,12 +20,31 @@ package org.springframework.data.mongodb.core.mapping.event;
*
* @author Jon Brisbin
* @author Oliver Gierke
* @author Christoph Strobl
*/
public class BeforeConvertEvent<T> extends MongoMappingEvent<T> {

private static final long serialVersionUID = 252614269008845243L;

/**
* Creates new {@link BeforeConvertEvent}.
*
* @param source must not be {@literal null}.
* @deprecated since 1.8. Please use {@link #BeforeConvertEvent(Object, String)}.
*/
@Deprecated
public BeforeConvertEvent(T source) {
super(source, null);
this(source, null);
}

/**
* Creates new {@link BeforeConvertEvent}.
*
* @param source must not be {@literal null}.
* @param collectionName can be {@literal null}.
* @since 1.8
*/
public BeforeConvertEvent(T source, String collectionName) {
super(source, null, collectionName);
}
}

@@ -1,5 +1,5 @@
/*
* Copyright 2013 by the original author(s).
* Copyright 2013-2015 by the original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -22,6 +22,7 @@ import com.mongodb.DBObject;
* document <em>before</em> being mapped based on the domain class handled.
*
* @author Martin Baumgartner
* @author Christoph Strobl
*/
public class BeforeDeleteEvent<T> extends AbstractDeleteEvent<T> {

@@ -32,8 +33,22 @@ public class BeforeDeleteEvent<T> extends AbstractDeleteEvent<T> {
*
* @param dbo must not be {@literal null}.
* @param type can be {@literal null}.
* @deprecated since 1.8. Please use {@link #BeforeDeleteEvent(DBObject, Class, String)}.
*/
@Deprecated
public BeforeDeleteEvent(DBObject dbo, Class<T> type) {
super(dbo, type);
this(dbo, type, null);
}

/**
* Creates a new {@link BeforeDeleteEvent} for the given {@link DBObject}, type and collectionName.
*
* @param dbo must not be {@literal null}.
* @param type can be {@literal null}.
* @param collectionName can be {@literal null}.
* @since 1.8
*/
public BeforeDeleteEvent(DBObject dbo, Class<T> type, String collectionName) {
super(dbo, type, collectionName);
}
}

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 by the original author(s).
* Copyright (c) 2011-2015 by the original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -19,14 +19,37 @@ package org.springframework.data.mongodb.core.mapping.event;
import com.mongodb.DBObject;

/**
* {@link MongoMappingEvent} triggered before save of a document.
*
* @author Jon Brisbin <jbrisbin@vmware.com>
* @author Christoph Strobl
*/
public class BeforeSaveEvent<E> extends MongoMappingEvent<E> {

private static final long serialVersionUID = 1L;

/**
* Creates new {@link BeforeSaveEvent}.
*
* @param source must not be {@literal null}.
* @param dbo can be {@literal null}.
* @deprecated since 1.8. Please use {@link #BeforeSaveEvent(Object, DBObject, String)}.
*/
@Deprecated
public BeforeSaveEvent(E source, DBObject dbo) {
super(source, dbo);
}

/**
* Creates new {@link BeforeSaveEvent}.
*
* @param source must not be {@literal null}.
* @param dbo can be {@literal null}.
* @param collectionName can be {@literal null}.
* @since 1.8
*/
public BeforeSaveEvent(E source, DBObject dbo, String collectionName) {
super(source, dbo, collectionName);
}

}

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 by the original author(s).
* Copyright (c) 2011-2015 by the original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,26 +16,69 @@

package org.springframework.data.mongodb.core.mapping.event;

import com.mongodb.DBObject;
import org.springframework.context.ApplicationEvent;

import com.mongodb.DBObject;

/**
* Base {@link ApplicationEvent} triggered by Spring Data MongoDB.
*
* @author Jon Brisbin <jbrisbin@vmware.com>
* @author Christoph Strobl
*/
public class MongoMappingEvent<T> extends ApplicationEvent {

private static final long serialVersionUID = 1L;
private final DBObject dbo;
private final String collectionName;

/**
* Creates new {@link MongoMappingEvent}.
*
* @param source must not be {@literal null}.
* @param dbo can be {@literal null}.
* @deprecated since 1.8. Please use {@link #MongoMappingEvent(Object, DBObject, String)}.
*/
@Deprecated
public MongoMappingEvent(T source, DBObject dbo) {
super(source);
this.dbo = dbo;
this(source, dbo, null);
}

/**
* Creates new {@link MongoMappingEvent}.
*
* @param source must not be {@literal null}.
* @param dbo can be {@literal null}.
* @param collectionName can be {@literal null}.
*/
public MongoMappingEvent(T source, DBObject dbo, String collectionName) {

super(source);
this.dbo = dbo;
this.collectionName = collectionName;
}

/**
* @return {@literal null} if not set.
*/
public DBObject getDBObject() {
return dbo;
}

/**
* Get the collection the event refers to.
*
* @return {@literal null} if not set.
* @since 1.8
*/
public String getCollectionName() {
return collectionName;
}

/*
* (non-Javadoc)
* @see java.util.EventObject#getSource()
*/
@SuppressWarnings({ "unchecked" })
@Override
public T getSource() {

@@ -26,6 +26,7 @@ import java.util.Map.Entry;
import java.util.regex.Pattern;

import org.bson.BSON;
import org.springframework.data.domain.Example;
import org.springframework.data.geo.Circle;
import org.springframework.data.geo.Point;
import org.springframework.data.geo.Shape;
@@ -88,6 +89,30 @@ public class Criteria implements CriteriaDefinition {
return new Criteria(key);
}

/**
* Static factory method to create a {@link Criteria} matching an example object.
*
* @param example must not be {@literal null}.
* @return
* @see Criteria#alike(Example)
* @since 1.8
*/
public static Criteria byExample(Object example) {
return byExample(Example.of(example));
}

/**
* Static factory method to create a {@link Criteria} matching an example object.
*
* @param example must not be {@literal null}.
* @return
* @see Criteria#alike(Example)
* @since 1.8
*/
public static Criteria byExample(Example<?> example) {
return new Criteria().alike(example);
}

/**
* Static factory method to create a Criteria using the provided key
*
@@ -434,7 +459,23 @@ public class Criteria implements CriteriaDefinition {
}

/**
* Creates a geospatical criterion using a {@literal $maxDistance} operation, for use with $near
* Creates criterion using {@code $geoIntersects} operator which matches intersections of the given {@code geoJson}
* structure and the documents one. Requires MongoDB 2.4 or better.
*
* @param geoJson must not be {@literal null}.
* @return
* @since 1.8
*/
@SuppressWarnings("rawtypes")
public Criteria intersects(GeoJson geoJson) {

Assert.notNull(geoJson, "GeoJson must not be null!");
criteria.put("$geoIntersects", geoJson);
return this;
}

/**
* Creates a geo-spatial criterion using a {@literal $maxDistance} operation, for use with $near
*
* @see http://docs.mongodb.org/manual/reference/operator/query/maxDistance/
* @param maxDistance
@@ -482,6 +523,20 @@ public class Criteria implements CriteriaDefinition {
return this;
}

/**
* Creates a criterion using the given object as a pattern.
*
* @param sample
* @return
* @since 1.8
*/
public Criteria alike(Example<?> sample) {

criteria.put("$sample", sample);
this.criteriaChain.add(this);
return this;
}

/**
* Creates an 'or' criteria using the $or operator for all of the provided criteria
* <p>

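As a hedged usage sketch of the new byExample(...) factory above: the Person probe type and the mongoOperations variable are assumptions made for illustration, not part of this change.

// Hypothetical probe object; its populated properties act as the match criteria.
Person probe = new Person();
probe.setLastname("Matthews");

Query query = Query.query(Criteria.byExample(probe));
List<Person> result = mongoOperations.find(query, Person.class);
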
@@ -0,0 +1,106 @@
/*
* Copyright 2015-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.query;

import java.util.regex.Pattern;

import org.springframework.data.repository.query.parser.Part.Type;
import org.springframework.util.ObjectUtils;

/**
* @author Christoph Strobl
* @author Mark Paluch
* @since 1.8
*/
public enum MongoRegexCreator {

INSTANCE;

private static final Pattern PUNCTATION_PATTERN = Pattern.compile("\\p{Punct}");

/**
* Creates a regular expression String to be used with {@code $regex}.
*
* @param source the plain String
* @param type
* @return {@literal source} when {@literal source} or {@literal type} is {@literal null}.
*/
public String toRegularExpression(String source, Type type) {

if (type == null || source == null) {
return source;
}

String regex = prepareAndEscapeStringBeforeApplyingLikeRegex(source, type);

switch (type) {
case STARTING_WITH:
regex = "^" + regex;
break;
case ENDING_WITH:
regex = regex + "$";
break;
case CONTAINING:
case NOT_CONTAINING:
regex = ".*" + regex + ".*";
break;
case SIMPLE_PROPERTY:
case NEGATING_SIMPLE_PROPERTY:
regex = "^" + regex + "$";
default:
}

return regex;
}

private String prepareAndEscapeStringBeforeApplyingLikeRegex(String source, Type type) {

if (ObjectUtils.nullSafeEquals(Type.REGEX, type)) {
return source;
}

if (!ObjectUtils.nullSafeEquals(Type.LIKE, type)) {
return PUNCTATION_PATTERN.matcher(source).find() ? Pattern.quote(source) : source;
}

if (source.equals("*")) {
return ".*";
}

StringBuilder sb = new StringBuilder();

boolean leadingWildcard = source.startsWith("*");
boolean trailingWildcard = source.endsWith("*");

String valueToUse = source.substring(leadingWildcard ? 1 : 0,
trailingWildcard ? source.length() - 1 : source.length());

if (PUNCTATION_PATTERN.matcher(valueToUse).find()) {
valueToUse = Pattern.quote(valueToUse);
}

if (leadingWildcard) {
sb.append(".*");
}
sb.append(valueToUse);
if (trailingWildcard) {
sb.append(".*");
}

return sb.toString();
}

}
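To illustrate the mapping implemented above, a few sample inputs and the regular expressions they produce; the values shown follow directly from the code in this file.

MongoRegexCreator creator = MongoRegexCreator.INSTANCE;

creator.toRegularExpression("conflux", Type.STARTING_WITH);  // "^conflux"
creator.toRegularExpression("conflux", Type.ENDING_WITH);    // "conflux$"
creator.toRegularExpression("*conflux*", Type.LIKE);         // ".*conflux.*"
creator.toRegularExpression("con.flux", Type.CONTAINING);    // ".*\Qcon.flux\E.*" (punctuation gets quoted)
creator.toRegularExpression(null, Type.CONTAINING);          // null (source is returned as-is)
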
@@ -1,5 +1,5 @@
/*
* Copyright 2012 the original author or authors.
* Copyright 2012-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,12 +16,15 @@
package org.springframework.data.mongodb.core.query;

import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;

import org.springframework.core.convert.converter.Converter;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.mongodb.util.JSON;

@@ -29,6 +32,7 @@ import com.mongodb.util.JSON;
* Utility methods for JSON serialization.
*
* @author Oliver Gierke
* @author Christoph Strobl
*/
public abstract class SerializationUtils {

@@ -36,6 +40,68 @@ public abstract class SerializationUtils {

}

/**
* Flattens out a given {@link DBObject}.
*
* <pre>
* <code>
* {
* _id : 1
* nested : { value : "conflux"}
* }
* </code>
* will result in
* <code>
* {
* _id : 1
* nested.value : "conflux"
* }
* </code>
* </pre>
*
* @param source can be {@literal null}.
* @return {@link Collections#emptyMap()} when source is {@literal null}
* @since 1.8
*/
public static Map<String, Object> flattenMap(DBObject source) {

if (source == null) {
return Collections.emptyMap();
}

Map<String, Object> result = new HashMap<String, Object>();
toFlatMap("", source, result);
return result;
}

private static void toFlatMap(String currentPath, Object source, Map<String, Object> map) {

if (source instanceof BasicDBObject) {

BasicDBObject dbo = (BasicDBObject) source;
Iterator<Map.Entry<String, Object>> iter = dbo.entrySet().iterator();
String pathPrefix = currentPath.isEmpty() ? "" : currentPath + ".";

while (iter.hasNext()) {

Map.Entry<String, Object> entry = iter.next();

if (entry.getKey().startsWith("$")) {
if (map.containsKey(currentPath)) {
((BasicDBObject) map.get(currentPath)).put(entry.getKey(), entry.getValue());
} else {
map.put(currentPath, new BasicDBObject(entry.getKey(), entry.getValue()));
}
} else {

toFlatMap(pathPrefix + entry.getKey(), entry.getValue(), map);
}
}
} else {
map.put(currentPath, source);
}
}

/**
* Serializes the given object into pseudo-JSON meaning it's trying to create a JSON representation as far as possible
* but falling back to the given object's {@link Object#toString()} method if it's not serializable. Useful for

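A brief sketch of the new flattenMap(...) helper, mirroring the Javadoc example above.

DBObject source = new BasicDBObject("_id", 1)
		.append("nested", new BasicDBObject("value", "conflux"));

Map<String, Object> flat = SerializationUtils.flattenMap(source);
// flat now contains: { "_id" : 1, "nested.value" : "conflux" }

SerializationUtils.flattenMap(null); // returns Collections.emptyMap()
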
@@ -1,5 +1,5 @@
/*
* Copyright 2014 the original author or authors.
* Copyright 2014-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -25,10 +25,11 @@ import org.springframework.data.annotation.QueryAnnotation;

/**
* @author Christoph Strobl
* @author Mark Paluch
* @since 1.6
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
@Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE })
@Documented
@QueryAnnotation
public @interface Meta {

@@ -1,5 +1,5 @@
/*
* Copyright 2010-2014 the original author or authors.
* Copyright 2010-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -18,19 +18,23 @@ package org.springframework.data.mongodb.repository;
import java.io.Serializable;
import java.util.List;

import org.springframework.data.domain.Example;
import org.springframework.data.domain.Sort;
import org.springframework.data.repository.NoRepositoryBean;
import org.springframework.data.repository.PagingAndSortingRepository;
import org.springframework.data.repository.query.QueryByExampleExecutor;

/**
* Mongo specific {@link org.springframework.data.repository.Repository} interface.
*
*
* @author Oliver Gierke
* @author Christoph Strobl
* @author Thomas Darimont
* @author Mark Paluch
*/
@NoRepositoryBean
public interface MongoRepository<T, ID extends Serializable> extends PagingAndSortingRepository<T, ID> {
public interface MongoRepository<T, ID extends Serializable>
extends PagingAndSortingRepository<T, ID>, QueryByExampleExecutor<T> {

/*
* (non-Javadoc)
@@ -54,7 +58,7 @@ public interface MongoRepository<T, ID extends Serializable> extends PagingAndSo
* Inserts the given a given entity. Assumes the instance to be new to be able to apply insertion optimizations. Use
* the returned instance for further operations as the save operation might have changed the entity instance
* completely. Prefer using {@link #save(Object)} instead to avoid the usage of store-specific API.
*
*
* @param entity must not be {@literal null}.
* @return the saved entity
* @since 1.7
@@ -65,10 +69,21 @@ public interface MongoRepository<T, ID extends Serializable> extends PagingAndSo
* Inserts the given entities. Assumes the given entities to have not been persisted yet and thus will optimize the
* insert over a call to {@link #save(Iterable)}. Prefer using {@link #save(Iterable)} to avoid the usage of store
* specific API.
*
*
* @param entities must not be {@literal null}.
* @return the saved entities
* @since 1.7
*/
<S extends T> List<S> insert(Iterable<S> entities);

/* (non-Javadoc)
* @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example)
*/
<S extends T> List<S> findAll(Example<S> example);

/* (non-Javadoc)
* @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example, org.springframework.data.domain.Sort)
*/
<S extends T> List<S> findAll(Example<S> example, Sort sort);

}

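Since MongoRepository now extends QueryByExampleExecutor, derived repositories can run Query-by-Example searches; the PersonRepository and Person types below are hypothetical and used only to illustrate the new findAll(...) overloads.

interface PersonRepository extends MongoRepository<Person, String> {}

// Probe carrying the properties to match; unset (null) properties are ignored.
Person probe = new Person();
probe.setLastname("Matthews");

List<Person> people = personRepository.findAll(Example.of(probe));
List<Person> sorted = personRepository.findAll(Example.of(probe), new Sort("firstname"));
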
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2014 the original author or authors.
* Copyright 2011-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -30,9 +30,10 @@ import org.springframework.data.annotation.QueryAnnotation;
* @author Oliver Gierke
* @author Thomas Darimont
* @author Christoph Strobl
* @author Mark Paluch
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
@Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE })
@Documented
@QueryAnnotation
public @interface Query {

@@ -27,6 +27,7 @@ import org.springframework.context.annotation.ComponentScan.Filter;
import org.springframework.context.annotation.Import;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.repository.support.MongoRepositoryFactoryBean;
import org.springframework.data.repository.config.DefaultRepositoryBaseClass;
import org.springframework.data.repository.query.QueryLookupStrategy;
import org.springframework.data.repository.query.QueryLookupStrategy.Key;

@@ -107,6 +108,14 @@ public @interface EnableMongoRepositories {
*/
Class<?> repositoryFactoryBeanClass() default MongoRepositoryFactoryBean.class;

/**
* Configure the repository base class to be used to create repository proxies for this particular configuration.
*
* @return
* @since 1.8
*/
Class<?> repositoryBaseClass() default DefaultRepositoryBaseClass.class;

/**
* Configures the name of the {@link MongoTemplate} bean to be used with the repositories detected.
*

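As a hedged configuration sketch of the new repositoryBaseClass attribute above: MyRepositoryImpl is a hypothetical custom base class, not part of this change.

@Configuration
@EnableMongoRepositories(repositoryBaseClass = MyRepositoryImpl.class)
class MongoConfig {
	// MyRepositoryImpl would extend SimpleMongoRepository and serve as the
	// common base class for all repository proxies created by this configuration.
}
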
@@ -1,5 +1,5 @@
/*
* Copyright 2010-2015 the original author or authors.
* Copyright 2010-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,31 +15,25 @@
*/
package org.springframework.data.mongodb.repository.query;

import java.util.Collections;
import java.util.List;

import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Range;
import org.springframework.data.domain.Slice;
import org.springframework.data.domain.SliceImpl;
import org.springframework.data.geo.Distance;
import org.springframework.data.geo.GeoPage;
import org.springframework.data.geo.GeoResult;
import org.springframework.data.geo.GeoResults;
import org.springframework.data.geo.Point;
import org.springframework.core.convert.converter.Converter;
import org.springframework.data.convert.EntityInstantiators;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.CollectionExecution;
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.DeleteExecution;
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.GeoNearExecution;
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.PagedExecution;
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.PagingGeoNearExecution;
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.ResultProcessingConverter;
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.ResultProcessingExecution;
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.SingleEntityExecution;
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.SlicedExecution;
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.StreamExecution;
import org.springframework.data.repository.query.ParameterAccessor;
import org.springframework.data.repository.query.RepositoryQuery;
import org.springframework.data.util.CloseableIterator;
import org.springframework.data.util.StreamUtils;
import org.springframework.data.util.TypeInformation;
import org.springframework.data.repository.query.ResultProcessor;
import org.springframework.util.Assert;

import com.mongodb.WriteResult;

/**
* Base class for {@link RepositoryQuery} implementations for Mongo.
*
@@ -51,6 +45,7 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {

private final MongoQueryMethod method;
private final MongoOperations operations;
private final EntityInstantiators instantiators;

/**
* Creates a new {@link AbstractMongoQuery} from the given {@link MongoQueryMethod} and {@link MongoOperations}.
@@ -60,11 +55,12 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
*/
public AbstractMongoQuery(MongoQueryMethod method, MongoOperations operations) {

Assert.notNull(operations);
Assert.notNull(method);
Assert.notNull(operations, "MongoOperations must not be null!");
Assert.notNull(method, "MongoQueryMethod must not be null!");

this.method = method;
this.operations = operations;
this.instantiators = new EntityInstantiators();
}

/*
@@ -86,30 +82,53 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {

applyQueryMetaAttributesWhenPresent(query);

ResultProcessor processor = method.getResultProcessor().withDynamicProjection(accessor);
String collection = method.getEntityInformation().getCollectionName();

MongoQueryExecution execution = getExecution(query, accessor,
new ResultProcessingConverter(processor, operations, instantiators));

return execution.execute(query, processor.getReturnedType().getDomainType(), collection);
}

/**
* Returns the execution instance to use.
*
* @param query must not be {@literal null}.
* @param parameters must not be {@literal null}.
* @param accessor must not be {@literal null}.
* @return
*/
private MongoQueryExecution getExecution(Query query, MongoParameterAccessor accessor,
Converter<Object, Object> resultProcessing) {

if (method.isStreamQuery()) {
return new StreamExecution().execute(query);
} else if (isDeleteQuery()) {
return new DeleteExecution().execute(query);
return new StreamExecution(operations, resultProcessing);
}

return new ResultProcessingExecution(getExecutionToWrap(query, accessor), resultProcessing);
}

private MongoQueryExecution getExecutionToWrap(Query query, MongoParameterAccessor accessor) {

if (isDeleteQuery()) {
return new DeleteExecution(operations, method);
} else if (method.isGeoNearQuery() && method.isPageQuery()) {

MongoParameterAccessor countAccessor = new MongoParametersParameterAccessor(method, parameters);
Query countQuery = createCountQuery(new ConvertingParameterAccessor(operations.getConverter(), countAccessor));

return new GeoNearExecution(accessor).execute(query, countQuery);
return new PagingGeoNearExecution(operations, accessor, method.getReturnType(), this);
} else if (method.isGeoNearQuery()) {
return new GeoNearExecution(accessor).execute(query);
return new GeoNearExecution(operations, accessor, method.getReturnType());
} else if (method.isSliceQuery()) {
return new SlicedExecution(accessor.getPageable()).execute(query);
return new SlicedExecution(operations, accessor.getPageable());
} else if (method.isCollectionQuery()) {
return new CollectionExecution(accessor.getPageable()).execute(query);
return new CollectionExecution(operations, accessor.getPageable());
} else if (method.isPageQuery()) {
return new PagedExecution(accessor.getPageable()).execute(query);
return new PagedExecution(operations, accessor.getPageable());
} else {
return new SingleEntityExecution(isCountQuery()).execute(query);
return new SingleEntityExecution(operations, isCountQuery());
}
}

private Query applyQueryMetaAttributesWhenPresent(Query query) {
Query applyQueryMetaAttributesWhenPresent(Query query) {

if (method.hasQueryMetaAttributes()) {
query.setMeta(method.getQueryMetaAttributes());
@@ -127,12 +146,7 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
* @return
*/
protected Query createCountQuery(ConvertingParameterAccessor accessor) {

Query query = createQuery(accessor);

applyQueryMetaAttributesWhenPresent(query);

return query;
return applyQueryMetaAttributesWhenPresent(createQuery(accessor));
}

/**
@@ -157,292 +171,4 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
* @since 1.5
*/
protected abstract boolean isDeleteQuery();

private abstract class Execution {

abstract Object execute(Query query);

protected List<?> readCollection(Query query) {

MongoEntityMetadata<?> metadata = method.getEntityInformation();

String collectionName = metadata.getCollectionName();
return operations.find(query, metadata.getJavaType(), collectionName);
}
}

/**
* {@link Execution} for collection returning queries.
*
* @author Oliver Gierke
*/
final class CollectionExecution extends Execution {

private final Pageable pageable;

CollectionExecution(Pageable pageable) {
this.pageable = pageable;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query)
*/
@Override
public Object execute(Query query) {
return readCollection(query.with(pageable));
}
}

/**
* {@link Execution} for {@link Slice} query methods.
*
* @author Oliver Gierke
* @author Christoph Strobl
* @since 1.5
*/

final class SlicedExecution extends Execution {

private final Pageable pageable;

SlicedExecution(Pageable pageable) {
this.pageable = pageable;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query)
*/
@Override
@SuppressWarnings({ "unchecked", "rawtypes" })
Object execute(Query query) {

MongoEntityMetadata<?> metadata = method.getEntityInformation();
int pageSize = pageable.getPageSize();

// Apply Pageable but tweak limit to peek into next page
Query modifiedQuery = query.with(pageable).limit(pageSize + 1);

List result = operations.find(modifiedQuery, metadata.getJavaType(), metadata.getCollectionName());

boolean hasNext = result.size() > pageSize;

return new SliceImpl<Object>(hasNext ? result.subList(0, pageSize) : result, pageable, hasNext);
}
}

/**
* {@link Execution} for pagination queries.
*
* @author Oliver Gierke
*/
final class PagedExecution extends Execution {

private final Pageable pageable;

/**
* Creates a new {@link PagedExecution}.
*
* @param pageable
*/
public PagedExecution(Pageable pageable) {

Assert.notNull(pageable);
this.pageable = pageable;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query)
*/
@Override
@SuppressWarnings({ "rawtypes", "unchecked" })
Object execute(Query query) {

MongoEntityMetadata<?> metadata = method.getEntityInformation();
String collectionName = metadata.getCollectionName();
Class<?> type = metadata.getJavaType();

int overallLimit = query.getLimit();
long count = operations.count(query, type, collectionName);
count = overallLimit != 0 ? Math.min(count, query.getLimit()) : count;

boolean pageableOutOfScope = pageable.getOffset() > count;

if (pageableOutOfScope) {
return new PageImpl<Object>(Collections.emptyList(), pageable, count);
}

// Apply raw pagination
query = query.with(pageable);

// Adjust limit if page would exceed the overall limit
if (overallLimit != 0 && pageable.getOffset() + pageable.getPageSize() > overallLimit) {
query.limit(overallLimit - pageable.getOffset());
}

List<?> result = operations.find(query, type, collectionName);
return new PageImpl(result, pageable, count);
}
}

/**
* {@link Execution} to return a single entity.
*
* @author Oliver Gierke
*/
final class SingleEntityExecution extends Execution {

private final boolean countProjection;

private SingleEntityExecution(boolean countProjection) {
this.countProjection = countProjection;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.core.query.Query)
*/
@Override
Object execute(Query query) {

MongoEntityMetadata<?> metadata = method.getEntityInformation();
return countProjection ? operations.count(query, metadata.getJavaType()) : operations.findOne(query,
metadata.getJavaType(), metadata.getCollectionName());
}
}

/**
* {@link Execution} to execute geo-near queries.
*
* @author Oliver Gierke
*/
final class GeoNearExecution extends Execution {

private final MongoParameterAccessor accessor;

public GeoNearExecution(MongoParameterAccessor accessor) {
this.accessor = accessor;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query)
*/
@Override
Object execute(Query query) {

GeoResults<?> results = doExecuteQuery(query);
return isListOfGeoResult() ? results.getContent() : results;
}

/**
* Executes the given {@link Query} to return a page.
*
* @param query must not be {@literal null}.
* @param countQuery must not be {@literal null}.
* @return
*/
Object execute(Query query, Query countQuery) {

MongoEntityMetadata<?> metadata = method.getEntityInformation();
long count = operations.count(countQuery, metadata.getCollectionName());

return new GeoPage<Object>(doExecuteQuery(query), accessor.getPageable(), count);
}

@SuppressWarnings("unchecked")
private GeoResults<Object> doExecuteQuery(Query query) {

Point nearLocation = accessor.getGeoNearLocation();
NearQuery nearQuery = NearQuery.near(nearLocation);

if (query != null) {
nearQuery.query(query);
}

Range<Distance> distances = accessor.getDistanceRange();
Distance maxDistance = distances.getUpperBound();

if (maxDistance != null) {
nearQuery.maxDistance(maxDistance).in(maxDistance.getMetric());
}

Distance minDistance = distances.getLowerBound();

if (minDistance != null) {
nearQuery.minDistance(minDistance).in(minDistance.getMetric());
}

Pageable pageable = accessor.getPageable();
if (pageable != null) {
nearQuery.with(pageable);
}

MongoEntityMetadata<?> metadata = method.getEntityInformation();
return (GeoResults<Object>) operations.geoNear(nearQuery, metadata.getJavaType(), metadata.getCollectionName());
}

private boolean isListOfGeoResult() {

TypeInformation<?> returnType = method.getReturnType();

if (!returnType.getType().equals(List.class)) {
return false;
}

TypeInformation<?> componentType = returnType.getComponentType();
return componentType == null ? false : GeoResult.class.equals(componentType.getType());
}
}

/**
* {@link Execution} removing documents matching the query.
*
* @since 1.5
*/
final class DeleteExecution extends Execution {

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query)
*/
@Override
Object execute(Query query) {

MongoEntityMetadata<?> metadata = method.getEntityInformation();
return deleteAndConvertResult(query, metadata);
}

private Object deleteAndConvertResult(Query query, MongoEntityMetadata<?> metadata) {

if (method.isCollectionQuery()) {
return operations.findAllAndRemove(query, metadata.getJavaType(), metadata.getCollectionName());
}

WriteResult writeResult = operations.remove(query, metadata.getJavaType(), metadata.getCollectionName());
return writeResult != null ? writeResult.getN() : 0L;
}
}

/**
* @author Thomas Darimont
* @since 1.7
*/
final class StreamExecution extends Execution {

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query)
*/
@Override
@SuppressWarnings("unchecked")
Object execute(Query query) {

Class<?> entityType = getQueryMethod().getEntityInformation().getJavaType();

return StreamUtils.createStreamFromIterator((CloseableIterator<Object>) operations.stream(query, entityType));
}
}
}

@@ -41,6 +41,7 @@ import com.mongodb.DBRef;
*
* @author Oliver Gierke
* @author Christoph Strobl
* @author Thomas Darimont
*/
public class ConvertingParameterAccessor implements MongoParameterAccessor {

@@ -89,6 +90,15 @@ public class ConvertingParameterAccessor implements MongoParameterAccessor {
return delegate.getSort();
}

/*
* (non-Javadoc)
* @see org.springframework.data.repository.query.ParameterAccessor#getDynamicProjection()
*/
@Override
public Class<?> getDynamicProjection() {
return delegate.getDynamicProjection();
}

/*
* (non-Javadoc)
* @see org.springframework.data.repository.query.ParameterAccessor#getBindableValue(int)
@@ -216,7 +226,7 @@ public class ConvertingParameterAccessor implements MongoParameterAccessor {
/**
* Returns the given object as {@link Collection}. Will do a copy of it if it implements {@link Iterable} or is an
* array. Will return an empty {@link Collection} in case {@literal null} is given. Will wrap all other types into a
* single-element collction
* single-element collection.
*
* @param source
* @return
@@ -240,6 +250,15 @@ public class ConvertingParameterAccessor implements MongoParameterAccessor {
return source.getClass().isArray() ? CollectionUtils.arrayToList(source) : Collections.singleton(source);
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getValues()
*/
@Override
public Object[] getValues() {
return delegate.getValues();
}

/**
* Custom {@link Iterator} that adds a method to access elements in a converted manner.
*

@@ -0,0 +1,108 @@
|
||||
/*
|
||||
* Copyright 2015-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.query;
|
||||
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.convert.EntityInstantiator;
|
||||
import org.springframework.data.convert.EntityInstantiators;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
import org.springframework.data.mapping.PersistentPropertyAccessor;
|
||||
import org.springframework.data.mapping.PreferredConstructor;
|
||||
import org.springframework.data.mapping.PreferredConstructor.Parameter;
|
||||
import org.springframework.data.mapping.SimplePropertyHandler;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.ParameterValueProvider;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link Converter} to instantiate DTOs from fully equipped domain objects.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
class DtoInstantiatingConverter implements Converter<Object, Object> {
|
||||
|
||||
private final Class<?> targetType;
|
||||
private final MappingContext<? extends PersistentEntity<?, ?>, ? extends PersistentProperty<?>> context;
|
||||
private final EntityInstantiator instantiator;
|
||||
|
||||
/**
|
||||
* Creates a new {@link Converter} to instantiate DTOs.
|
||||
*
|
||||
* @param dtoType must not be {@literal null}.
|
||||
* @param context must not be {@literal null}.
|
||||
* @param instantiators must not be {@literal null}.
|
||||
*/
|
||||
public DtoInstantiatingConverter(Class<?> dtoType,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context,
|
||||
EntityInstantiators instantiator) {
|
||||
|
||||
Assert.notNull(dtoType, "DTO type must not be null!");
|
||||
Assert.notNull(context, "MappingContext must not be null!");
|
||||
Assert.notNull(instantiator, "EntityInstantiators must not be null!");
|
||||
|
||||
this.targetType = dtoType;
|
||||
this.context = context;
|
||||
this.instantiator = instantiator.getInstantiatorFor(context.getPersistentEntity(dtoType));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public Object convert(Object source) {
|
||||
|
||||
if (targetType.isInterface()) {
|
||||
return source;
|
||||
}
|
||||
|
||||
final PersistentEntity<?, ?> sourceEntity = context.getPersistentEntity(source.getClass());
|
||||
final PersistentPropertyAccessor sourceAccessor = sourceEntity.getPropertyAccessor(source);
|
||||
final PersistentEntity<?, ?> targetEntity = context.getPersistentEntity(targetType);
|
||||
final PreferredConstructor<?, ? extends PersistentProperty<?>> constructor = targetEntity
|
||||
.getPersistenceConstructor();
|
||||
|
||||
@SuppressWarnings({ "rawtypes", "unchecked" })
|
||||
Object dto = instantiator.createInstance(targetEntity, new ParameterValueProvider() {
|
||||
|
||||
@Override
|
||||
public Object getParameterValue(Parameter parameter) {
|
||||
return sourceAccessor.getProperty(sourceEntity.getPersistentProperty(parameter.getName()));
|
||||
}
|
||||
});
|
||||
|
||||
final PersistentPropertyAccessor dtoAccessor = targetEntity.getPropertyAccessor(dto);
|
||||
|
||||
targetEntity.doWithProperties(new SimplePropertyHandler() {
|
||||
|
||||
@Override
|
||||
public void doWithPersistentProperty(PersistentProperty<?> property) {
|
||||
|
||||
if (constructor.isConstructorParameter(property)) {
|
||||
return;
|
||||
}
|
||||
|
||||
dtoAccessor.setProperty(property,
|
||||
sourceAccessor.getProperty(sourceEntity.getPersistentProperty(property.getName())));
|
||||
}
|
||||
});
|
||||
|
||||
return dto;
|
||||
}
|
||||
}
|
||||
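The converter above backs returning DTO projections from repository queries; as a hedged sketch (the Person, PersonSummary and PersonRepository types are illustrative assumptions, not part of this change), such a projection is typically declared like this:

// Hypothetical constructor-based DTO populated from the Person domain object.
class PersonSummary {

	private final String firstname;

	PersonSummary(String firstname) {
		this.firstname = firstname;
	}
}

interface PersonRepository extends MongoRepository<Person, String> {

	// Dynamic projection: the requested result type is passed at invocation time.
	<T> List<T> findByLastname(String lastname, Class<T> type);
}
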
@@ -0,0 +1,231 @@
/*
 * Copyright 2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.repository.query;

import java.util.Collections;
import java.util.List;

import javax.xml.bind.DatatypeConverter;

import org.bson.BSON;
import org.springframework.data.mongodb.repository.query.StringBasedMongoQuery.ParameterBinding;
import org.springframework.data.repository.query.EvaluationContextProvider;
import org.springframework.expression.EvaluationContext;
import org.springframework.expression.Expression;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;

import com.mongodb.util.JSON;

/**
 * {@link ExpressionEvaluatingParameterBinder} allows to evaluate, convert and bind parameters to placholders within a
 * {@link String}.
 *
 * @author Christoph Strobl
 * @author Thomas Darimont
 * @author Oliver Gierke
 * @since 1.9
 */
class ExpressionEvaluatingParameterBinder {

	private final SpelExpressionParser expressionParser;
	private final EvaluationContextProvider evaluationContextProvider;

	/**
	 * Creates new {@link ExpressionEvaluatingParameterBinder}
	 *
	 * @param expressionParser must not be {@literal null}.
	 * @param evaluationContextProvider must not be {@literal null}.
	 */
	public ExpressionEvaluatingParameterBinder(SpelExpressionParser expressionParser,
			EvaluationContextProvider evaluationContextProvider) {

		Assert.notNull(expressionParser, "ExpressionParser must not be null!");
		Assert.notNull(evaluationContextProvider, "EvaluationContextProvider must not be null!");

		this.expressionParser = expressionParser;
		this.evaluationContextProvider = evaluationContextProvider;
	}

	/**
	 * Bind values provided by {@link MongoParameterAccessor} to placeholders in {@literal raw} while considering
	 * potential conversions and parameter types.
	 *
	 * @param raw can be {@literal null} or empty.
	 * @param accessor must not be {@literal null}.
	 * @param bindingContext must not be {@literal null}.
	 * @return {@literal null} if given {@code raw} value is empty.
	 */
	public String bind(String raw, MongoParameterAccessor accessor, BindingContext bindingContext) {

		if (!StringUtils.hasText(raw)) {
			return null;
		}

		return replacePlaceholders(raw, accessor, bindingContext);
	}

	/**
	 * Replaced the parameter placeholders with the actual parameter values from the given {@link ParameterBinding}s.
	 *
	 * @param input must not be {@literal null} or empty.
	 * @param accessor must not be {@literal null}.
	 * @param bindings must not be {@literal null}.
	 * @return
	 */
	private String replacePlaceholders(String input, MongoParameterAccessor accessor, BindingContext bindingContext) {

		if (!bindingContext.hasBindings()) {
			return input;
		}

		boolean isCompletlyParameterizedQuery = input.matches("^\\?\\d+$");
		StringBuilder result = new StringBuilder(input);

		for (ParameterBinding binding : bindingContext.getBindings()) {

			String parameter = binding.getParameter();
			int idx = result.indexOf(parameter);

			if (idx == -1) {
				continue;
			}

			String valueForBinding = getParameterValueForBinding(accessor, bindingContext.getParameters(), binding);

			int start = idx;
			int end = idx + parameter.length();

			// If the value to bind is an object literal we need to remove the quoting around the expression insertion point.
			if (valueForBinding.startsWith("{") && !isCompletlyParameterizedQuery) {

				// Is the insertion point actually surrounded by quotes?
				char beforeStart = result.charAt(start - 1);
				char afterEnd = result.charAt(end);

				if ((beforeStart == '\'' || beforeStart == '"') && (afterEnd == '\'' || afterEnd == '"')) {

					// Skip preceding and following quote
					start -= 1;
					end += 1;
				}
			}

			result.replace(start, end, valueForBinding);
		}

		return result.toString();
	}

	/**
	 * Returns the serialized value to be used for the given {@link ParameterBinding}.
	 *
	 * @param accessor must not be {@literal null}.
	 * @param parameters
	 * @param binding must not be {@literal null}.
	 * @return
	 */
	private String getParameterValueForBinding(MongoParameterAccessor accessor, MongoParameters parameters,
			ParameterBinding binding) {

		Object value = binding.isExpression()
				? evaluateExpression(binding.getExpression(), parameters, accessor.getValues())
				: accessor.getBindableValue(binding.getParameterIndex());

		if (value instanceof String && binding.isQuoted()) {
			return (String) value;
		}

		if (value instanceof byte[]) {

			String base64representation = DatatypeConverter.printBase64Binary((byte[]) value);

			if (!binding.isQuoted()) {
				return "{ '$binary' : '" + base64representation + "', '$type' : " + BSON.B_GENERAL + "}";
			}

			return base64representation;
		}

		return JSON.serialize(value);
	}

	/**
	 * Evaluates the given {@code expressionString}.
	 *
	 * @param expressionString must not be {@literal null} or empty.
	 * @param parameters must not be {@literal null}.
	 * @param parameterValues must not be {@literal null}.
	 * @return
	 */
	private Object evaluateExpression(String expressionString, MongoParameters parameters, Object[] parameterValues) {

		EvaluationContext evaluationContext = evaluationContextProvider.getEvaluationContext(parameters, parameterValues);
		Expression expression = expressionParser.parseExpression(expressionString);

		return expression.getValue(evaluationContext, Object.class);
	}

	/**
	 * @author Christoph Strobl
	 * @since 1.9
	 */
	static class BindingContext {

		final MongoParameters parameters;
		final List<ParameterBinding> bindings;

		/**
		 * Creates new {@link BindingContext}.
		 *
		 * @param parameters
		 * @param bindings
		 */
		public BindingContext(MongoParameters parameters, List<ParameterBinding> bindings) {

			this.parameters = parameters;
			this.bindings = bindings;
		}

		/**
		 * @return {@literal true} when list of bindings is not empty.
		 */
		boolean hasBindings() {
			return !CollectionUtils.isEmpty(bindings);
		}

		/**
		 * Get unmodifiable list of {@link ParameterBinding}s.
		 *
		 * @return never {@literal null}.
		 */
		public List<ParameterBinding> getBindings() {
			return Collections.unmodifiableList(bindings);
		}

		/**
		 * Get the associated {@link MongoParameters}.
		 *
		 * @return
		 */
		public MongoParameters getParameters() {
			return parameters;
		}

	}
}
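To make the new binder concrete, here is a minimal, illustrative sketch (not part of the diff) of the kind of repository method it serves. The PersonRepository and Person types are hypothetical; the ?#{...} placeholder is the SpEL expression syntax the parser below recognizes, with [0] referring to the first method argument.

// Illustrative sketch only: hypothetical repository showing a SpEL expression parameter
// that ExpressionEvaluatingParameterBinder evaluates and splices into the query string.
public interface PersonRepository extends MongoRepository<Person, String> {

	@Query("{ 'firstname' : ?#{[0]} }")
	List<Person> findWithSpelByFirstname(String firstname);
}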
@@ -26,6 +26,7 @@ import org.springframework.data.repository.query.ParameterAccessor;
 *
 * @author Oliver Gierke
 * @author Christoph Strobl
 * @author Thomas Darimont
 */
public interface MongoParameterAccessor extends ParameterAccessor {

@@ -51,4 +52,12 @@ public interface MongoParameterAccessor extends ParameterAccessor {
	 * @since 1.6
	 */
	TextCriteria getFullText();

	/**
	 * Returns the raw parameter values of the underlying query method.
	 *
	 * @return
	 * @since 1.8
	 */
	Object[] getValues();
}
@@ -32,22 +32,25 @@ import org.springframework.util.ClassUtils;
 *
 * @author Oliver Gierke
 * @author Christoph Strobl
 * @author Thomas Darimont
 */
public class MongoParametersParameterAccessor extends ParametersParameterAccessor implements MongoParameterAccessor {

	private final MongoQueryMethod method;
	private final List<Object> values;

	/**
	 * Creates a new {@link MongoParametersParameterAccessor}.
	 *
	 * @param method must not be {@literal null}.
	 * @param values must not be {@@iteral null}.
	 * @param values must not be {@literal null}.
	 */
	public MongoParametersParameterAccessor(MongoQueryMethod method, Object[] values) {

		super(method.getParameters(), values);

		this.method = method;
		this.values = Arrays.asList(values);
	}

	public Range<Distance> getDistanceRange() {
@@ -122,8 +125,17 @@ public class MongoParametersParameterAccessor extends ParametersParameterAccesso
			return ((TextCriteria) fullText);
		}

		throw new IllegalArgumentException(String.format(
				"Expected full text parameter to be one of String, Term or TextCriteria but found %s.",
				ClassUtils.getShortName(fullText.getClass())));
		throw new IllegalArgumentException(
				String.format("Expected full text parameter to be one of String, Term or TextCriteria but found %s.",
						ClassUtils.getShortName(fullText.getClass())));
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getValues()
	 */
	@Override
	public Object[] getValues() {
		return values.toArray();
	}
}
@@ -38,6 +38,7 @@ import org.springframework.data.mongodb.core.index.GeoSpatialIndexed;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.MongoRegexCreator;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor.PotentiallyConvertingIterator;
import org.springframework.data.repository.query.parser.AbstractQueryCreator;
@@ -46,7 +47,6 @@ import org.springframework.data.repository.query.parser.Part.IgnoreCaseType;
import org.springframework.data.repository.query.parser.Part.Type;
import org.springframework.data.repository.query.parser.PartTree;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;

/**
 * Custom query creator to create Mongo criterias.
@@ -169,39 +169,38 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
	 * @param parameters
	 * @return
	 */
	private Criteria from(Part part, MongoPersistentProperty property, Criteria criteria,
			PotentiallyConvertingIterator parameters) {
	private Criteria from(Part part, MongoPersistentProperty property, Criteria criteria, Iterator<Object> parameters) {

		Type type = part.getType();

		switch (type) {
			case AFTER:
			case GREATER_THAN:
				return criteria.gt(parameters.nextConverted(property));
				return criteria.gt(parameters.next());
			case GREATER_THAN_EQUAL:
				return criteria.gte(parameters.nextConverted(property));
				return criteria.gte(parameters.next());
			case BEFORE:
			case LESS_THAN:
				return criteria.lt(parameters.nextConverted(property));
				return criteria.lt(parameters.next());
			case LESS_THAN_EQUAL:
				return criteria.lte(parameters.nextConverted(property));
				return criteria.lte(parameters.next());
			case BETWEEN:
				return criteria.gt(parameters.nextConverted(property)).lt(parameters.nextConverted(property));
				return criteria.gt(parameters.next()).lt(parameters.next());
			case IS_NOT_NULL:
				return criteria.ne(null);
			case IS_NULL:
				return criteria.is(null);
			case NOT_IN:
				return criteria.nin(nextAsArray(parameters, property));
				return criteria.nin(nextAsArray(parameters));
			case IN:
				return criteria.in(nextAsArray(parameters, property));
				return criteria.in(nextAsArray(parameters));
			case LIKE:
			case STARTING_WITH:
			case ENDING_WITH:
			case CONTAINING:
				return createContainingCriteria(part, property, criteria, parameters);
			case NOT_CONTAINING:
				return createContainingCriteria(part, property, criteria, parameters).not();
				return createContainingCriteria(part, property, criteria.not(), parameters);
			case REGEX:
				return criteria.regex(parameters.next().toString());
			case EXISTS:
@@ -241,12 +240,12 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
				return criteria.within((Shape) parameter);
			case SIMPLE_PROPERTY:

				return isSimpleComparisionPossible(part) ? criteria.is(parameters.nextConverted(property))
				return isSimpleComparisionPossible(part) ? criteria.is(parameters.next())
						: createLikeRegexCriteriaOrThrow(part, property, criteria, parameters, false);

			case NEGATING_SIMPLE_PROPERTY:

				return isSimpleComparisionPossible(part) ? criteria.ne(parameters.nextConverted(property))
				return isSimpleComparisionPossible(part) ? criteria.ne(parameters.next())
						: createLikeRegexCriteriaOrThrow(part, property, criteria, parameters, true);
			default:
				throw new IllegalArgumentException("Unsupported keyword!");
@@ -278,7 +277,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
	 * @return the criteria extended with the like-regex.
	 */
	private Criteria createLikeRegexCriteriaOrThrow(Part part, MongoPersistentProperty property, Criteria criteria,
			PotentiallyConvertingIterator parameters, boolean shouldNegateExpression) {
			Iterator<Object> parameters, boolean shouldNegateExpression) {

		PropertyPath path = part.getProperty().getLeafProperty();

@@ -297,7 +296,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
					criteria = criteria.not();
				}

				return addAppropriateLikeRegexTo(criteria, part, parameters.nextConverted(property).toString());
				return addAppropriateLikeRegexTo(criteria, part, parameters.next().toString());

			case NEVER:
				// intentional no-op
@@ -319,10 +318,10 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
	 * @return
	 */
	private Criteria createContainingCriteria(Part part, MongoPersistentProperty property, Criteria criteria,
			PotentiallyConvertingIterator parameters) {
			Iterator<Object> parameters) {

		if (property.isCollectionLike()) {
			return criteria.in(nextAsArray(parameters, property));
			return criteria.in(nextAsArray(parameters));
		}

		return addAppropriateLikeRegexTo(criteria, part, parameters.next().toString());
@@ -377,8 +376,9 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
					String.format("Expected parameter type of %s but got %s!", type, parameter.getClass()));
	}

	private Object[] nextAsArray(PotentiallyConvertingIterator iterator, MongoPersistentProperty property) {
		Object next = iterator.nextConverted(property);
	private Object[] nextAsArray(Iterator<Object> iterator) {

		Object next = iterator.next();

		if (next instanceof Collection) {
			return ((Collection<?>) next).toArray();
@@ -390,61 +390,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
	}

	private String toLikeRegex(String source, Part part) {

		Type type = part.getType();
		String regex = prepareAndEscapeStringBeforeApplyingLikeRegex(source, part);

		switch (type) {
			case STARTING_WITH:
				regex = "^" + regex;
				break;
			case ENDING_WITH:
				regex = regex + "$";
				break;
			case CONTAINING:
			case NOT_CONTAINING:
				regex = ".*" + regex + ".*";
				break;
			case SIMPLE_PROPERTY:
			case NEGATING_SIMPLE_PROPERTY:
				regex = "^" + regex + "$";
			default:
		}

		return regex;
	}

	private String prepareAndEscapeStringBeforeApplyingLikeRegex(String source, Part qpart) {

		if (!ObjectUtils.nullSafeEquals(Type.LIKE, qpart.getType())) {
			return PUNCTATION_PATTERN.matcher(source).find() ? Pattern.quote(source) : source;
		}

		if ("*".equals(source)) {
			return ".*";
		}

		StringBuilder sb = new StringBuilder();

		boolean leadingWildcard = source.startsWith("*");
		boolean trailingWildcard = source.endsWith("*");

		String valueToUse = source.substring(leadingWildcard ? 1 : 0,
				trailingWildcard ? source.length() - 1 : source.length());

		if (PUNCTATION_PATTERN.matcher(valueToUse).find()) {
			valueToUse = Pattern.quote(valueToUse);
		}

		if (leadingWildcard) {
			sb.append(".*");
		}
		sb.append(valueToUse);
		if (trailingWildcard) {
			sb.append(".*");
		}

		return sb.toString();
		return MongoRegexCreator.INSTANCE.toRegularExpression(source, part.getType());
	}

	private boolean isSpherical(MongoPersistentProperty property) {
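As a quick orientation for the keyword handling above, the following illustrative sketch (not part of the diff; the Person type and method names are assumptions) shows how derived query methods map onto the criteria calls in MongoQueryCreator.from(...).

// Illustrative sketch only: each derived method name maps to one of the switch cases above.
public interface PersonRepository extends MongoRepository<Person, String> {

	List<Person> findByAgeGreaterThan(int age);                   // GREATER_THAN -> criteria.gt(...)
	List<Person> findByAgeBetween(int from, int to);              // BETWEEN -> criteria.gt(...).lt(...)
	List<Person> findByLastnameIn(Collection<String> lastnames);  // IN -> criteria.in(...)
	List<Person> findByFirstnameStartingWith(String prefix);      // STARTING_WITH -> like-regex "^prefix"
}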
@@ -0,0 +1,381 @@
|
||||
/*
|
||||
* Copyright 2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.query;
|
||||
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.convert.EntityInstantiators;
|
||||
import org.springframework.data.domain.PageImpl;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.Range;
|
||||
import org.springframework.data.domain.Slice;
|
||||
import org.springframework.data.domain.SliceImpl;
|
||||
import org.springframework.data.geo.Distance;
|
||||
import org.springframework.data.geo.GeoPage;
|
||||
import org.springframework.data.geo.GeoResult;
|
||||
import org.springframework.data.geo.GeoResults;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.repository.query.ResultProcessor;
|
||||
import org.springframework.data.repository.query.ReturnedType;
|
||||
import org.springframework.data.util.CloseableIterator;
|
||||
import org.springframework.data.util.StreamUtils;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.WriteResult;
|
||||
|
||||
interface MongoQueryExecution {
|
||||
|
||||
Object execute(Query query, Class<?> type, String collection);
|
||||
|
||||
/**
|
||||
* {@link MongoQueryExecution} for collection returning queries.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
static final class CollectionExecution implements MongoQueryExecution {
|
||||
|
||||
private final @NonNull MongoOperations operations;
|
||||
private final Pageable pageable;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public Object execute(Query query, Class<?> type, String collection) {
|
||||
return operations.find(query.with(pageable), type, collection);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link MongoQueryExecution} for {@link Slice} query methods.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @since 1.5
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
static final class SlicedExecution implements MongoQueryExecution {
|
||||
|
||||
private final @NonNull MongoOperations operations;
|
||||
private final @NonNull Pageable pageable;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
public Object execute(Query query, Class<?> type, String collection) {
|
||||
|
||||
int pageSize = pageable.getPageSize();
|
||||
|
||||
// Apply Pageable but tweak limit to peek into next page
|
||||
Query modifiedQuery = query.with(pageable).limit(pageSize + 1);
|
||||
List result = operations.find(modifiedQuery, type, collection);
|
||||
|
||||
boolean hasNext = result.size() > pageSize;
|
||||
|
||||
return new SliceImpl<Object>(hasNext ? result.subList(0, pageSize) : result, pageable, hasNext);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link MongoQueryExecution} for pagination queries.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
static final class PagedExecution implements MongoQueryExecution {
|
||||
|
||||
private final @NonNull MongoOperations operations;
|
||||
private final @NonNull Pageable pageable;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings({ "rawtypes", "unchecked" })
|
||||
public Object execute(Query query, Class<?> type, String collection) {
|
||||
|
||||
int overallLimit = query.getLimit();
|
||||
long count = operations.count(query, type, collection);
|
||||
count = overallLimit != 0 ? Math.min(count, query.getLimit()) : count;
|
||||
|
||||
boolean pageableOutOfScope = pageable.getOffset() > count;
|
||||
|
||||
if (pageableOutOfScope) {
|
||||
return new PageImpl<Object>(Collections.emptyList(), pageable, count);
|
||||
}
|
||||
|
||||
// Apply raw pagination
|
||||
query = query.with(pageable);
|
||||
|
||||
// Adjust limit if page would exceed the overall limit
|
||||
if (overallLimit != 0 && pageable.getOffset() + pageable.getPageSize() > overallLimit) {
|
||||
query.limit(overallLimit - pageable.getOffset());
|
||||
}
|
||||
|
||||
List<?> result = operations.find(query, type, collection);
|
||||
return new PageImpl(result, pageable, count);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link MongoQueryExecution} to return a single entity.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
static final class SingleEntityExecution implements MongoQueryExecution {
|
||||
|
||||
private final MongoOperations operations;
|
||||
private final boolean countProjection;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public Object execute(Query query, Class<?> type, String collection) {
|
||||
return countProjection ? operations.count(query, type, collection) : operations.findOne(query, type, collection);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link MongoQueryExecution} to execute geo-near queries.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
static class GeoNearExecution implements MongoQueryExecution {
|
||||
|
||||
private final MongoOperations operations;
|
||||
private final MongoParameterAccessor accessor;
|
||||
private final TypeInformation<?> returnType;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public Object execute(Query query, Class<?> type, String collection) {
|
||||
|
||||
GeoResults<?> results = doExecuteQuery(query, type, collection);
|
||||
return isListOfGeoResult() ? results.getContent() : results;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
protected GeoResults<Object> doExecuteQuery(Query query, Class<?> type, String collection) {
|
||||
|
||||
Point nearLocation = accessor.getGeoNearLocation();
|
||||
NearQuery nearQuery = NearQuery.near(nearLocation);
|
||||
|
||||
if (query != null) {
|
||||
nearQuery.query(query);
|
||||
}
|
||||
|
||||
Range<Distance> distances = accessor.getDistanceRange();
|
||||
Distance maxDistance = distances.getUpperBound();
|
||||
|
||||
if (maxDistance != null) {
|
||||
nearQuery.maxDistance(maxDistance).in(maxDistance.getMetric());
|
||||
}
|
||||
|
||||
Distance minDistance = distances.getLowerBound();
|
||||
|
||||
if (minDistance != null) {
|
||||
nearQuery.minDistance(minDistance).in(minDistance.getMetric());
|
||||
}
|
||||
|
||||
Pageable pageable = accessor.getPageable();
|
||||
|
||||
if (pageable != null) {
|
||||
nearQuery.with(pageable);
|
||||
}
|
||||
|
||||
return (GeoResults<Object>) operations.geoNear(nearQuery, type, collection);
|
||||
}
|
||||
|
||||
private boolean isListOfGeoResult() {
|
||||
|
||||
if (!returnType.getType().equals(List.class)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
TypeInformation<?> componentType = returnType.getComponentType();
|
||||
return componentType == null ? false : GeoResult.class.equals(componentType.getType());
|
||||
}
|
||||
}
|
||||
|
||||
static final class PagingGeoNearExecution extends GeoNearExecution {
|
||||
|
||||
private final MongoOperations operations;
|
||||
private final MongoParameterAccessor accessor;
|
||||
private final AbstractMongoQuery mongoQuery;
|
||||
|
||||
public PagingGeoNearExecution(MongoOperations operations, MongoParameterAccessor accessor,
|
||||
TypeInformation<?> returnType, AbstractMongoQuery query) {
|
||||
|
||||
super(operations, accessor, returnType);
|
||||
|
||||
this.accessor = accessor;
|
||||
this.operations = operations;
|
||||
this.mongoQuery = query;
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes the given {@link Query} to return a page.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @param countQuery must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@Override
|
||||
public Object execute(Query query, Class<?> type, String collection) {
|
||||
|
||||
ConvertingParameterAccessor parameterAccessor = new ConvertingParameterAccessor(operations.getConverter(),
|
||||
accessor);
|
||||
Query countQuery = mongoQuery.applyQueryMetaAttributesWhenPresent(mongoQuery.createCountQuery(parameterAccessor));
|
||||
long count = operations.count(countQuery, collection);
|
||||
|
||||
return new GeoPage<Object>(doExecuteQuery(query, type, collection), accessor.getPageable(), count);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link MongoQueryExecution} removing documents matching the query.
|
||||
*
|
||||
* @since 1.5
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
static final class DeleteExecution implements MongoQueryExecution {
|
||||
|
||||
private final MongoOperations operations;
|
||||
private final MongoQueryMethod method;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public Object execute(Query query, Class<?> type, String collection) {
|
||||
|
||||
if (method.isCollectionQuery()) {
|
||||
return operations.findAllAndRemove(query, type, collection);
|
||||
}
|
||||
|
||||
WriteResult writeResult = operations.remove(query, type, collection);
|
||||
return writeResult != null ? writeResult.getN() : 0L;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Thomas Darimont
|
||||
* @since 1.7
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
static final class StreamExecution implements MongoQueryExecution {
|
||||
|
||||
private final @NonNull MongoOperations operations;
|
||||
private final @NonNull Converter<Object, Object> resultProcessing;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public Object execute(Query query, Class<?> type, String collection) {
|
||||
|
||||
return StreamUtils.createStreamFromIterator((CloseableIterator<Object>) operations.stream(query, type))
|
||||
.map(new Function<Object, Object>() {
|
||||
|
||||
@Override
|
||||
public Object apply(Object t) {
|
||||
return resultProcessing.convert(t);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* An {@link MongoQueryExecution} that wraps the results of the given delegate with the given result processing.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @since 1.9
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
static final class ResultProcessingExecution implements MongoQueryExecution {
|
||||
|
||||
private final @NonNull MongoQueryExecution delegate;
|
||||
private final @NonNull Converter<Object, Object> converter;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public Object execute(Query query, Class<?> type, String collection) {
|
||||
return converter.convert(delegate.execute(query, type, collection));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link Converter} to post-process all source objects using the given {@link ResultProcessor}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @since 1.9
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
static final class ResultProcessingConverter implements Converter<Object, Object> {
|
||||
|
||||
private final @NonNull ResultProcessor processor;
|
||||
private final @NonNull MongoOperations operations;
|
||||
private final @NonNull EntityInstantiators instantiators;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public Object convert(Object source) {
|
||||
|
||||
ReturnedType returnedType = processor.getReturnedType();
|
||||
|
||||
if (ClassUtils.isPrimitiveOrWrapper(returnedType.getReturnedType())) {
|
||||
return source;
|
||||
}
|
||||
|
||||
Converter<Object, Object> converter = new DtoInstantiatingConverter(returnedType.getReturnedType(),
|
||||
operations.getConverter().getMappingContext(), instantiators);
|
||||
|
||||
return processor.processResult(source, converter);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,5 @@
/*
 * Copyright 2011-2014 the original author or authors.
 * Copyright 2011-2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -20,6 +20,7 @@ import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.List;

import org.springframework.core.annotation.AnnotatedElementUtils;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.data.geo.GeoPage;
import org.springframework.data.geo.GeoResult;
@@ -29,6 +30,7 @@ import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.repository.Meta;
import org.springframework.data.mongodb.repository.Query;
import org.springframework.data.projection.ProjectionFactory;
import org.springframework.data.repository.core.RepositoryMetadata;
import org.springframework.data.repository.query.QueryMethod;
import org.springframework.data.util.ClassTypeInformation;
@@ -42,6 +44,7 @@ import org.springframework.util.StringUtils;
 *
 * @author Oliver Gierke
 * @author Christoph Strobl
 * @author Mark Paluch
 */
public class MongoQueryMethod extends QueryMethod {

@@ -56,12 +59,15 @@ public class MongoQueryMethod extends QueryMethod {
	/**
	 * Creates a new {@link MongoQueryMethod} from the given {@link Method}.
	 *
	 * @param method
	 * @param method must not be {@literal null}.
	 * @param metadata must not be {@literal null}.
	 * @param projectionFactory must not be {@literal null}.
	 * @param mappingContext must not be {@literal null}.
	 */
	public MongoQueryMethod(Method method, RepositoryMetadata metadata,
	public MongoQueryMethod(Method method, RepositoryMetadata metadata, ProjectionFactory projectionFactory,
			MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {

		super(method, metadata);
		super(method, metadata, projectionFactory);

		Assert.notNull(mappingContext, "MappingContext must not be null!");

@@ -132,7 +138,8 @@ public class MongoQueryMethod extends QueryMethod {

		MongoPersistentEntity<?> returnedEntity = mappingContext.getPersistentEntity(returnedObjectType);
		MongoPersistentEntity<?> managedEntity = mappingContext.getPersistentEntity(domainClass);
		returnedEntity = returnedEntity == null ? managedEntity : returnedEntity;
		returnedEntity = returnedEntity == null || returnedEntity.getType().isInterface() ? managedEntity
				: returnedEntity;
		MongoPersistentEntity<?> collectionEntity = domainClass.isAssignableFrom(returnedObjectType) ? returnedEntity
				: managedEntity;

@@ -186,7 +193,7 @@ public class MongoQueryMethod extends QueryMethod {
	 * @return
	 */
	Query getQueryAnnotation() {
		return method.getAnnotation(Query.class);
		return AnnotatedElementUtils.findMergedAnnotation(method, Query.class);
	}

	TypeInformation<?> getReturnType() {
@@ -208,7 +215,7 @@ public class MongoQueryMethod extends QueryMethod {
	 * @since 1.6
	 */
	Meta getMetaAnnotation() {
		return method.getAnnotation(Meta.class);
		return AnnotatedElementUtils.findMergedAnnotation(method, Meta.class);
	}

	/**
@@ -20,10 +20,13 @@ import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Field;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.TextCriteria;
import org.springframework.data.repository.query.QueryMethod;
import org.springframework.data.repository.query.RepositoryQuery;
import org.springframework.data.repository.query.ResultProcessor;
import org.springframework.data.repository.query.ReturnedType;
import org.springframework.data.repository.query.parser.PartTree;
import org.springframework.util.StringUtils;

@@ -34,23 +37,27 @@ import com.mongodb.util.JSONParseException;
 *
 * @author Oliver Gierke
 * @author Christoph Strobl
 * @author Thomas Darimont
 */
public class PartTreeMongoQuery extends AbstractMongoQuery {

	private final PartTree tree;
	private final boolean isGeoNearQuery;
	private final MappingContext<?, MongoPersistentProperty> context;
	private final ResultProcessor processor;

	/**
	 * Creates a new {@link PartTreeMongoQuery} from the given {@link QueryMethod} and {@link MongoTemplate}.
	 *
	 * @param method must not be {@literal null}.
	 * @param template must not be {@literal null}.
	 * @param mongoOperations must not be {@literal null}.
	 */
	public PartTreeMongoQuery(MongoQueryMethod method, MongoOperations mongoOperations) {

		super(method, mongoOperations);
		this.tree = new PartTree(method.getName(), method.getEntityInformation().getJavaType());

		this.processor = method.getResultProcessor();
		this.tree = new PartTree(method.getName(), processor.getReturnedType().getDomainType());
		this.isGeoNearQuery = method.isGeoNearQuery();
		this.context = mongoOperations.getConverter().getMappingContext();
	}
@@ -86,6 +93,18 @@ public class PartTreeMongoQuery extends AbstractMongoQuery {
		String fieldSpec = this.getQueryMethod().getFieldSpecification();

		if (!StringUtils.hasText(fieldSpec)) {

			ReturnedType returnedType = processor.withDynamicProjection(accessor).getReturnedType();

			if (returnedType.isProjecting()) {

				Field fields = query.fields();

				for (String field : returnedType.getInputProperties()) {
					fields.include(field);
				}
			}

			return query;
		}

@@ -21,14 +21,15 @@ import java.util.List;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import javax.xml.bind.DatatypeConverter;
|
||||
|
||||
import org.bson.BSON;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.repository.query.ExpressionEvaluatingParameterBinder.BindingContext;
|
||||
import org.springframework.data.repository.query.EvaluationContextProvider;
|
||||
import org.springframework.expression.spel.standard.SpelExpressionParser;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
@@ -46,7 +47,7 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
|
||||
private static final String COUND_AND_DELETE = "Manually defined query for %s cannot be both a count and delete query at the same time!";
|
||||
private static final Logger LOG = LoggerFactory.getLogger(StringBasedMongoQuery.class);
|
||||
private static final ParameterBindingParser PARSER = ParameterBindingParser.INSTANCE;
|
||||
private static final ParameterBindingParser BINDING_PARSER = ParameterBindingParser.INSTANCE;
|
||||
|
||||
private final String query;
|
||||
private final String fieldSpec;
|
||||
@@ -54,33 +55,45 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
private final boolean isDeleteQuery;
|
||||
private final List<ParameterBinding> queryParameterBindings;
|
||||
private final List<ParameterBinding> fieldSpecParameterBindings;
|
||||
private final ExpressionEvaluatingParameterBinder parameterBinder;
|
||||
|
||||
/**
|
||||
* Creates a new {@link StringBasedMongoQuery} for the given {@link MongoQueryMethod} and {@link MongoOperations}.
|
||||
*
|
||||
* @param method must not be {@literal null}.
|
||||
* @param mongoOperations must not be {@literal null}.
|
||||
* @param expressionParser must not be {@literal null}.
|
||||
* @param evaluationContextProvider must not be {@literal null}.
|
||||
*/
|
||||
public StringBasedMongoQuery(MongoQueryMethod method, MongoOperations mongoOperations) {
|
||||
this(method.getAnnotatedQuery(), method, mongoOperations);
|
||||
public StringBasedMongoQuery(MongoQueryMethod method, MongoOperations mongoOperations,
|
||||
SpelExpressionParser expressionParser, EvaluationContextProvider evaluationContextProvider) {
|
||||
this(method.getAnnotatedQuery(), method, mongoOperations, expressionParser, evaluationContextProvider);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link StringBasedMongoQuery} for the given {@link String}, {@link MongoQueryMethod} and
|
||||
* {@link MongoOperations}.
|
||||
*
|
||||
* Creates a new {@link StringBasedMongoQuery} for the given {@link String}, {@link MongoQueryMethod},
|
||||
* {@link MongoOperations}, {@link SpelExpressionParser} and {@link EvaluationContextProvider}.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @param method must not be {@literal null}.
|
||||
* @param template must not be {@literal null}.
|
||||
* @param mongoOperations must not be {@literal null}.
|
||||
* @param expressionParser must not be {@literal null}.
|
||||
*/
|
||||
public StringBasedMongoQuery(String query, MongoQueryMethod method, MongoOperations mongoOperations) {
|
||||
public StringBasedMongoQuery(String query, MongoQueryMethod method, MongoOperations mongoOperations,
|
||||
SpelExpressionParser expressionParser, EvaluationContextProvider evaluationContextProvider) {
|
||||
|
||||
super(method, mongoOperations);
|
||||
|
||||
this.query = query;
|
||||
this.queryParameterBindings = PARSER.parseParameterBindingsFrom(query);
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.notNull(expressionParser, "SpelExpressionParser must not be null!");
|
||||
|
||||
this.fieldSpec = method.getFieldSpecification();
|
||||
this.fieldSpecParameterBindings = PARSER.parseParameterBindingsFrom(method.getFieldSpecification());
|
||||
this.queryParameterBindings = new ArrayList<ParameterBinding>();
|
||||
this.query = BINDING_PARSER.parseAndCollectParameterBindingsFromQueryIntoBindings(query,
|
||||
this.queryParameterBindings);
|
||||
|
||||
this.fieldSpecParameterBindings = new ArrayList<ParameterBinding>();
|
||||
this.fieldSpec = BINDING_PARSER.parseAndCollectParameterBindingsFromQueryIntoBindings(
|
||||
method.getFieldSpecification(), this.fieldSpecParameterBindings);
|
||||
|
||||
this.isCountQuery = method.hasAnnotatedQuery() ? method.getQueryAnnotation().count() : false;
|
||||
this.isDeleteQuery = method.hasAnnotatedQuery() ? method.getQueryAnnotation().delete() : false;
|
||||
@@ -88,6 +101,8 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
if (isCountQuery && isDeleteQuery) {
|
||||
throw new IllegalArgumentException(String.format(COUND_AND_DELETE, method));
|
||||
}
|
||||
|
||||
this.parameterBinder = new ExpressionEvaluatingParameterBinder(expressionParser, evaluationContextProvider);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -97,21 +112,15 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
@Override
|
||||
protected Query createQuery(ConvertingParameterAccessor accessor) {
|
||||
|
||||
String queryString = replacePlaceholders(query, accessor, queryParameterBindings);
|
||||
String queryString = parameterBinder.bind(this.query, accessor, new BindingContext(getQueryMethod()
|
||||
.getParameters(), queryParameterBindings));
|
||||
String fieldsString = parameterBinder.bind(this.fieldSpec, accessor, new BindingContext(getQueryMethod()
|
||||
.getParameters(), fieldSpecParameterBindings));
|
||||
|
||||
Query query = null;
|
||||
|
||||
if (fieldSpec != null) {
|
||||
String fieldString = replacePlaceholders(fieldSpec, accessor, fieldSpecParameterBindings);
|
||||
query = new BasicQuery(queryString, fieldString);
|
||||
} else {
|
||||
query = new BasicQuery(queryString);
|
||||
}
|
||||
|
||||
query.with(accessor.getSort());
|
||||
Query query = new BasicQuery(queryString, fieldsString).with(accessor.getSort());
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug(String.format("Created query %s", query.getQueryObject()));
|
||||
LOG.debug(String.format("Created query %s for %s fields.", query.getQueryObject(), query.getFieldsObject()));
|
||||
}
|
||||
|
||||
return query;
|
||||
@@ -135,62 +144,6 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
return this.isDeleteQuery;
|
||||
}
|
||||
|
||||
/**
|
||||
* Replaced the parameter place-holders with the actual parameter values from the given {@link ParameterBinding}s.
|
||||
*
|
||||
* @param input
|
||||
* @param accessor
|
||||
* @param bindings
|
||||
* @return
|
||||
*/
|
||||
private String replacePlaceholders(String input, ConvertingParameterAccessor accessor, List<ParameterBinding> bindings) {
|
||||
|
||||
if (bindings.isEmpty()) {
|
||||
return input;
|
||||
}
|
||||
|
||||
StringBuilder result = new StringBuilder(input);
|
||||
|
||||
for (ParameterBinding binding : bindings) {
|
||||
|
||||
String parameter = binding.getParameter();
|
||||
int idx = result.indexOf(parameter);
|
||||
|
||||
if (idx != -1) {
|
||||
result.replace(idx, idx + parameter.length(), getParameterValueForBinding(accessor, binding));
|
||||
}
|
||||
}
|
||||
|
||||
return result.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the serialized value to be used for the given {@link ParameterBinding}.
|
||||
*
|
||||
* @param accessor
|
||||
* @param binding
|
||||
* @return
|
||||
*/
|
||||
private String getParameterValueForBinding(ConvertingParameterAccessor accessor, ParameterBinding binding) {
|
||||
|
||||
Object value = accessor.getBindableValue(binding.getParameterIndex());
|
||||
|
||||
if (value instanceof String && binding.isQuoted()) {
|
||||
return (String) value;
|
||||
}
|
||||
|
||||
if (value instanceof byte[]) {
|
||||
|
||||
String base64representation = DatatypeConverter.printBase64Binary((byte[]) value);
|
||||
if (!binding.isQuoted()) {
|
||||
return "{ '$binary' : '" + base64representation + "', '$type' : " + BSON.B_GENERAL + "}";
|
||||
}
|
||||
return base64representation;
|
||||
}
|
||||
|
||||
return JSON.serialize(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* A parser that extracts the parameter bindings from a given query string.
|
||||
*
|
||||
@@ -200,6 +153,12 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
private static final String EXPRESSION_PARAM_QUOTE = "'";
|
||||
private static final String EXPRESSION_PARAM_PREFIX = "?expr";
|
||||
private static final String INDEX_BASED_EXPRESSION_PARAM_START = "?#{";
|
||||
private static final String NAME_BASED_EXPRESSION_PARAM_START = ":#{";
|
||||
private static final char CURRLY_BRACE_OPEN = '{';
|
||||
private static final char CURRLY_BRACE_CLOSE = '}';
|
||||
private static final String PARAMETER_PREFIX = "_param_";
|
||||
private static final String PARSEABLE_PARAMETER = "\"" + PARAMETER_PREFIX + "$1\"";
|
||||
private static final Pattern PARAMETER_BINDING_PATTERN = Pattern.compile("\\?(\\d+)");
|
||||
@@ -211,34 +170,84 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
* Returns a list of {@link ParameterBinding}s found in the given {@code input} or an
|
||||
* {@link Collections#emptyList()}.
|
||||
*
|
||||
* @param input
|
||||
* @param conversionService must not be {@literal null}.
|
||||
* @param input can be {@literal null} or empty.
|
||||
* @param bindings must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public List<ParameterBinding> parseParameterBindingsFrom(String input) {
|
||||
public String parseAndCollectParameterBindingsFromQueryIntoBindings(String input, List<ParameterBinding> bindings) {
|
||||
|
||||
if (!StringUtils.hasText(input)) {
|
||||
return Collections.emptyList();
|
||||
return input;
|
||||
}
|
||||
|
||||
List<ParameterBinding> bindings = new ArrayList<ParameterBinding>();
|
||||
Assert.notNull(bindings, "Parameter bindings must not be null!");
|
||||
|
||||
String parseableInput = makeParameterReferencesParseable(input);
|
||||
String transformedInput = transformQueryAndCollectExpressionParametersIntoBindings(input, bindings);
|
||||
String parseableInput = makeParameterReferencesParseable(transformedInput);
|
||||
|
||||
collectParameterReferencesIntoBindings(bindings, JSON.parse(parseableInput));
|
||||
|
||||
return bindings;
|
||||
return transformedInput;
|
||||
}
|
||||
|
||||
private String makeParameterReferencesParseable(String input) {
|
||||
private static String transformQueryAndCollectExpressionParametersIntoBindings(String input,
|
||||
List<ParameterBinding> bindings) {
|
||||
|
||||
StringBuilder result = new StringBuilder();
|
||||
|
||||
int startIndex = 0;
|
||||
int currentPos = 0;
|
||||
int exprIndex = 0;
|
||||
|
||||
while (currentPos < input.length()) {
|
||||
|
||||
int indexOfExpressionParameter = getIndexOfExpressionParameter(input, currentPos);
|
||||
|
||||
// no expression parameter found
|
||||
if (indexOfExpressionParameter < 0) {
|
||||
break;
|
||||
}
|
||||
|
||||
int exprStart = indexOfExpressionParameter + 3;
|
||||
currentPos = exprStart;
|
||||
|
||||
// eat parameter expression
|
||||
int curlyBraceOpenCnt = 1;
|
||||
|
||||
while (curlyBraceOpenCnt > 0) {
|
||||
switch (input.charAt(currentPos++)) {
|
||||
case CURRLY_BRACE_OPEN:
|
||||
curlyBraceOpenCnt++;
|
||||
break;
|
||||
case CURRLY_BRACE_CLOSE:
|
||||
curlyBraceOpenCnt--;
|
||||
break;
|
||||
default:
|
||||
}
|
||||
}
|
||||
|
||||
result.append(input.subSequence(startIndex, indexOfExpressionParameter));
|
||||
result.append(EXPRESSION_PARAM_QUOTE).append(EXPRESSION_PARAM_PREFIX);
|
||||
result.append(exprIndex);
|
||||
result.append(EXPRESSION_PARAM_QUOTE);
|
||||
|
||||
bindings.add(new ParameterBinding(exprIndex, true, input.substring(exprStart, currentPos - 1)));
|
||||
|
||||
startIndex = currentPos;
|
||||
|
||||
exprIndex++;
|
||||
}
|
||||
|
||||
return result.append(input.subSequence(currentPos, input.length())).toString();
|
||||
}
|
||||
|
||||
private static String makeParameterReferencesParseable(String input) {
|
||||
|
||||
Matcher matcher = PARAMETER_BINDING_PATTERN.matcher(input);
|
||||
String parseableInput = matcher.replaceAll(PARSEABLE_PARAMETER);
|
||||
|
||||
return parseableInput;
|
||||
return matcher.replaceAll(PARSEABLE_PARAMETER);
|
||||
}
|
||||
|
||||
private void collectParameterReferencesIntoBindings(List<ParameterBinding> bindings, Object value) {
|
||||
private static void collectParameterReferencesIntoBindings(List<ParameterBinding> bindings, Object value) {
|
||||
|
||||
if (value instanceof String) {
|
||||
|
||||
@@ -248,9 +257,10 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
} else if (value instanceof Pattern) {
|
||||
|
||||
String string = ((Pattern) value).toString().trim();
|
||||
|
||||
Matcher valueMatcher = PARSEABLE_BINDING_PATTERN.matcher(string);
|
||||
|
||||
while (valueMatcher.find()) {
|
||||
|
||||
int paramIndex = Integer.parseInt(valueMatcher.group(PARAMETER_INDEX_GROUP));
|
||||
|
||||
/*
|
||||
@@ -280,7 +290,7 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
}
|
||||
}
|
||||
|
||||
private void potentiallyAddBinding(String source, List<ParameterBinding> bindings) {
|
||||
private static void potentiallyAddBinding(String source, List<ParameterBinding> bindings) {
|
||||
|
||||
Matcher valueMatcher = PARSEABLE_BINDING_PATTERN.matcher(source);
|
||||
|
||||
@@ -293,6 +303,14 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
bindings.add(new ParameterBinding(paramIndex, quoted));
|
||||
}
|
||||
}
|
||||
|
||||
private static int getIndexOfExpressionParameter(String input, int position) {
|
||||
|
||||
int indexOfExpressionParameter = input.indexOf(INDEX_BASED_EXPRESSION_PARAM_START, position);
|
||||
|
||||
return indexOfExpressionParameter < 0 ? input.indexOf(NAME_BASED_EXPRESSION_PARAM_START, position)
|
||||
: indexOfExpressionParameter;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -300,10 +318,11 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
private static class ParameterBinding {
|
||||
static class ParameterBinding {
|
||||
|
||||
private final int parameterIndex;
|
||||
private final boolean quoted;
|
||||
private final String expression;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ParameterBinding} with the given {@code parameterIndex} and {@code quoted} information.
|
||||
@@ -312,9 +331,14 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
* @param quoted whether or not the parameter is already quoted.
|
||||
*/
|
||||
public ParameterBinding(int parameterIndex, boolean quoted) {
|
||||
this(parameterIndex, quoted, null);
|
||||
}
|
||||
|
||||
public ParameterBinding(int parameterIndex, boolean quoted, String expression) {
|
||||
|
||||
this.parameterIndex = parameterIndex;
|
||||
this.quoted = quoted;
|
||||
this.expression = expression;
|
||||
}
|
||||
|
||||
public boolean isQuoted() {
|
||||
@@ -326,7 +350,15 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
}
|
||||
|
||||
public String getParameter() {
|
||||
return "?" + parameterIndex;
|
||||
return "?" + (isExpression() ? "expr" : "") + parameterIndex;
|
||||
}
|
||||
|
||||
public String getExpression() {
|
||||
return expression;
|
||||
}
|
||||
|
||||
public boolean isExpression() {
|
||||
return this.expression != null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
/*
 * Copyright (c) 2011 by the original author(s).
 * Copyright 2011-2015 by the original author(s).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.repository.support;

import java.io.Serializable;

import org.bson.types.ObjectId;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.repository.query.MongoEntityInformation;
import org.springframework.data.repository.core.support.PersistentEntityInformation;
@@ -27,12 +28,14 @@ import org.springframework.data.repository.core.support.PersistentEntityInformat
 * {@link MongoPersistentEntity} if given.
 *
 * @author Oliver Gierke
 * @author Christoph Strobl
 */
public class MappingMongoEntityInformation<T, ID extends Serializable> extends PersistentEntityInformation<T, ID>
		implements MongoEntityInformation<T, ID> {

	private final MongoPersistentEntity<T> entityMetadata;
	private final String customCollectionName;
	private final Class<ID> fallbackIdType;

	/**
	 * Creates a new {@link MappingMongoEntityInformation} for the given {@link MongoPersistentEntity}.
@@ -40,7 +43,18 @@ public class MappingMongoEntityInformation<T, ID extends Serializable> extends P
	 * @param entity must not be {@literal null}.
	 */
	public MappingMongoEntityInformation(MongoPersistentEntity<T> entity) {
		this(entity, null);
		this(entity, null, null);
	}

	/**
	 * Creates a new {@link MappingMongoEntityInformation} for the given {@link MongoPersistentEntity} and fallback
	 * identifier type.
	 *
	 * @param entity must not be {@literal null}.
	 * @param fallbackIdType can be {@literal null}.
	 */
	public MappingMongoEntityInformation(MongoPersistentEntity<T> entity, Class<ID> fallbackIdType) {
		this(entity, (String) null, fallbackIdType);
	}

	/**
@@ -51,11 +65,26 @@ public class MappingMongoEntityInformation<T, ID extends Serializable> extends P
	 * @param customCollectionName can be {@literal null}.
	 */
	public MappingMongoEntityInformation(MongoPersistentEntity<T> entity, String customCollectionName) {
		this(entity, customCollectionName, null);
	}

	/**
	 * Creates a new {@link MappingMongoEntityInformation} for the given {@link MongoPersistentEntity}, collection name
	 * and identifier type.
	 *
	 * @param entity must not be {@literal null}.
	 * @param customCollectionName can be {@literal null}.
	 * @param idType can be {@literal null}.
	 */
	@SuppressWarnings("unchecked")
	private MappingMongoEntityInformation(MongoPersistentEntity<T> entity, String customCollectionName,
			Class<ID> idType) {

		super(entity);

		this.entityMetadata = entity;
		this.customCollectionName = customCollectionName;
		this.fallbackIdType = idType != null ? idType : (Class<ID>) ObjectId.class;
	}

	/* (non-Javadoc)
@@ -71,4 +100,19 @@ public class MappingMongoEntityInformation<T, ID extends Serializable> extends P
	public String getIdAttribute() {
		return entityMetadata.getIdProperty().getName();
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.core.support.PersistentEntityInformation#getIdType()
	 */
	@Override
	@SuppressWarnings("unchecked")
	public Class<ID> getIdType() {

		if (this.entityMetadata.hasIdProperty()) {
			return super.getIdType();
		}

		return fallbackIdType != null ? fallbackIdType : (Class<ID>) ObjectId.class;
	}
}
@@ -1,5 +1,5 @@
/*
 * Copyright 2011-2013 the original author or authors.
 * Copyright 2011-2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -25,14 +25,14 @@ import javax.tools.Diagnostic;

import org.springframework.data.mongodb.core.mapping.Document;

import com.mysema.query.annotations.QueryEmbeddable;
import com.mysema.query.annotations.QueryEmbedded;
import com.mysema.query.annotations.QueryEntities;
import com.mysema.query.annotations.QuerySupertype;
import com.mysema.query.annotations.QueryTransient;
import com.mysema.query.apt.AbstractQuerydslProcessor;
import com.mysema.query.apt.Configuration;
import com.mysema.query.apt.DefaultConfiguration;
import com.querydsl.apt.AbstractQuerydslProcessor;
import com.querydsl.apt.Configuration;
import com.querydsl.apt.DefaultConfiguration;
import com.querydsl.core.annotations.QueryEmbeddable;
import com.querydsl.core.annotations.QueryEmbedded;
import com.querydsl.core.annotations.QueryEntities;
import com.querydsl.core.annotations.QuerySupertype;
import com.querydsl.core.annotations.QueryTransient;

/**
 * Annotation processor to create Querydsl query types for QueryDsl annotated classes.
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2012 the original author or authors.
|
||||
* Copyright 2010-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -30,34 +30,43 @@ import org.springframework.data.mongodb.repository.query.MongoEntityInformation;
|
||||
import org.springframework.data.mongodb.repository.query.MongoQueryMethod;
|
||||
import org.springframework.data.mongodb.repository.query.PartTreeMongoQuery;
|
||||
import org.springframework.data.mongodb.repository.query.StringBasedMongoQuery;
|
||||
import org.springframework.data.projection.ProjectionFactory;
|
||||
import org.springframework.data.querydsl.QueryDslPredicateExecutor;
|
||||
import org.springframework.data.repository.core.NamedQueries;
|
||||
import org.springframework.data.repository.core.RepositoryInformation;
|
||||
import org.springframework.data.repository.core.RepositoryMetadata;
|
||||
import org.springframework.data.repository.core.support.RepositoryFactorySupport;
|
||||
import org.springframework.data.repository.query.EvaluationContextProvider;
|
||||
import org.springframework.data.repository.query.QueryLookupStrategy;
|
||||
import org.springframework.data.repository.query.QueryLookupStrategy.Key;
|
||||
import org.springframework.data.repository.query.RepositoryQuery;
|
||||
import org.springframework.expression.spel.standard.SpelExpressionParser;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* Factory to create {@link MongoRepository} instances.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class MongoRepositoryFactory extends RepositoryFactorySupport {
|
||||
|
||||
private final MongoOperations mongoOperations;
|
||||
private static final SpelExpressionParser EXPRESSION_PARSER = new SpelExpressionParser();
|
||||
|
||||
private final MongoOperations operations;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
|
||||
/**
|
||||
* Creates a new {@link MongoRepositoryFactory} with the given {@link MongoOperations}.
|
||||
*
|
||||
* @param mongoOperations must not be {@literal null}
|
||||
* @param mongoOperations must not be {@literal null}.
|
||||
*/
|
||||
public MongoRepositoryFactory(MongoOperations mongoOperations) {
|
||||
|
||||
Assert.notNull(mongoOperations);
|
||||
this.mongoOperations = mongoOperations;
|
||||
|
||||
this.operations = mongoOperations;
|
||||
this.mappingContext = mongoOperations.getConverter().getMappingContext();
|
||||
}
|
||||
|
||||
@@ -67,84 +76,97 @@ public class MongoRepositoryFactory extends RepositoryFactorySupport {
|
||||
*/
|
||||
@Override
|
||||
protected Class<?> getRepositoryBaseClass(RepositoryMetadata metadata) {
|
||||
return isQueryDslRepository(metadata.getRepositoryInterface()) ? QueryDslMongoRepository.class
|
||||
: SimpleMongoRepository.class;
|
||||
|
||||
boolean isQueryDslRepository = QUERY_DSL_PRESENT
|
||||
&& QueryDslPredicateExecutor.class.isAssignableFrom(metadata.getRepositoryInterface());
|
||||
|
||||
return isQueryDslRepository ? QueryDslMongoRepository.class : SimpleMongoRepository.class;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getTargetRepository(org.springframework.data.repository.core.RepositoryMetadata)
|
||||
* @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getTargetRepository(org.springframework.data.repository.core.RepositoryInformation)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings({ "rawtypes", "unchecked" })
|
||||
protected Object getTargetRepository(RepositoryMetadata metadata) {
|
||||
protected Object getTargetRepository(RepositoryInformation information) {
|
||||
|
||||
Class<?> repositoryInterface = metadata.getRepositoryInterface();
|
||||
MongoEntityInformation<?, Serializable> entityInformation = getEntityInformation(metadata.getDomainType());
|
||||
|
||||
if (isQueryDslRepository(repositoryInterface)) {
|
||||
return new QueryDslMongoRepository(entityInformation, mongoOperations);
|
||||
} else {
|
||||
return new SimpleMongoRepository(entityInformation, mongoOperations);
|
||||
}
|
||||
MongoEntityInformation<?, Serializable> entityInformation = getEntityInformation(information.getDomainType(),
|
||||
information);
|
||||
return getTargetRepositoryViaReflection(information, entityInformation, operations);
|
||||
}
|
||||
|
||||
private static boolean isQueryDslRepository(Class<?> repositoryInterface) {
|
||||
|
||||
return QUERY_DSL_PRESENT && QueryDslPredicateExecutor.class.isAssignableFrom(repositoryInterface);
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getQueryLookupStrategy(org.springframework.data.repository.query.QueryLookupStrategy.Key)
|
||||
* @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getQueryLookupStrategy(org.springframework.data.repository.query.QueryLookupStrategy.Key, org.springframework.data.repository.query.EvaluationContextProvider)
|
||||
*/
|
||||
@Override
|
||||
protected QueryLookupStrategy getQueryLookupStrategy(Key key) {
|
||||
return new MongoQueryLookupStrategy();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link QueryLookupStrategy} to create {@link PartTreeMongoQuery} instances.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private class MongoQueryLookupStrategy implements QueryLookupStrategy {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.query.QueryLookupStrategy#resolveQuery(java.lang.reflect.Method, org.springframework.data.repository.core.RepositoryMetadata, org.springframework.data.repository.core.NamedQueries)
|
||||
*/
|
||||
public RepositoryQuery resolveQuery(Method method, RepositoryMetadata metadata, NamedQueries namedQueries) {
|
||||
|
||||
MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, mappingContext);
|
||||
String namedQueryName = queryMethod.getNamedQueryName();
|
||||
|
||||
if (namedQueries.hasQuery(namedQueryName)) {
|
||||
String namedQuery = namedQueries.getQuery(namedQueryName);
|
||||
return new StringBasedMongoQuery(namedQuery, queryMethod, mongoOperations);
|
||||
} else if (queryMethod.hasAnnotatedQuery()) {
|
||||
return new StringBasedMongoQuery(queryMethod, mongoOperations);
|
||||
} else {
|
||||
return new PartTreeMongoQuery(queryMethod, mongoOperations);
|
||||
}
|
||||
}
|
||||
protected QueryLookupStrategy getQueryLookupStrategy(Key key, EvaluationContextProvider evaluationContextProvider) {
|
||||
return new MongoQueryLookupStrategy(operations, evaluationContextProvider, mappingContext);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getEntityInformation(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public <T, ID extends Serializable> MongoEntityInformation<T, ID> getEntityInformation(Class<T> domainClass) {
|
||||
return getEntityInformation(domainClass, null);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private <T, ID extends Serializable> MongoEntityInformation<T, ID> getEntityInformation(Class<T> domainClass,
|
||||
RepositoryInformation information) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(domainClass);
|
||||
|
||||
if (entity == null) {
|
||||
throw new MappingException(String.format("Could not lookup mapping metadata for domain class %s!",
|
||||
domainClass.getName()));
|
||||
throw new MappingException(
|
||||
String.format("Could not lookup mapping metadata for domain class %s!", domainClass.getName()));
|
||||
}
|
||||
|
||||
return new MappingMongoEntityInformation<T, ID>((MongoPersistentEntity<T>) entity);
|
||||
return new MappingMongoEntityInformation<T, ID>((MongoPersistentEntity<T>) entity,
|
||||
information != null ? (Class<ID>) information.getIdType() : null);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link QueryLookupStrategy} to create {@link PartTreeMongoQuery} instances.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
private static class MongoQueryLookupStrategy implements QueryLookupStrategy {
|
||||
|
||||
private final MongoOperations operations;
|
||||
private final EvaluationContextProvider evaluationContextProvider;
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
|
||||
public MongoQueryLookupStrategy(MongoOperations operations, EvaluationContextProvider evaluationContextProvider,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
this.operations = operations;
|
||||
this.evaluationContextProvider = evaluationContextProvider;
|
||||
this.mappingContext = mappingContext;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.query.QueryLookupStrategy#resolveQuery(java.lang.reflect.Method, org.springframework.data.repository.core.RepositoryMetadata, org.springframework.data.projection.ProjectionFactory, org.springframework.data.repository.core.NamedQueries)
|
||||
*/
|
||||
@Override
|
||||
public RepositoryQuery resolveQuery(Method method, RepositoryMetadata metadata, ProjectionFactory factory,
|
||||
NamedQueries namedQueries) {
|
||||
|
||||
MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, mappingContext);
|
||||
String namedQueryName = queryMethod.getNamedQueryName();
|
||||
|
||||
if (namedQueries.hasQuery(namedQueryName)) {
|
||||
String namedQuery = namedQueries.getQuery(namedQueryName);
|
||||
return new StringBasedMongoQuery(namedQuery, queryMethod, operations, EXPRESSION_PARSER,
|
||||
evaluationContextProvider);
|
||||
} else if (queryMethod.hasAnnotatedQuery()) {
|
||||
return new StringBasedMongoQuery(queryMethod, operations, EXPRESSION_PARSER, evaluationContextProvider);
|
||||
} else {
|
||||
return new PartTreeMongoQuery(queryMethod, operations);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
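Illustration (not part of the diff): wiring the SpelExpressionParser and EvaluationContextProvider into StringBasedMongoQuery is what enables SpEL placeholders inside string-based queries. The Person domain type and repository below are assumptions for the sketch, not taken from this changeset.

import java.util.List;

import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.data.mongodb.repository.Query;

interface PersonRepository extends MongoRepository<Person, String> {

	// ?0 would be the plain positional placeholder; ?#{[0]} is evaluated through
	// the SpEL infrastructure set up in MongoQueryLookupStrategy above.
	@Query("{ 'firstname' : ?#{[0]} }")
	List<Person> findByFirstnameViaExpression(String firstname);
}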
|
||||
|
||||
@@ -34,12 +34,12 @@ import org.springframework.data.repository.core.EntityInformation;
|
||||
import org.springframework.data.repository.core.EntityMetadata;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mysema.query.mongodb.MongodbQuery;
|
||||
import com.mysema.query.types.EntityPath;
|
||||
import com.mysema.query.types.Expression;
|
||||
import com.mysema.query.types.OrderSpecifier;
|
||||
import com.mysema.query.types.Predicate;
|
||||
import com.mysema.query.types.path.PathBuilder;
|
||||
import com.querydsl.core.types.EntityPath;
|
||||
import com.querydsl.core.types.Expression;
|
||||
import com.querydsl.core.types.OrderSpecifier;
|
||||
import com.querydsl.core.types.Predicate;
|
||||
import com.querydsl.core.types.dsl.PathBuilder;
|
||||
import com.querydsl.mongodb.AbstractMongodbQuery;
|
||||
|
||||
/**
|
||||
* Special QueryDsl based repository implementation that allows execution of {@link Predicate}s in various forms.
|
||||
@@ -47,8 +47,8 @@ import com.mysema.query.types.path.PathBuilder;
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleMongoRepository<T, ID> implements
|
||||
QueryDslPredicateExecutor<T> {
|
||||
public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleMongoRepository<T, ID>
|
||||
implements QueryDslPredicateExecutor<T> {
|
||||
|
||||
private final PathBuilder<T> builder;
|
||||
private final EntityInformation<T, ID> entityInformation;
|
||||
@@ -92,7 +92,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
*/
|
||||
@Override
|
||||
public T findOne(Predicate predicate) {
|
||||
return createQueryFor(predicate).uniqueResult();
|
||||
return createQueryFor(predicate).fetchOne();
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -101,7 +101,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
*/
|
||||
@Override
|
||||
public List<T> findAll(Predicate predicate) {
|
||||
return createQueryFor(predicate).list();
|
||||
return createQueryFor(predicate).fetchResults().getResults();
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -110,7 +110,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
*/
|
||||
@Override
|
||||
public List<T> findAll(Predicate predicate, OrderSpecifier<?>... orders) {
|
||||
return createQueryFor(predicate).orderBy(orders).list();
|
||||
return createQueryFor(predicate).orderBy(orders).fetchResults().getResults();
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -119,7 +119,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
*/
|
||||
@Override
|
||||
public List<T> findAll(Predicate predicate, Sort sort) {
|
||||
return applySorting(createQueryFor(predicate), sort).list();
|
||||
return applySorting(createQueryFor(predicate), sort).fetchResults().getResults();
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -128,7 +128,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
*/
|
||||
@Override
|
||||
public Iterable<T> findAll(OrderSpecifier<?>... orders) {
|
||||
return createQuery().orderBy(orders).list();
|
||||
return createQuery().orderBy(orders).fetchResults().getResults();
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -138,10 +138,11 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
@Override
|
||||
public Page<T> findAll(Predicate predicate, Pageable pageable) {
|
||||
|
||||
MongodbQuery<T> countQuery = createQueryFor(predicate);
|
||||
MongodbQuery<T> query = createQueryFor(predicate);
|
||||
AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> countQuery = createQueryFor(predicate);
|
||||
AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> query = createQueryFor(predicate);
|
||||
|
||||
return new PageImpl<T>(applyPagination(query, pageable).list(), pageable, countQuery.count());
|
||||
return new PageImpl<T>(applyPagination(query, pageable).fetchResults().getResults(), pageable,
|
||||
countQuery.fetchCount());
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -151,10 +152,11 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
@Override
|
||||
public Page<T> findAll(Pageable pageable) {
|
||||
|
||||
MongodbQuery<T> countQuery = createQuery();
|
||||
MongodbQuery<T> query = createQuery();
|
||||
AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> countQuery = createQuery();
|
||||
AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> query = createQuery();
|
||||
|
||||
return new PageImpl<T>(applyPagination(query, pageable).list(), pageable, countQuery.count());
|
||||
return new PageImpl<T>(applyPagination(query, pageable).fetchResults().getResults(), pageable,
|
||||
countQuery.fetchCount());
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -163,7 +165,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
*/
|
||||
@Override
|
||||
public List<T> findAll(Sort sort) {
|
||||
return applySorting(createQuery(), sort).list();
|
||||
return applySorting(createQuery(), sort).fetchResults().getResults();
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -172,7 +174,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
*/
|
||||
@Override
|
||||
public long count(Predicate predicate) {
|
||||
return createQueryFor(predicate).count();
|
||||
return createQueryFor(predicate).fetchCount();
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -181,7 +183,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
*/
|
||||
@Override
|
||||
public boolean exists(Predicate predicate) {
|
||||
return createQueryFor(predicate).exists();
|
||||
return createQueryFor(predicate).fetchCount() > 0;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -190,7 +192,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
* @param predicate
|
||||
* @return
|
||||
*/
|
||||
private MongodbQuery<T> createQueryFor(Predicate predicate) {
|
||||
private AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> createQueryFor(Predicate predicate) {
|
||||
return createQuery().where(predicate);
|
||||
}
|
||||
|
||||
@@ -199,7 +201,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
private MongodbQuery<T> createQuery() {
|
||||
private AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> createQuery() {
|
||||
return new SpringDataMongodbQuery<T>(mongoOperations, entityInformation.getJavaType());
|
||||
}
|
||||
|
||||
@@ -210,7 +212,8 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
* @param pageable
|
||||
* @return
|
||||
*/
|
||||
private MongodbQuery<T> applyPagination(MongodbQuery<T> query, Pageable pageable) {
|
||||
private AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> applyPagination(
|
||||
AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> query, Pageable pageable) {
|
||||
|
||||
if (pageable == null) {
|
||||
return query;
|
||||
@@ -227,7 +230,8 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
* @param sort
|
||||
* @return
|
||||
*/
|
||||
private MongodbQuery<T> applySorting(MongodbQuery<T> query, Sort sort) {
|
||||
private AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> applySorting(
|
||||
AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> query, Sort sort) {
|
||||
|
||||
if (sort == null) {
|
||||
return query;
|
||||
@@ -260,7 +264,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
|
||||
Expression<Object> property = builder.get(order.getProperty());
|
||||
|
||||
return new OrderSpecifier(order.isAscending() ? com.mysema.query.types.Order.ASC
|
||||
: com.mysema.query.types.Order.DESC, property);
|
||||
return new OrderSpecifier(
|
||||
order.isAscending() ? com.querydsl.core.types.Order.ASC : com.querydsl.core.types.Order.DESC, property);
|
||||
}
|
||||
}
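Illustration (not part of the diff): the repository contract stays the same for callers; only the internal Querydsl calls move from the 3.x API (uniqueResult(), list(), count()) to the 4.x API (fetchOne(), fetchResults().getResults(), fetchCount()). A hypothetical call site against a generated QPerson metamodel:

QPerson person = QPerson.person;

// Executed via QueryDslMongoRepository; internally this now runs
// createQueryFor(predicate).orderBy(...).fetchResults().getResults().
List<Person> smiths = personRepository.findAll(
		person.lastname.eq("Smith"), person.firstname.asc());

long count = personRepository.count(person.lastname.eq("Smith")); // fetchCount() internally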
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -20,8 +20,8 @@ import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mysema.query.mongodb.MongodbQuery;
|
||||
import com.mysema.query.types.EntityPath;
|
||||
import com.querydsl.core.types.EntityPath;
|
||||
import com.querydsl.mongodb.AbstractMongodbQuery;
|
||||
|
||||
/**
|
||||
* Base class to create repository implementations based on Querydsl.
|
||||
@@ -36,7 +36,7 @@ public abstract class QuerydslRepositorySupport {
|
||||
/**
|
||||
* Creates a new {@link QuerydslRepositorySupport} for the given {@link MongoOperations}.
|
||||
*
|
||||
* @param operations must not be {@literal null}
|
||||
* @param operations must not be {@literal null}.
|
||||
*/
|
||||
public QuerydslRepositorySupport(MongoOperations operations) {
|
||||
|
||||
@@ -53,7 +53,7 @@ public abstract class QuerydslRepositorySupport {
|
||||
* @param path
|
||||
* @return
|
||||
*/
|
||||
protected <T> MongodbQuery<T> from(final EntityPath<T> path) {
|
||||
protected <T> AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> from(final EntityPath<T> path) {
|
||||
Assert.notNull(path);
|
||||
MongoPersistentEntity<?> entity = context.getPersistentEntity(path.getType());
|
||||
return from(path, entity.getCollection());
|
||||
@@ -66,7 +66,7 @@ public abstract class QuerydslRepositorySupport {
|
||||
* @param collection must not be blank or {@literal null}
|
||||
* @return
|
||||
*/
|
||||
protected <T> MongodbQuery<T> from(final EntityPath<T> path, String collection) {
|
||||
protected <T> AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> from(final EntityPath<T> path, String collection) {
|
||||
|
||||
Assert.notNull(path);
|
||||
Assert.hasText(collection);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
* Copyright 2010-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -25,6 +25,7 @@ import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.data.domain.Example;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.PageImpl;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
@@ -43,6 +44,7 @@ import org.springframework.util.Assert;
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public class SimpleMongoRepository<T, ID extends Serializable> implements MongoRepository<T, ID> {
|
||||
|
||||
@@ -53,7 +55,7 @@ public class SimpleMongoRepository<T, ID extends Serializable> implements MongoR
|
||||
* Creates a new {@link SimpleMongoRepository} for the given {@link MongoEntityInformation} and {@link MongoTemplate}.
|
||||
*
|
||||
* @param metadata must not be {@literal null}.
|
||||
* @param template must not be {@literal null}.
|
||||
* @param mongoOperations must not be {@literal null}.
|
||||
*/
|
||||
public SimpleMongoRepository(MongoEntityInformation<T, ID> metadata, MongoOperations mongoOperations) {
|
||||
|
||||
@@ -259,6 +261,93 @@ public class SimpleMongoRepository<T, ID extends Serializable> implements MongoR
|
||||
return list;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.MongoRepository#findAllByExample(org.springframework.data.domain.Example, org.springframework.data.domain.Pageable)
|
||||
*/
|
||||
@Override
|
||||
public <S extends T> Page<S> findAll(Example<S> example, Pageable pageable) {
|
||||
|
||||
Assert.notNull(example, "Sample must not be null!");
|
||||
|
||||
Query q = new Query(new Criteria().alike(example)).with(pageable);
|
||||
|
||||
long count = mongoOperations.count(q, example.getProbeType(), entityInformation.getCollectionName());
|
||||
|
||||
if (count == 0) {
|
||||
return new PageImpl<S>(Collections.<S> emptyList());
|
||||
}
|
||||
|
||||
return new PageImpl<S>(mongoOperations.find(q, example.getProbeType(), entityInformation.getCollectionName()),
|
||||
pageable, count);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.MongoRepository#findAllByExample(org.springframework.data.domain.Example, org.springframework.data.domain.Sort)
|
||||
*/
|
||||
@Override
|
||||
public <S extends T> List<S> findAll(Example<S> example, Sort sort) {
|
||||
|
||||
Assert.notNull(example, "Sample must not be null!");
|
||||
|
||||
Query q = new Query(new Criteria().alike(example));
|
||||
|
||||
if (sort != null) {
|
||||
q.with(sort);
|
||||
}
|
||||
|
||||
return mongoOperations.find(q, example.getProbeType(), entityInformation.getCollectionName());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.MongoRepository#findAllByExample(org.springframework.data.domain.Example)
|
||||
*/
|
||||
@Override
|
||||
public <S extends T> List<S> findAll(Example<S> example) {
|
||||
return findAll(example, (Sort) null);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.query.QueryByExampleExecutor#findOne(org.springframework.data.domain.Example)
|
||||
*/
|
||||
@Override
|
||||
public <S extends T> S findOne(Example<S> example) {
|
||||
|
||||
Assert.notNull(example, "Sample must not be null!");
|
||||
|
||||
Query q = new Query(new Criteria().alike(example));
|
||||
return mongoOperations.findOne(q, example.getProbeType(), entityInformation.getCollectionName());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.query.QueryByExampleExecutor#count(org.springframework.data.domain.Example)
|
||||
*/
|
||||
@Override
|
||||
public <S extends T> long count(Example<S> example) {
|
||||
|
||||
Assert.notNull(example, "Sample must not be null!");
|
||||
|
||||
Query q = new Query(new Criteria().alike(example));
|
||||
return mongoOperations.count(q, example.getProbeType(), entityInformation.getCollectionName());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.query.QueryByExampleExecutor#exists(org.springframework.data.domain.Example)
|
||||
*/
|
||||
@Override
|
||||
public <S extends T> boolean exists(Example<S> example) {
|
||||
|
||||
Assert.notNull(example, "Sample must not be null!");
|
||||
|
||||
Query q = new Query(new Criteria().alike(example));
|
||||
return mongoOperations.exists(q, example.getProbeType(), entityInformation.getCollectionName());
|
||||
}
|
||||
|
||||
private List<T> findAll(Query query) {
|
||||
|
||||
if (query == null) {
|
||||
@@ -291,4 +380,5 @@ public class SimpleMongoRepository<T, ID extends Serializable> implements MongoR
|
||||
private static int tryDetermineRealSizeOrReturn(Iterable<?> iterable, int defaultSize) {
|
||||
return iterable == null ? 0 : (iterable instanceof Collection) ? ((Collection<?>) iterable).size() : defaultSize;
|
||||
}
|
||||
|
||||
}
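Illustration (not part of the diff): a minimal Query-by-Example flow over the new findAll/count/exists overloads, assuming a Person domain class and repository that are not part of this changeset.

Person probe = new Person();
probe.setLastname("Matthews");

Example<Person> example = Example.of(probe);

List<Person> matthews = personRepository.findAll(example, new Sort("lastname", "firstname"));
long matches = personRepository.count(example);
boolean anyMatch = personRepository.exists(example);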
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2012 the original author or authors.
|
||||
* Copyright 2012-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -20,14 +20,14 @@ import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import com.google.common.base.Function;
|
||||
import com.mongodb.DBCollection;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mysema.query.mongodb.MongodbQuery;
|
||||
import com.querydsl.mongodb.AbstractMongodbQuery;
|
||||
|
||||
/**
|
||||
* Spring Data specfic {@link MongodbQuery} implementation.
|
||||
* Spring Data specific {@link MongodbQuery} implementation.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
class SpringDataMongodbQuery<T> extends MongodbQuery<T> {
|
||||
class SpringDataMongodbQuery<T> extends AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> {
|
||||
|
||||
private final MongoOperations operations;
|
||||
|
||||
@@ -48,7 +48,8 @@ class SpringDataMongodbQuery<T> extends MongodbQuery<T> {
|
||||
* @param type must not be {@literal null}.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
*/
|
||||
public SpringDataMongodbQuery(final MongoOperations operations, final Class<? extends T> type, String collectionName) {
|
||||
public SpringDataMongodbQuery(final MongoOperations operations, final Class<? extends T> type,
|
||||
String collectionName) {
|
||||
|
||||
super(operations.getCollection(collectionName), new Function<DBObject, T>() {
|
||||
public T apply(DBObject input) {
|
||||
@@ -61,7 +62,7 @@ class SpringDataMongodbQuery<T> extends MongodbQuery<T> {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.mysema.query.mongodb.MongodbQuery#getCollection(java.lang.Class)
|
||||
* @see com.querydsl.mongodb.AbstractMongodbQuery#getCollection(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
protected DBCollection getCollection(Class<?> type) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -26,15 +26,15 @@ import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.DBRef;
|
||||
import com.mysema.query.mongodb.MongodbSerializer;
|
||||
import com.mysema.query.types.Constant;
|
||||
import com.mysema.query.types.Operation;
|
||||
import com.mysema.query.types.Path;
|
||||
import com.mysema.query.types.PathMetadata;
|
||||
import com.mysema.query.types.PathType;
|
||||
import com.querydsl.core.types.Constant;
|
||||
import com.querydsl.core.types.Path;
|
||||
import com.querydsl.core.types.PathMetadata;
|
||||
import com.querydsl.core.types.PathType;
|
||||
import com.querydsl.mongodb.MongodbSerializer;
|
||||
|
||||
/**
|
||||
* Custom {@link MongodbSerializer} to take mapping information into account when building keys for constraints.
|
||||
@@ -76,10 +76,24 @@ class SpringDataMongodbSerializer extends MongodbSerializer {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.mysema.query.mongodb.MongodbSerializer#getKeyForPath(com.mysema.query.types.Path, com.mysema.query.types.PathMetadata)
|
||||
* @see com.querydsl.mongodb.MongodbSerializer#visit(com.querydsl.core.types.Constant, java.lang.Void)
|
||||
*/
|
||||
@Override
|
||||
protected String getKeyForPath(Path<?> expr, PathMetadata<?> metadata) {
|
||||
public Object visit(Constant<?> expr, Void context) {
|
||||
|
||||
if (!ClassUtils.isAssignable(Enum.class, expr.getType())) {
|
||||
return super.visit(expr, context);
|
||||
}
|
||||
|
||||
return converter.convertToMongoType(expr.getConstant());
|
||||
}
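Illustration (not part of the diff): the visit(Constant, Void) override routes enum constants through the MappingMongoConverter instead of letting Querydsl serialize them raw, so a predicate on an enum property matches the value as it is actually stored. QPerson and Gender are hypothetical names for this sketch.

// Hypothetical predicate on an enum property:
List<Person> result = personRepository.findAll(QPerson.person.gender.eq(Gender.FEMALE));
// The constant is passed through converter.convertToMongoType(...), typically
// producing { "gender" : "FEMALE" } in the issued query.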
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.querydsl.mongodb.MongodbSerializer#getKeyForPath(com.querydsl.core.types.Path, com.querydsl.core.types.PathMetadata)
|
||||
*/
|
||||
@Override
|
||||
protected String getKeyForPath(Path<?> expr, PathMetadata metadata) {
|
||||
|
||||
if (!metadata.getPathType().equals(PathType.PROPERTY)) {
|
||||
return super.getKeyForPath(expr, metadata);
|
||||
@@ -94,7 +108,7 @@ class SpringDataMongodbSerializer extends MongodbSerializer {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.mysema.query.mongodb.MongodbSerializer#asDBObject(java.lang.String, java.lang.Object)
|
||||
* @see com.querydsl.mongodb.MongodbSerializer#asDBObject(java.lang.String, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected DBObject asDBObject(String key, Object value) {
|
||||
@@ -108,7 +122,7 @@ class SpringDataMongodbSerializer extends MongodbSerializer {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.mysema.query.mongodb.MongodbSerializer#isReference(com.mysema.query.types.Path)
|
||||
* @see com.querydsl.mongodb.MongodbSerializer#isReference(com.querydsl.core.types.Path)
|
||||
*/
|
||||
@Override
|
||||
protected boolean isReference(Path<?> path) {
|
||||
@@ -119,34 +133,13 @@ class SpringDataMongodbSerializer extends MongodbSerializer {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.mysema.query.mongodb.MongodbSerializer#asReference(java.lang.Object)
|
||||
* @see com.querydsl.mongodb.MongodbSerializer#asReference(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected DBRef asReference(Object constant) {
|
||||
return converter.toDBRef(constant, null);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.mysema.query.mongodb.MongodbSerializer#asReference(com.mysema.query.types.Operation, int)
|
||||
*/
|
||||
@Override
|
||||
protected DBRef asReference(Operation<?> expr, int constIndex) {
|
||||
|
||||
for (Object arg : expr.getArgs()) {
|
||||
|
||||
if (arg instanceof Path) {
|
||||
|
||||
MongoPersistentProperty property = getPropertyFor((Path<?>) arg);
|
||||
Object constant = ((Constant<?>) expr.getArg(constIndex)).getConstant();
|
||||
|
||||
return converter.toDBRef(constant, property);
|
||||
}
|
||||
}
|
||||
|
||||
return super.asReference(expr, constIndex);
|
||||
}
|
||||
|
||||
private MongoPersistentProperty getPropertyFor(Path<?> path) {
|
||||
|
||||
Path<?> parent = path.getMetadata().getParent();
|
||||
|
||||
@@ -0,0 +1,140 @@
|
||||
/*
|
||||
* Copyright 2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.util;
|
||||
|
||||
import java.util.HashMap;
|
||||
|
||||
/**
|
||||
* {@link MongoDbErrorCodes} holds MongoDB specific error codes outlined in {@literal mongo/base/error_codes.err}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.8
|
||||
*/
|
||||
public final class MongoDbErrorCodes {
|
||||
|
||||
static HashMap<Integer, String> dataAccessResourceFailureCodes;
|
||||
static HashMap<Integer, String> dataIntegrityViolationCodes;
|
||||
static HashMap<Integer, String> duplicateKeyCodes;
|
||||
static HashMap<Integer, String> invalidDataAccessApiUsageExeption;
|
||||
static HashMap<Integer, String> permissionDeniedCodes;
|
||||
|
||||
static HashMap<Integer, String> errorCodes;
|
||||
|
||||
static {
|
||||
|
||||
dataAccessResourceFailureCodes = new HashMap<Integer, String>(10);
|
||||
dataAccessResourceFailureCodes.put(6, "HostUnreachable");
|
||||
dataAccessResourceFailureCodes.put(7, "HostNotFound");
|
||||
dataAccessResourceFailureCodes.put(89, "NetworkTimeout");
|
||||
dataAccessResourceFailureCodes.put(91, "ShutdownInProgress");
|
||||
dataAccessResourceFailureCodes.put(12000, "SlaveDelayDifferential");
|
||||
dataAccessResourceFailureCodes.put(10084, "CannotFindMapFile64Bit");
|
||||
dataAccessResourceFailureCodes.put(10085, "CannotFindMapFile");
|
||||
dataAccessResourceFailureCodes.put(10357, "ShutdownInProgress");
|
||||
dataAccessResourceFailureCodes.put(10359, "Header==0");
|
||||
dataAccessResourceFailureCodes.put(13440, "BadOffsetInFile");
|
||||
dataAccessResourceFailureCodes.put(13441, "BadOffsetInFile");
|
||||
dataAccessResourceFailureCodes.put(13640, "DataFileHeaderCorrupt");
|
||||
|
||||
dataIntegrityViolationCodes = new HashMap<Integer, String>(6);
|
||||
dataIntegrityViolationCodes.put(67, "CannotCreateIndex");
|
||||
dataIntegrityViolationCodes.put(68, "IndexAlreadyExists");
|
||||
dataIntegrityViolationCodes.put(85, "IndexOptionsConflict");
|
||||
dataIntegrityViolationCodes.put(86, "IndexKeySpecsConflict");
|
||||
dataIntegrityViolationCodes.put(112, "WriteConflict");
|
||||
dataIntegrityViolationCodes.put(117, "ConflictingOperationInProgress");
|
||||
|
||||
duplicateKeyCodes = new HashMap<Integer, String>(3);
|
||||
duplicateKeyCodes.put(3, "OBSOLETE_DuplicateKey");
|
||||
duplicateKeyCodes.put(84, "DuplicateKeyValue");
|
||||
duplicateKeyCodes.put(11000, "DuplicateKey");
|
||||
duplicateKeyCodes.put(11001, "DuplicateKey");
|
||||
|
||||
invalidDataAccessApiUsageExeption = new HashMap<Integer, String>();
|
||||
invalidDataAccessApiUsageExeption.put(5, "GraphContainsCycle");
|
||||
invalidDataAccessApiUsageExeption.put(9, "FailedToParse");
|
||||
invalidDataAccessApiUsageExeption.put(14, "TypeMismatch");
|
||||
invalidDataAccessApiUsageExeption.put(15, "Overflow");
|
||||
invalidDataAccessApiUsageExeption.put(16, "InvalidLength");
|
||||
invalidDataAccessApiUsageExeption.put(20, "IllegalOperation");
|
||||
invalidDataAccessApiUsageExeption.put(21, "EmptyArrayOperation");
|
||||
invalidDataAccessApiUsageExeption.put(22, "InvalidBSON");
|
||||
invalidDataAccessApiUsageExeption.put(23, "AlreadyInitialized");
|
||||
invalidDataAccessApiUsageExeption.put(29, "NonExistentPath");
|
||||
invalidDataAccessApiUsageExeption.put(30, "InvalidPath");
|
||||
invalidDataAccessApiUsageExeption.put(40, "ConflictingUpdateOperators");
|
||||
invalidDataAccessApiUsageExeption.put(45, "UserDataInconsistent");
|
||||
invalidDataAccessApiUsageExeption.put(30, "DollarPrefixedFieldName");
|
||||
invalidDataAccessApiUsageExeption.put(52, "InvalidPath");
|
||||
invalidDataAccessApiUsageExeption.put(53, "InvalidIdField");
|
||||
invalidDataAccessApiUsageExeption.put(54, "NotSingleValueField");
|
||||
invalidDataAccessApiUsageExeption.put(55, "InvalidDBRef");
|
||||
invalidDataAccessApiUsageExeption.put(56, "EmptyFieldName");
|
||||
invalidDataAccessApiUsageExeption.put(57, "DottedFieldName");
|
||||
invalidDataAccessApiUsageExeption.put(59, "CommandNotFound");
|
||||
invalidDataAccessApiUsageExeption.put(60, "DatabaseNotFound");
|
||||
invalidDataAccessApiUsageExeption.put(61, "ShardKeyNotFound");
|
||||
invalidDataAccessApiUsageExeption.put(62, "OplogOperationUnsupported");
|
||||
invalidDataAccessApiUsageExeption.put(66, "ImmutableField");
|
||||
invalidDataAccessApiUsageExeption.put(72, "InvalidOptions");
|
||||
invalidDataAccessApiUsageExeption.put(115, "CommandNotSupported");
|
||||
invalidDataAccessApiUsageExeption.put(116, "DocTooLargeForCapped");
|
||||
invalidDataAccessApiUsageExeption.put(130, "SymbolNotFound");
|
||||
invalidDataAccessApiUsageExeption.put(17280, "KeyTooLong");
|
||||
invalidDataAccessApiUsageExeption.put(13334, "ShardKeyTooBig");
|
||||
|
||||
permissionDeniedCodes = new HashMap<Integer, String>();
|
||||
permissionDeniedCodes.put(11, "UserNotFound");
|
||||
permissionDeniedCodes.put(18, "AuthenticationFailed");
|
||||
permissionDeniedCodes.put(31, "RoleNotFound");
|
||||
permissionDeniedCodes.put(32, "RolesNotRelated");
|
||||
permissionDeniedCodes.put(33, "PrvilegeNotFound");
|
||||
permissionDeniedCodes.put(15847, "CannotAuthenticate");
|
||||
permissionDeniedCodes.put(16704, "CannotAuthenticateToAdminDB");
|
||||
permissionDeniedCodes.put(16705, "CannotAuthenticateToAdminDB");
|
||||
|
||||
errorCodes = new HashMap<Integer, String>();
|
||||
errorCodes.putAll(dataAccessResourceFailureCodes);
|
||||
errorCodes.putAll(dataIntegrityViolationCodes);
|
||||
errorCodes.putAll(duplicateKeyCodes);
|
||||
errorCodes.putAll(invalidDataAccessApiUsageExeption);
|
||||
errorCodes.putAll(permissionDeniedCodes);
|
||||
}
|
||||
|
||||
public static boolean isDataIntegrityViolationCode(Integer errorCode) {
|
||||
return errorCode == null ? false : dataIntegrityViolationCodes.containsKey(errorCode);
|
||||
}
|
||||
|
||||
public static boolean isDataAccessResourceFailureCode(Integer errorCode) {
|
||||
return errorCode == null ? false : dataAccessResourceFailureCodes.containsKey(errorCode);
|
||||
}
|
||||
|
||||
public static boolean isDuplicateKeyCode(Integer errorCode) {
|
||||
return errorCode == null ? false : duplicateKeyCodes.containsKey(errorCode);
|
||||
}
|
||||
|
||||
public static boolean isPermissionDeniedCode(Integer errorCode) {
|
||||
return errorCode == null ? false : permissionDeniedCodes.containsKey(errorCode);
|
||||
}
|
||||
|
||||
public static boolean isInvalidDataAccessApiUsageCode(Integer errorCode) {
|
||||
return errorCode == null ? false : invalidDataAccessApiUsageExeption.containsKey(errorCode);
|
||||
}
|
||||
|
||||
public static String getErrorDescription(Integer errorCode) {
|
||||
return errorCode == null ? null : errorCodes.get(errorCode);
|
||||
}
|
||||
}
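Illustration (not part of the diff): how the new helper would typically be consulted when translating a raw MongoDB error code; the surrounding exception-translation plumbing is assumed.

Integer errorCode = 11000; // e.g. taken from a MongoException

if (MongoDbErrorCodes.isDuplicateKeyCode(errorCode)) {
	// 11000 maps to "DuplicateKey" in the table above and would usually be
	// translated into Spring's DuplicateKeyException.
	String description = MongoDbErrorCodes.getErrorDescription(errorCode);
}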
|
||||
@@ -5,4 +5,5 @@ http\://www.springframework.org/schema/data/mongo/spring-mongo-1.3.xsd=org/sprin
|
||||
http\://www.springframework.org/schema/data/mongo/spring-mongo-1.4.xsd=org/springframework/data/mongodb/config/spring-mongo-1.4.xsd
|
||||
http\://www.springframework.org/schema/data/mongo/spring-mongo-1.5.xsd=org/springframework/data/mongodb/config/spring-mongo-1.5.xsd
|
||||
http\://www.springframework.org/schema/data/mongo/spring-mongo-1.7.xsd=org/springframework/data/mongodb/config/spring-mongo-1.7.xsd
|
||||
http\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-1.7.xsd
|
||||
http\://www.springframework.org/schema/data/mongo/spring-mongo-1.8.xsd=org/springframework/data/mongodb/config/spring-mongo-1.8.xsd
|
||||
http\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-1.8.xsd
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
<xsd:element name="mongo" type="mongoType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoFactoryBean"><![CDATA[
|
||||
Defines a Mongo instance used for accessing MongoDB'.
|
||||
Deprecated since 1.7 - use mongo-client instead. Defines a Mongo instance used for accessing MongoDB.
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation>
|
||||
@@ -31,7 +31,7 @@ Defines a Mongo instance used for accessing MongoDB'.
|
||||
<xsd:element name="mongo-client" type="mongoClientType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoClientFactoryBean"><![CDATA[
|
||||
Defines a MongoClient instance used for accessing MongoDB'.
|
||||
Defines a MongoClient instance used for accessing MongoDB.
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation>
|
||||
@@ -72,14 +72,14 @@ The name of the database to connect to. Default is 'db'.
|
||||
<xsd:attribute name="authentication-dbname" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the authentication database to connect to. Default is 'db'.
|
||||
Deprecated since 1.7 - Please use MongoClient internal authentication. The name of the authentication database to connect to. Default is 'db'.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="port" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The port to connect to MongoDB server. Default is 27017
|
||||
The port to connect to MongoDB server. Default is 27017
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
@@ -93,14 +93,14 @@ The host to connect to a MongoDB server. Default is localhost
|
||||
<xsd:attribute name="username" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The username to use when connecting to a MongoDB server.
|
||||
Deprecated since 1.7 - Please use MongoClient internal authentication. The username to use when connecting to a MongoDB server.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="password" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The password to use when connecting to a MongoDB server.
|
||||
Deprecated since 1.7 - Please use MongoClient internal authentication. The password to use when connecting to a MongoDB server.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
@@ -382,6 +382,11 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
</xsd:attributeGroup>
|
||||
-->
|
||||
<xsd:complexType name="mongoType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Deprecated since 1.7.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:sequence minOccurs="0" maxOccurs="1">
|
||||
<xsd:element name="options" type="optionsType">
|
||||
<xsd:annotation>
|
||||
@@ -439,6 +444,11 @@ The comma delimited list of host:port entries to use for replica set/pairs.
|
||||
</xsd:complexType>
|
||||
|
||||
<xsd:complexType name="optionsType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Deprecated since 1.7.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:attribute name="connections-per-host" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
@@ -542,6 +552,11 @@ The SSLSocketFactory to use for the SSL connection. If none is configured here,
|
||||
</xsd:complexType>
|
||||
|
||||
<xsd:complexType name="mongoClientType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Configuration options for 'MongoClient' - @since 1.7
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:sequence minOccurs="0" maxOccurs="1">
|
||||
<xsd:element name="client-options" type="clientOptionsType">
|
||||
<xsd:annotation>
|
||||
@@ -593,10 +608,15 @@ The comma delimited list of username:password@database entries to use for authen
|
||||
</xsd:complexType>
|
||||
|
||||
<xsd:complexType name="clientOptionsType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Configuration options for 'MongoClientOptions' - @since 1.7
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:attribute name="description" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The MongoClient description.
|
||||
The MongoClient description.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
|
||||
@@ -0,0 +1,894 @@
|
||||
<?xml version="1.0" encoding="UTF-8" ?>
|
||||
<xsd:schema xmlns="http://www.springframework.org/schema/data/mongo"
|
||||
xmlns:xsd="http://www.w3.org/2001/XMLSchema"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:beans="http://www.springframework.org/schema/beans"
|
||||
xmlns:tool="http://www.springframework.org/schema/tool"
|
||||
xmlns:context="http://www.springframework.org/schema/context"
|
||||
xmlns:repository="http://www.springframework.org/schema/data/repository"
|
||||
targetNamespace="http://www.springframework.org/schema/data/mongo"
|
||||
elementFormDefault="qualified" attributeFormDefault="unqualified">
|
||||
|
||||
<xsd:import namespace="http://www.springframework.org/schema/beans" />
|
||||
<xsd:import namespace="http://www.springframework.org/schema/tool" />
|
||||
<xsd:import namespace="http://www.springframework.org/schema/context" />
|
||||
<xsd:import namespace="http://www.springframework.org/schema/data/repository"
|
||||
schemaLocation="http://www.springframework.org/schema/data/repository/spring-repository.xsd" />
|
||||
|
||||
<xsd:element name="mongo" type="mongoType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoFactoryBean"><![CDATA[
|
||||
Deprecated since 1.7 - use mongo-client instead. Defines a Mongo instance used for accessing MongoDB.
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation>
|
||||
<tool:exports type="com.mongodb.Mongo"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:element>
|
||||
|
||||
<xsd:element name="mongo-client" type="mongoClientType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoClientFactoryBean"><![CDATA[
|
||||
Defines a MongoClient instance used for accessing MongoDB.
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation>
|
||||
<tool:exports type="com.mongodb.MongoClient"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:element>
|
||||
|
||||
<xsd:element name="db-factory">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Defines a MongoDbFactory for connecting to a specific database
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:attribute name="id" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the mongo definition (by default "mongoDbFactory").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="mongo-ref" type="mongoRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The reference to a Mongo instance. If not configured a default com.mongodb.Mongo instance will be created.
|
||||
]]>
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="dbname" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the database to connect to. Default is 'db'.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="authentication-dbname" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Deprecated since 1.7 - Please use MongoClient internal authentication. The name of the authentication database to connect to. Default is 'db'.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="port" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The port to connect to MongoDB server. Default is 27017
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="host" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The host to connect to a MongoDB server. Default is localhost
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="username" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Deprecated since 1.7 - Please use MongoClient internal authentication. The username to use when connecting to a MongoDB server.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="password" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Deprecated since 1.7 - Please use MongoClient internal authentication. The password to use when connecting to a MongoDB server.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="uri" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Deprecated since 1.8 - Please use client-uri instead. The Mongo URI string.]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="client-uri" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The MongoClientURI string.]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="write-concern">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The WriteConcern that will be the default value used when asking the MongoDbFactory for a DB object
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:simpleType>
|
||||
<xsd:union memberTypes="writeConcernEnumeration xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
|
||||
<xsd:attributeGroup name="mongo-repository-attributes">
|
||||
<xsd:attribute name="mongo-template-ref" type="mongoTemplateRef" default="mongoTemplate">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The reference to a MongoTemplate. Will default to 'mongoTemplate'.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="create-query-indexes" type="xsd:boolean" default="false">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
Enables creation of indexes for queries that get derived from the method name
|
||||
and thus reference domain class properties. Defaults to false.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:attributeGroup>
|
||||
|
||||
<xsd:element name="repositories">
|
||||
<xsd:complexType>
|
||||
<xsd:complexContent>
|
||||
<xsd:extension base="repository:repositories">
|
||||
<xsd:attributeGroup ref="mongo-repository-attributes"/>
|
||||
<xsd:attributeGroup ref="repository:repository-attributes"/>
|
||||
</xsd:extension>
|
||||
</xsd:complexContent>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
|
||||
<xsd:element name="mapping-converter">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[Defines a MongoConverter for getting rich mapping functionality.]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:exports type="org.springframework.data.mongodb.core.convert.MappingMongoConverter" />
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:sequence>
|
||||
<xsd:element name="custom-converters" minOccurs="0">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Top-level element that contains one or more custom converters to be used for mapping
|
||||
domain objects to and from Mongo's DBObject]]>
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:sequence>
|
||||
<xsd:element name="converter" type="customConverterType" minOccurs="0" maxOccurs="unbounded"/>
|
||||
</xsd:sequence>
|
||||
<xsd:attribute name="base-package" type="xsd:string" />
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
</xsd:sequence>
|
||||
<xsd:attribute name="id" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the MappingMongoConverter instance (by default "mappingConverter").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="base-package" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The base package in which to scan for entities annotated with @Document
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="db-factory-ref" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The reference to a DbFactory.
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.MongoDbFactory" />
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="type-mapper-ref" type="typeMapperRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The reference to a MongoTypeMapper to be used by this MappingMongoConverter.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="mapping-context-ref" type="mappingContextRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mapping.model.MappingContext">
|
||||
The reference to a MappingContext. Will default to 'mappingContext'.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="disable-validation" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.mapping.event.ValidatingMongoEventListener">
|
||||
Disables JSR-303 validation on MongoDB documents before they are saved. By default it is set to false.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:simpleType>
|
||||
<xsd:union memberTypes="xsd:boolean xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="abbreviate-field-names" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.mapping.CamelCaseAbbreviatingFieldNamingStrategy">
|
||||
Enables abbreviating the field names for domain class properties to the
|
||||
first character of their camel case names, e.g. fooBar -> fb. Defaults to false.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:simpleType>
|
||||
<xsd:union memberTypes="xsd:boolean xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="field-naming-strategy-ref" type="fieldNamingStrategyRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.mapping.FieldNamingStrategy">
|
||||
The reference to a FieldNamingStrategy.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
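A hedged configuration sketch for the mapping-converter element defined above. The package names and the personConverter bean are illustrative assumptions; the nested custom-converters and converter elements are the ones declared in the schema.

<!-- Registers a MappingMongoConverter (default bean name "mappingConverter") and scans an assumed package for @Document types -->
<mongo:mapping-converter base-package="com.example.domain" db-factory-ref="mongoDbFactory">
  <mongo:custom-converters base-package="com.example.converters">
    <mongo:converter ref="personConverter" /> <!-- assumed Converter bean defined elsewhere -->
  </mongo:custom-converters>
</mongo:mapping-converter>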
|
||||
|
||||
<xsd:element name="jmx">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Defines JMX Model MBeans for monitoring a MongoDB server.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:attribute name="mongo-ref" type="mongoRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the Mongo object that determines which server to monitor (by default "mongo").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
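A one-line sketch of the jmx element above, assuming a Mongo bean named "mongo" has been defined elsewhere; the attribute is optional and defaults to that name.

<!-- Exposes JMX Model MBeans for the MongoDB server behind the "mongo" bean -->
<mongo:jmx mongo-ref="mongo" />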
|
||||
|
||||
<xsd:element name="auditing">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation>
|
||||
<tool:exports type="org.springframework.data.mongodb.core.mapping.event.AuditingEventListener" />
|
||||
<tool:exports type="org.springframework.data.auditing.IsNewAwareAuditingHandler" />
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:attributeGroup ref="repository:auditing-attributes" />
|
||||
<xsd:attribute name="mapping-context-ref" type="mappingContextRef" />
|
||||
</xsd:complexType>
|
||||
</xsd:element>
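A sketch of enabling auditing with the element above. The auditor-aware-ref attribute is assumed to be contributed by the referenced repository:auditing-attributes group, and auditorProvider is an assumed AuditorAware bean.

<!-- Registers the auditing event listener; bean names are assumptions -->
<mongo:auditing auditor-aware-ref="auditorProvider" mapping-context-ref="mappingContext" />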
|
||||
|
||||
<xsd:simpleType name="typeMapperRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.convert.MongoTypeMapper"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:simpleType name="mappingContextRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mapping.model.MappingContext"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:simpleType name="fieldNamingStrategyRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.mapping.FieldNamingStrategy"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:simpleType name="mongoTemplateRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoTemplate"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:simpleType name="mongoRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoFactoryBean"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:simpleType name="sslSocketFactoryRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="javax.net.ssl.SSLSocketFactory"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:simpleType name="writeConcernEnumeration">
|
||||
<xsd:restriction base="xsd:token">
|
||||
<xsd:enumeration value="NONE" />
|
||||
<xsd:enumeration value="NORMAL" />
|
||||
<xsd:enumeration value="SAFE" />
|
||||
<xsd:enumeration value="FSYNC_SAFE" />
|
||||
<xsd:enumeration value="REPLICAS_SAFE" />
|
||||
<xsd:enumeration value="JOURNAL_SAFE" />
|
||||
<xsd:enumeration value="MAJORITY" />
|
||||
</xsd:restriction>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:simpleType name="readPreferenceEnumeration">
|
||||
<xsd:restriction base="xsd:token">
|
||||
<xsd:enumeration value="PRIMARY" />
|
||||
<xsd:enumeration value="PRIMARY_PREFERRED" />
|
||||
<xsd:enumeration value="SECONDARY" />
|
||||
<xsd:enumeration value="SECONDARY_PREFERRED" />
|
||||
<xsd:enumeration value="NEAREST" />
|
||||
</xsd:restriction>
|
||||
</xsd:simpleType>
|
||||
<!-- MLP
|
||||
<xsd:attributeGroup name="writeConcern">
|
||||
<xsd:attribute name="write-concern">
|
||||
<xsd:simpleType>
|
||||
<xsd:restriction base="xsd:string">
|
||||
<xsd:enumeration value="NONE" />
|
||||
<xsd:enumeration value="NORMAL" />
|
||||
<xsd:enumeration value="SAFE" />
|
||||
<xsd:enumeration value="FSYNC_SAFE" />
|
||||
<xsd:enumeration value="REPLICA_SAFE" />
|
||||
<xsd:enumeration value="JOURNAL_SAFE" />
|
||||
<xsd:enumeration value="MAJORITY" />
|
||||
</xsd:restriction>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
</xsd:attributeGroup>
|
||||
-->
|
||||
<xsd:complexType name="mongoType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Deprecated since 1.7.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:sequence minOccurs="0" maxOccurs="1">
|
||||
<xsd:element name="options" type="optionsType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The Mongo driver options
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation>
|
||||
<tool:exports type="com.mongodb.MongoOptions"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:element>
|
||||
</xsd:sequence>
|
||||
<xsd:attribute name="write-concern">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The WriteConcern that will be the default value used when asking the MongoDbFactory for a DB object
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:simpleType>
|
||||
<xsd:union memberTypes="writeConcernEnumeration xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
<!-- MLP
|
||||
<xsd:attributeGroup ref="writeConcern" />
|
||||
-->
|
||||
<xsd:attribute name="id" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the mongo definition (by default "mongo").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="port" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The port used to connect to the MongoDB server. Default is 27017.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="host" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The host to connect to a MongoDB server. Default is localhost
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="replica-set" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The comma delimited list of host:port entries to use for replica set/pairs.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
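For completeness, a sketch of the deprecated configuration backed by mongoType; the element itself (typically mongo) is declared elsewhere in the schema, and all values shown are illustrative. New configurations should prefer the MongoClient-based variant further below.

<!-- Deprecated since 1.7 in favour of the MongoClient-based configuration -->
<mongo:mongo id="mongo" host="localhost" port="27017" write-concern="SAFE">
  <mongo:options connections-per-host="20" connect-timeout="5000" socket-timeout="15000" />
</mongo:mongo>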
|
||||
|
||||
<xsd:complexType name="optionsType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Deprecated since 1.7.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:attribute name="connections-per-host" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The number of connections allowed per host. Threads will block once the pool is exhausted. Default is 10. The system property MONGO.POOLSIZE can override this value.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="threads-allowed-to-block-for-connection-multiplier" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The multiplier for connectionsPerHost that limits the number of threads allowed to block waiting for a connection. Default is 5.
If connectionsPerHost is 10 and threadsAllowedToBlockForConnectionMultiplier is 5,
then up to 50 threads can block; any more than that and an exception will be thrown.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="max-wait-time" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The maximum wait time in milliseconds for a blocking thread to obtain a connection. Default is 120000 ms (2 minutes).
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="connect-timeout" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The connect timeout in milliseconds. The default is 0, which means an infinite timeout.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="socket-timeout" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The socket timeout. The default is 0, which means an infinite timeout.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="socket-keep-alive" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The keep-alive flag that controls whether socket keep-alive is enabled. Defaults to false.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="auto-connect-retry" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Controls whether the driver retries the connection automatically when a connect fails. Default is false.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="max-auto-connect-retry-time" type="xsd:long">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The maximum amount of time in milliseconds to spend retrying to open a connection to the same server. Default is 0, which means the default of 15s is used when autoConnectRetry is enabled.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="write-number" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Specifies the number of servers to wait for on the write operation and the exception-raising behavior. Corresponds to the 'w' option of the getlasterror command. Defaults to 0.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="write-timeout" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Controls the timeout for write operations in milliseconds. Corresponds to the 'wtimeout' option of the getlasterror command. Defaults to 0 (indefinite); a value greater than zero is the number of milliseconds to wait.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="write-fsync" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Controls whether to fsync on writes. Corresponds to the 'fsync' option of the getlasterror command. Defaults to false.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="slave-ok" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Controls whether the driver is allowed to read from secondaries (slaves). Defaults to false.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="ssl" type="xsd:boolean">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Controls whether the driver should use an SSL connection. Defaults to false.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="ssl-socket-factory-ref" type="sslSocketFactoryRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The SSLSocketFactory to use for the SSL connection. If none is configured here, SSLSocketFactory#getDefault() will be used.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
|
||||
<xsd:complexType name="mongoClientType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Configuration options for 'MongoClient' - @since 1.7
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:sequence minOccurs="0" maxOccurs="1">
|
||||
<xsd:element name="client-options" type="clientOptionsType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The Mongo driver options
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation>
|
||||
<tool:exports type="com.mongodb.MongoClientOptions"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:element>
|
||||
</xsd:sequence>
|
||||
<xsd:attribute name="id" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the mongo definition (by default "mongoClient").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="port" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The port used to connect to the MongoDB server. Default is 27017.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="host" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The host to connect to a MongoDB server. Default is localhost
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="replica-set" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The comma delimited list of host:port entries to use for replica set/pairs.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="credentials" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The comma-delimited list of username:password@database entries to use for authentication. Appending ?uri.authMechanism allows you to specify the authentication challenge mechanism. If a credential itself contains a comma, quote it with single quotes: '…'.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
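A sketch of the MongoClient configuration backed by mongoClientType. The element using this type (mongo-client in the Spring Data MongoDB namespace) is declared elsewhere in the schema, and the credential values and auth mechanism are illustrative.

<mongo:mongo-client id="mongoClient" host="localhost" port="27017"
                    credentials="jsmith:secret@admin?uri.authMechanism=PLAIN" />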
|
||||
|
||||
<xsd:complexType name="clientOptionsType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Configuration options for 'MongoClientOptions' - @since 1.7
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:attribute name="description" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The MongoClient description.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="min-connections-per-host" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The minimum number of connections per host.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="connections-per-host" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The number of connections allowed per host. Threads will block once the pool is exhausted. Default is 10. The system property MONGO.POOLSIZE can override this value.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="threads-allowed-to-block-for-connection-multiplier" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The multiplier for connectionsPerHost that limits the number of threads allowed to block waiting for a connection. Default is 5.
If connectionsPerHost is 10 and threadsAllowedToBlockForConnectionMultiplier is 5,
then up to 50 threads can block; any more than that and an exception will be thrown.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="max-wait-time" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The maximum wait time in milliseconds for a blocking thread to obtain a connection. Default is 120000 ms (2 minutes).
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="max-connection-idle-time" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The maximum idle time for a pooled connection.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="max-connection-life-time" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The maximum life time for a pooled connection.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="connect-timeout" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The connect timeout in milliseconds. The default is 0, which means an infinite timeout.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="socket-timeout" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The socket timeout. The default is 0, which means an infinite timeout.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="socket-keep-alive" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The keep-alive flag that controls whether socket keep-alive is enabled. Defaults to false.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="read-preference">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The read preference.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:simpleType>
|
||||
<xsd:union memberTypes="readPreferenceEnumeration xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="write-concern">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The WriteConcern that will be the default value used when asking the MongoDbFactory for a DB object
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:simpleType>
|
||||
<xsd:union memberTypes="writeConcernEnumeration xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="heartbeat-frequency" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The frequency at which the driver will attempt to determine the current state of each server in the cluster.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="min-heartbeat-frequency" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
In the event that the driver has to frequently re-check a server's availability, it will wait at least this long since the previous check to avoid wasted effort.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="heartbeat-connect-timeout" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The connect timeout for connections used for the cluster heartbeat.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="heartbeat-socket-timeout" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The socket timeout for connections used for the cluster heartbeat.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="ssl" type="xsd:boolean">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Controls whether the driver should use an SSL connection. Defaults to false.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="ssl-socket-factory-ref" type="sslSocketFactoryRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The SSLSocketFactory to use for the SSL connection. If none is configured here, SSLSocketFactory#getDefault() will be used.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
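A sketch combining the client-options element above with a replica-set connection; the hosts and values are illustrative, and the attribute names are the ones declared in clientOptionsType.

<mongo:mongo-client id="mongoClient" replica-set="127.0.0.1:27017,127.0.0.1:27018">
  <mongo:client-options connections-per-host="50"
                        threads-allowed-to-block-for-connection-multiplier="5"
                        connect-timeout="5000"
                        read-preference="SECONDARY_PREFERRED"
                        write-concern="MAJORITY" />
</mongo:mongo-client>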
|
||||
|
||||
<xsd:group name="beanElementGroup">
|
||||
<xsd:choice>
|
||||
<xsd:element ref="beans:bean"/>
|
||||
<xsd:element ref="beans:ref"/>
|
||||
</xsd:choice>
|
||||
</xsd:group>
|
||||
|
||||
<xsd:complexType name="customConverterType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Element defining a custom converter.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:group ref="beanElementGroup" minOccurs="0" maxOccurs="1"/>
|
||||
<xsd:attribute name="ref" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
A reference to a custom converter.
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref"/>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
|
||||
<xsd:simpleType name="converterRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.convert.MongoConverter"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:element name="template">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Defines a MongoTemplate for working with a specific database.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:attribute name="id" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the mongo definition (by default "mongoDbFactory").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="converter-ref" type="converterRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The reference to a MongoConverter instance.
|
||||
]]>
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.convert.MongoConverter"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="db-factory-ref" type="xsd:string"
|
||||
use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The reference to a DbFactory.
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to
|
||||
type="org.springframework.data.mongodb.MongoDbFactory" />
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="write-concern">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The WriteConcern that will be the default value used when asking the MongoDbFactory for a DB object
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:simpleType>
|
||||
<xsd:union memberTypes="writeConcernEnumeration xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
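A sketch of the template element defined above, wired to an assumed MongoDbFactory bean named mongoDbFactory.

<mongo:template id="mongoTemplate" db-factory-ref="mongoDbFactory" write-concern="SAFE" />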
|
||||
|
||||
<xsd:element name="gridFsTemplate">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Defines a GridFsTemplate for storing and retrieving files in GridFS.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:attribute name="id" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the mongo definition (by default "mongoDbFactory").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="converter-ref" type="converterRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The reference to a MongoConverter instance.
|
||||
]]>
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.convert.MongoConverter"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="db-factory-ref" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The reference to a DbFactory.
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.MongoDbFactory" />
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="bucket" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The GridFS bucket name.]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
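A sketch of the gridFsTemplate element defined above; the referenced bean names and the bucket value are assumptions.

<mongo:gridFsTemplate id="gridFsTemplate" db-factory-ref="mongoDbFactory"
                      converter-ref="mappingConverter" bucket="attachments" />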
|
||||
</xsd:schema>
|
||||
@@ -47,7 +47,7 @@ public class AuditingIntegrationTests {
|
||||
mappingContext.getPersistentEntity(Entity.class);
|
||||
|
||||
Entity entity = new Entity();
|
||||
BeforeConvertEvent<Entity> event = new BeforeConvertEvent<Entity>(entity);
|
||||
BeforeConvertEvent<Entity> event = new BeforeConvertEvent<Entity>(entity, "collection-1");
|
||||
context.publishEvent(event);
|
||||
|
||||
assertThat(entity.created, is(notNullValue()));
|
||||
@@ -55,7 +55,7 @@ public class AuditingIntegrationTests {
|
||||
|
||||
Thread.sleep(10);
|
||||
entity.id = 1L;
|
||||
event = new BeforeConvertEvent<Entity>(entity);
|
||||
event = new BeforeConvertEvent<Entity>(entity, "collection-1");
|
||||
context.publishEvent(event);
|
||||
|
||||
assertThat(entity.created, is(notNullValue()));
|
||||
|
||||
@@ -41,6 +41,7 @@ import org.springframework.test.util.ReflectionTestUtils;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.MongoClientURI;
|
||||
import com.mongodb.MongoURI;
|
||||
import com.mongodb.WriteConcern;
|
||||
|
||||
@@ -49,6 +50,7 @@ import com.mongodb.WriteConcern;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Viktor Khoroshko
|
||||
*/
|
||||
public class MongoDbFactoryParserIntegrationTests {
|
||||
|
||||
@@ -174,6 +176,75 @@ public class MongoDbFactoryParserIntegrationTests {
|
||||
reader.loadBeanDefinitions(new ClassPathResource("namespace/mongo-uri-and-details.xml"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1218
|
||||
*/
|
||||
@Test
|
||||
public void setsUpMongoDbFactoryUsingAMongoClientUri() {
|
||||
|
||||
reader.loadBeanDefinitions(new ClassPathResource("namespace/mongo-client-uri.xml"));
|
||||
BeanDefinition definition = factory.getBeanDefinition("mongoDbFactory");
|
||||
ConstructorArgumentValues constructorArguments = definition.getConstructorArgumentValues();
|
||||
|
||||
assertThat(constructorArguments.getArgumentCount(), is(1));
|
||||
ValueHolder argument = constructorArguments.getArgumentValue(0, MongoClientURI.class);
|
||||
assertThat(argument, is(notNullValue()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1218
|
||||
*/
|
||||
@Test(expected = BeanDefinitionParsingException.class)
|
||||
public void rejectsClientUriPlusDetailedConfiguration() {
|
||||
reader.loadBeanDefinitions(new ClassPathResource("namespace/mongo-client-uri-and-details.xml"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1293
|
||||
*/
|
||||
@Test
|
||||
public void setsUpClientUriWithId() {
|
||||
|
||||
reader.loadBeanDefinitions(new ClassPathResource("namespace/mongo-client-uri-and-id.xml"));
|
||||
BeanDefinition definition = factory.getBeanDefinition("testMongo");
|
||||
ConstructorArgumentValues constructorArguments = definition.getConstructorArgumentValues();
|
||||
|
||||
assertThat(constructorArguments.getArgumentCount(), is(1));
|
||||
ValueHolder argument = constructorArguments.getArgumentValue(0, MongoClientURI.class);
|
||||
assertThat(argument, is(notNullValue()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1293
|
||||
*/
|
||||
@Test
|
||||
public void setsUpUriWithId() {
|
||||
|
||||
reader.loadBeanDefinitions(new ClassPathResource("namespace/mongo-uri-and-id.xml"));
|
||||
BeanDefinition definition = factory.getBeanDefinition("testMongo");
|
||||
ConstructorArgumentValues constructorArguments = definition.getConstructorArgumentValues();
|
||||
|
||||
assertThat(constructorArguments.getArgumentCount(), is(1));
|
||||
ValueHolder argument = constructorArguments.getArgumentValue(0, MongoClientURI.class);
|
||||
assertThat(argument, is(notNullValue()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1293
|
||||
*/
|
||||
@Test(expected = BeanDefinitionParsingException.class)
|
||||
public void rejectsClientUriPlusDetailedConfigurationAndWriteConcern() {
|
||||
reader.loadBeanDefinitions(new ClassPathResource("namespace/mongo-client-uri-write-concern-and-details.xml"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1293
|
||||
*/
|
||||
@Test(expected = BeanDefinitionParsingException.class)
|
||||
public void rejectsUriPlusDetailedConfigurationAndWriteConcern() {
|
||||
reader.loadBeanDefinitions(new ClassPathResource("namespace/mongo-client-uri-write-concern-and-details.xml"));
|
||||
}
|
||||
|
||||
private static void assertWriteConcern(ClassPathXmlApplicationContext ctx, WriteConcern expectedWriteConcern) {
|
||||
|
||||
SimpleMongoDbFactory dbFactory = ctx.getBean("first", SimpleMongoDbFactory.class);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -20,6 +20,7 @@ import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.net.InetAddress;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.junit.Ignore;
|
||||
@@ -37,6 +38,13 @@ import com.mongodb.CommandResult;
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.ServerAddress;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration
|
||||
public class MongoNamespaceReplicaSetTests {
|
||||
@@ -70,10 +78,13 @@ public class MongoNamespaceReplicaSetTests {
|
||||
|
||||
assertThat(replicaSetSeeds, is(notNullValue()));
|
||||
assertThat(replicaSetSeeds, hasSize(3));
|
||||
assertThat(
|
||||
replicaSetSeeds,
|
||||
hasItems(new ServerAddress("192.168.174.130", 27017), new ServerAddress("192.168.174.130", 27018),
|
||||
new ServerAddress("192.168.174.130", 27019)));
|
||||
|
||||
List<Integer> ports = new ArrayList<Integer>();
|
||||
for (ServerAddress replicaSetSeed : replicaSetSeeds) {
|
||||
ports.add(replicaSetSeed.getPort());
|
||||
}
|
||||
|
||||
assertThat(ports, hasItems(27017, 27018, 27019));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
||||
@@ -0,0 +1,343 @@
|
||||
/*
|
||||
* Copyright 2015-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.mongodb.BulkOperationException;
|
||||
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.util.Pair;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.BulkWriteResult;
|
||||
import com.mongodb.DBCollection;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.WriteConcern;
|
||||
|
||||
/**
|
||||
* Integration tests for {@link DefaultBulkOperations}.
|
||||
*
|
||||
* @author Tobias Trelle
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration("classpath:infrastructure.xml")
|
||||
public class DefaultBulkOperationsIntegrationTests {
|
||||
|
||||
static final String COLLECTION_NAME = "bulk_ops";
|
||||
|
||||
@Autowired MongoOperations operations;
|
||||
|
||||
DBCollection collection;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
|
||||
this.collection = this.operations.getCollection(COLLECTION_NAME);
|
||||
this.collection.remove(new BasicDBObject());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void rejectsNullMongoOperations() {
|
||||
new DefaultBulkOperations(null, null, COLLECTION_NAME, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void rejectsNullCollectionName() {
|
||||
new DefaultBulkOperations(operations, null, null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void rejectsEmptyCollectionName() {
|
||||
new DefaultBulkOperations(operations, null, "", null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void insertOrdered() {
|
||||
|
||||
List<BaseDoc> documents = Arrays.asList(newDoc("1"), newDoc("2"));
|
||||
|
||||
assertThat(createBulkOps(BulkMode.ORDERED).insert(documents).execute().getInsertedCount(), is(2));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void insertOrderedFails() {
|
||||
|
||||
List<BaseDoc> documents = Arrays.asList(newDoc("1"), newDoc("1"), newDoc("2"));
|
||||
|
||||
try {
|
||||
createBulkOps(BulkMode.ORDERED).insert(documents).execute();
|
||||
fail();
|
||||
} catch (BulkOperationException e) {
|
||||
assertThat(e.getResult().getInsertedCount(), is(1)); // fails after first error
|
||||
assertThat(e.getErrors(), notNullValue());
|
||||
assertThat(e.getErrors().size(), is(1));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void insertUnOrdered() {
|
||||
|
||||
List<BaseDoc> documents = Arrays.asList(newDoc("1"), newDoc("2"));
|
||||
|
||||
assertThat(createBulkOps(BulkMode.UNORDERED).insert(documents).execute().getInsertedCount(), is(2));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void insertUnOrderedContinuesOnError() {
|
||||
|
||||
List<BaseDoc> documents = Arrays.asList(newDoc("1"), newDoc("1"), newDoc("2"));
|
||||
|
||||
try {
|
||||
createBulkOps(BulkMode.UNORDERED).insert(documents).execute();
|
||||
fail();
|
||||
} catch (BulkOperationException e) {
|
||||
assertThat(e.getResult().getInsertedCount(), is(2)); // two docs were inserted
|
||||
assertThat(e.getErrors(), notNullValue());
|
||||
assertThat(e.getErrors().size(), is(1));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void upsertDoesUpdate() {
|
||||
|
||||
insertSomeDocuments();
|
||||
|
||||
BulkWriteResult result = createBulkOps(BulkMode.ORDERED).//
|
||||
upsert(where("value", "value1"), set("value", "value2")).//
|
||||
execute();
|
||||
|
||||
assertThat(result, notNullValue());
|
||||
assertThat(result.getMatchedCount(), is(2));
|
||||
assertThat(result.getModifiedCount(), is(2));
|
||||
assertThat(result.getInsertedCount(), is(0));
|
||||
assertThat(result.getUpserts(), is(notNullValue()));
|
||||
assertThat(result.getUpserts().size(), is(0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void upsertDoesInsert() {
|
||||
|
||||
BulkWriteResult result = createBulkOps(BulkMode.ORDERED).//
|
||||
upsert(where("_id", "1"), set("value", "v1")).//
|
||||
execute();
|
||||
|
||||
assertThat(result, notNullValue());
|
||||
assertThat(result.getMatchedCount(), is(0));
|
||||
assertThat(result.getModifiedCount(), is(0));
|
||||
assertThat(result.getUpserts(), is(notNullValue()));
|
||||
assertThat(result.getUpserts().size(), is(1));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void updateOneOrdered() {
|
||||
testUpdate(BulkMode.ORDERED, false, 2);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void updateMultiOrdered() {
|
||||
testUpdate(BulkMode.ORDERED, true, 4);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void updateOneUnOrdered() {
|
||||
testUpdate(BulkMode.UNORDERED, false, 2);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void updateMultiUnOrdered() {
|
||||
testUpdate(BulkMode.UNORDERED, true, 4);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void removeOrdered() {
|
||||
testRemove(BulkMode.ORDERED);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void removeUnordered() {
|
||||
testRemove(BulkMode.UNORDERED);
|
||||
}
|
||||
|
||||
/**
|
||||
* If working on the same set of documents, only an ordered bulk operation will yield predictable results.
|
||||
*
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void mixedBulkOrdered() {
|
||||
|
||||
BulkWriteResult result = createBulkOps(BulkMode.ORDERED).insert(newDoc("1", "v1")).//
|
||||
updateOne(where("_id", "1"), set("value", "v2")).//
|
||||
remove(where("value", "v2")).//
|
||||
execute();
|
||||
|
||||
assertThat(result, notNullValue());
|
||||
assertThat(result.getInsertedCount(), is(1));
|
||||
assertThat(result.getModifiedCount(), is(1));
|
||||
assertThat(result.getRemovedCount(), is(1));
|
||||
}
|
||||
|
||||
/**
|
||||
* If working on the same set of documents, only an ordered bulk operation will yield predictable results.
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void mixedBulkOrderedWithList() {
|
||||
|
||||
List<BaseDoc> inserts = Arrays.asList(newDoc("1", "v1"), newDoc("2", "v2"), newDoc("3", "v2"));
|
||||
List<Pair<Query, Update>> updates = Arrays.asList(Pair.of(where("value", "v2"), set("value", "v3")));
|
||||
List<Query> removes = Arrays.asList(where("_id", "1"));
|
||||
|
||||
BulkWriteResult result = createBulkOps(BulkMode.ORDERED).insert(inserts).updateMulti(updates).remove(removes)
|
||||
.execute();
|
||||
|
||||
assertThat(result, notNullValue());
|
||||
assertThat(result.getInsertedCount(), is(3));
|
||||
assertThat(result.getModifiedCount(), is(2));
|
||||
assertThat(result.getRemovedCount(), is(1));
|
||||
}
|
||||
|
||||
private void testUpdate(BulkMode mode, boolean multi, int expectedUpdates) {
|
||||
|
||||
BulkOperations bulkOps = createBulkOps(mode);
|
||||
|
||||
insertSomeDocuments();
|
||||
|
||||
List<Pair<Query, Update>> updates = new ArrayList<Pair<Query, Update>>();
|
||||
updates.add(Pair.of(where("value", "value1"), set("value", "value3")));
|
||||
updates.add(Pair.of(where("value", "value2"), set("value", "value4")));
|
||||
|
||||
int modifiedCount = multi ? bulkOps.updateMulti(updates).execute().getModifiedCount()
|
||||
: bulkOps.updateOne(updates).execute().getModifiedCount();
|
||||
|
||||
assertThat(modifiedCount, is(expectedUpdates));
|
||||
}
|
||||
|
||||
private void testRemove(BulkMode mode) {
|
||||
|
||||
insertSomeDocuments();
|
||||
|
||||
List<Query> removes = Arrays.asList(where("_id", "1"), where("value", "value2"));
|
||||
|
||||
assertThat(createBulkOps(mode).remove(removes).execute().getRemovedCount(), is(3));
|
||||
}
|
||||
|
||||
private BulkOperations createBulkOps(BulkMode mode) {
|
||||
|
||||
DefaultBulkOperations operations = new DefaultBulkOperations(this.operations, mode, COLLECTION_NAME, null);
|
||||
operations.setDefaultWriteConcern(WriteConcern.ACKNOWLEDGED);
|
||||
|
||||
return operations;
|
||||
}
|
||||
|
||||
private void insertSomeDocuments() {
|
||||
|
||||
final DBCollection coll = operations.getCollection(COLLECTION_NAME);
|
||||
|
||||
coll.insert(rawDoc("1", "value1"));
|
||||
coll.insert(rawDoc("2", "value1"));
|
||||
coll.insert(rawDoc("3", "value2"));
|
||||
coll.insert(rawDoc("4", "value2"));
|
||||
}
|
||||
|
||||
private static BaseDoc newDoc(String id) {
|
||||
|
||||
BaseDoc doc = new BaseDoc();
|
||||
doc.id = id;
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
private static BaseDoc newDoc(String id, String value) {
|
||||
|
||||
BaseDoc doc = newDoc(id);
|
||||
doc.value = value;
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
private static Query where(String field, String value) {
|
||||
return new Query().addCriteria(Criteria.where(field).is(value));
|
||||
}
|
||||
|
||||
private static Update set(String field, String value) {
|
||||
return new Update().set(field, value);
|
||||
}
|
||||
|
||||
private static DBObject rawDoc(String id, String value) {
|
||||
return new BasicDBObject("_id", id).append("value", value);
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2014-2015 the original author or authors.
|
||||
* Copyright 2014-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -190,4 +190,12 @@ public class DefaultScriptOperationsTests {
|
||||
public void scriptNamesShouldReturnEmptySetWhenNoScriptRegistered() {
|
||||
assertThat(scriptOps.getScriptNames(), is(empty()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1465
|
||||
*/
|
||||
@Test
|
||||
public void executeShouldNotQuoteStrings() {
|
||||
assertThat(scriptOps.execute(EXECUTABLE_SCRIPT, "spring-data"), is((Object) "spring-data"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
* Copyright 2011-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -63,6 +63,7 @@ import org.springframework.data.mongodb.core.convert.DbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.LazyLoadingProxy;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.geo.GeoJsonPoint;
|
||||
import org.springframework.data.mongodb.core.index.Index;
|
||||
import org.springframework.data.mongodb.core.index.Index.Duplicates;
|
||||
import org.springframework.data.mongodb.core.index.IndexField;
|
||||
@@ -197,6 +198,7 @@ public class MongoTemplateTests {
|
||||
template.dropCollection(SomeTemplate.class);
|
||||
template.dropCollection(Address.class);
|
||||
template.dropCollection(DocumentWithCollectionOfSamples.class);
|
||||
template.dropCollection(WithGeoJson.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -3143,11 +3145,30 @@ public class MongoTemplateTests {
|
||||
assertThat(loaded.refToDocNotUsedInCtor, instanceOf(LazyLoadingProxy.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1401
|
||||
*/
|
||||
@Test
|
||||
public void updateShouldWorkForTypesContainingGeoJsonTypes() {
|
||||
|
||||
WithGeoJson wgj = new WithGeoJson();
|
||||
wgj.id = "1";
|
||||
wgj.description = "datamongo-1401";
|
||||
wgj.point = new GeoJsonPoint(1D, 2D);
|
||||
|
||||
template.save(wgj);
|
||||
|
||||
wgj.description = "datamongo-1401-update";
|
||||
template.save(wgj);
|
||||
|
||||
assertThat(template.findOne(query(where("id").is(wgj.id)), WithGeoJson.class).point, is(equalTo(wgj.point)));
|
||||
}
|
||||
|
||||
static class DoucmentWithNamedIdField {
|
||||
|
||||
@Id String someIdKey;
|
||||
|
||||
@Field(value = "val")//
|
||||
@Field(value = "val") //
|
||||
String value;
|
||||
|
||||
@Override
|
||||
@@ -3484,4 +3505,13 @@ public class MongoTemplateTests {
|
||||
|
||||
}
|
||||
|
||||
static class WithGeoJson {
|
||||
|
||||
@Id String id;
|
||||
@Version //
|
||||
Integer version;
|
||||
String description;
|
||||
GeoJsonPoint point;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -0,0 +1,131 @@
|
||||
/*
|
||||
* Copyright 2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import static org.hamcrest.core.Is.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
import static org.springframework.data.mongodb.core.query.Query.*;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;
|
||||
import org.springframework.data.mongodb.repository.MongoRepository;
|
||||
import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
/**
|
||||
* Integration tests for DATAMONGO-1289.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration
|
||||
public class NoExplicitIdTests {
|
||||
|
||||
@Configuration
|
||||
@EnableMongoRepositories(considerNestedRepositories = true)
|
||||
static class Config extends AbstractMongoConfiguration {
|
||||
|
||||
@Override
|
||||
protected String getDatabaseName() {
|
||||
return "test";
|
||||
}
|
||||
|
||||
@Override
|
||||
public Mongo mongo() throws Exception {
|
||||
return new MongoClient();
|
||||
}
|
||||
}
|
||||
|
||||
@Autowired MongoOperations mongoOps;
|
||||
@Autowired TypeWithoutExplicitIdPropertyRepository repo;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
mongoOps.dropCollection(TypeWithoutIdProperty.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1289
|
||||
*/
|
||||
@Test
|
||||
public void saveAndRetrieveTypeWithoutIdPorpertyViaTemplate() {
|
||||
|
||||
TypeWithoutIdProperty noid = new TypeWithoutIdProperty();
|
||||
noid.someString = "o.O";
|
||||
|
||||
mongoOps.save(noid);
|
||||
|
||||
TypeWithoutIdProperty retrieved = mongoOps.findOne(query(where("someString").is(noid.someString)),
|
||||
TypeWithoutIdProperty.class);
|
||||
|
||||
assertThat(retrieved.someString, is(noid.someString));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1289
|
||||
*/
|
||||
@Test
|
||||
public void saveAndRetrieveTypeWithoutIdPorpertyViaRepository() {
|
||||
|
||||
TypeWithoutIdProperty noid = new TypeWithoutIdProperty();
|
||||
noid.someString = "o.O";
|
||||
|
||||
repo.save(noid);
|
||||
|
||||
TypeWithoutIdProperty retrieved = repo.findBySomeString(noid.someString);
|
||||
assertThat(retrieved.someString, is(noid.someString));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1289
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void saveAndRetrieveTypeWithoutIdPorpertyViaRepositoryFindOne() {
|
||||
|
||||
TypeWithoutIdProperty noid = new TypeWithoutIdProperty();
|
||||
noid.someString = "o.O";
|
||||
|
||||
repo.save(noid);
|
||||
|
||||
Map<String, Object> map = mongoOps.findOne(query(where("someString").is(noid.someString)), Map.class,
|
||||
"typeWithoutIdProperty");
|
||||
|
||||
TypeWithoutIdProperty retrieved = repo.findOne(map.get("_id").toString());
|
||||
assertThat(retrieved.someString, is(noid.someString));
|
||||
}
|
||||
|
||||
static class TypeWithoutIdProperty {
|
||||
|
||||
String someString;
|
||||
}
|
||||
|
||||
static interface TypeWithoutExplicitIdPropertyRepository extends MongoRepository<TypeWithoutIdProperty, String> {
|
||||
|
||||
TypeWithoutIdProperty findBySomeString(String someString);
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2012 the original author or authors.
|
||||
* Copyright 2012-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -20,18 +20,22 @@ import static org.junit.Assert.*;
|
||||
import static org.springframework.data.mongodb.core.query.SerializationUtils.*;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Map;
|
||||
|
||||
import org.hamcrest.Matcher;
|
||||
import org.junit.Test;
|
||||
import org.springframework.data.mongodb.core.query.SerializationUtils;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.BasicDBObjectBuilder;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link SerializationUtils}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class SerializationUtilsUnitTests {
|
||||
|
||||
@@ -60,6 +64,74 @@ public class SerializationUtilsUnitTests {
|
||||
assertThat(serializeToJsonSafely(dbObject), is(expectedOutput));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1245
|
||||
*/
|
||||
@Test
|
||||
public void flattenMapShouldFlatOutNestedStructureCorrectly() {
|
||||
|
||||
DBObject dbo = new BasicDBObjectBuilder().add("_id", 1).add("nested", new BasicDBObject("value", "conflux")).get();
|
||||
|
||||
assertThat(flattenMap(dbo), hasEntry("_id", (Object) 1));
|
||||
assertThat(flattenMap(dbo), hasEntry("nested.value", (Object) "conflux"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1245
|
||||
*/
|
||||
@Test
|
||||
public void flattenMapShouldFlatOutNestedStructureWithListCorrectly() {
|
||||
|
||||
BasicDBList dbl = new BasicDBList();
|
||||
dbl.addAll(Arrays.asList("nightwielder", "calamity"));
|
||||
|
||||
DBObject dbo = new BasicDBObjectBuilder().add("_id", 1).add("nested", new BasicDBObject("value", dbl)).get();
|
||||
|
||||
assertThat(flattenMap(dbo), hasEntry("_id", (Object) 1));
|
||||
assertThat(flattenMap(dbo), hasEntry("nested.value", (Object) dbl));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1245
|
||||
*/
|
||||
@Test
|
||||
public void flattenMapShouldLeaveKeywordsUntouched() {
|
||||
|
||||
DBObject dbo = new BasicDBObjectBuilder().add("_id", 1).add("nested", new BasicDBObject("$regex", "^conflux$"))
|
||||
.get();
|
||||
|
||||
Map<String, Object> map = flattenMap(dbo);
|
||||
|
||||
assertThat(map, hasEntry("_id", (Object) 1));
|
||||
assertThat(map.get("nested"), notNullValue());
|
||||
assertThat(((Map<String, Object>) map.get("nested")).get("$regex"), is((Object) "^conflux$"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1245
|
||||
*/
|
||||
@Test
|
||||
public void flattenMapShouldAppendCommandsCorrectly() {
|
||||
|
||||
DBObject dbo = new BasicDBObjectBuilder().add("_id", 1)
|
||||
.add("nested", new BasicDBObjectBuilder().add("$regex", "^conflux$").add("$options", "i").get()).get();
|
||||
|
||||
Map<String, Object> map = flattenMap(dbo);
|
||||
|
||||
assertThat(map, hasEntry("_id", (Object) 1));
|
||||
assertThat(map.get("nested"), notNullValue());
|
||||
assertThat(((Map<String, Object>) map.get("nested")).get("$regex"), is((Object) "^conflux$"));
|
||||
assertThat(((Map<String, Object>) map.get("nested")).get("$options"), is((Object) "i"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1245
|
||||
*/
|
||||
@Test
|
||||
public void flattenMapShouldReturnEmptyMapWhenSourceIsNull() {
|
||||
assertThat(flattenMap(null).isEmpty(), is(true));
|
||||
}
|
||||
|
||||
static class Complex {
|
||||
|
||||
}
|
||||
|
||||
@@ -50,7 +50,7 @@ public class UnwrapAndReadDbObjectCallbackUnitTests {
|
||||
MappingMongoConverter converter = new MappingMongoConverter(new DefaultDbRefResolver(factory),
|
||||
new MongoMappingContext());
|
||||
|
||||
this.callback = template.new UnwrapAndReadDbObjectCallback<Target>(converter, Target.class);
|
||||
this.callback = template.new UnwrapAndReadDbObjectCallback<Target>(converter, Target.class, "collection-1");
|
||||
}
|
||||
|
||||
@Test
|
||||
|
||||
@@ -1,5 +1,5 @@
/*
* Copyright 2013-2015 the original author or authors.
* Copyright 2013-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -21,6 +21,7 @@ import static org.junit.Assume.*;
import static org.springframework.data.domain.Sort.Direction.*;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
import static org.springframework.data.mongodb.core.query.Criteria.*;
import static org.springframework.data.mongodb.test.util.IsBsonObject.*;

import java.io.BufferedInputStream;
import java.text.ParseException;
@@ -76,6 +77,7 @@ import com.mongodb.util.JSON;
* @author Thomas Darimont
* @author Oliver Gierke
* @author Christoph Strobl
* @author Mark Paluch
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath:infrastructure.xml")
@@ -85,6 +87,7 @@ public class AggregationTests {
private static final Logger LOGGER = LoggerFactory.getLogger(AggregationTests.class);
private static final Version TWO_DOT_FOUR = new Version(2, 4);
private static final Version TWO_DOT_SIX = new Version(2, 6);
private static final Version THREE_DOT_TWO = new Version(3, 2);

private static boolean initialized = false;

@@ -1068,6 +1071,76 @@ public class AggregationTests {
assertThat(result.get("totalValue"), is(equalTo((Object) 100.0)));
}

/**
* @see DATAMONGO-1326
*/
@Test
public void shouldLookupPeopleCorectly() {

assumeTrue(mongoVersion.isGreaterThanOrEqualTo(THREE_DOT_TWO));

createUsersWithReferencedPersons();

TypedAggregation<User> agg = newAggregation(User.class, //
lookup("person", "_id", "firstname", "linkedPerson"), //
sort(ASC, "id"));

AggregationResults<DBObject> results = mongoTemplate.aggregate(agg, User.class, DBObject.class);

List<DBObject> mappedResults = results.getMappedResults();

DBObject firstItem = mappedResults.get(0);

assertThat(firstItem, isBsonObject().containing("_id", "u1"));
assertThat(firstItem, isBsonObject().containing("linkedPerson.[0].firstname", "u1"));
}

/**
* @see DATAMONGO-1326
*/
@Test
public void shouldGroupByAndLookupPeopleCorectly() {

assumeTrue(mongoVersion.isGreaterThanOrEqualTo(THREE_DOT_TWO));

createUsersWithReferencedPersons();

TypedAggregation<User> agg = newAggregation(User.class, //
group().min("id").as("foreignKey"), //
lookup("person", "foreignKey", "firstname", "linkedPerson"), //
sort(ASC, "foreignKey", "linkedPerson.firstname"));

AggregationResults<DBObject> results = mongoTemplate.aggregate(agg, User.class, DBObject.class);

List<DBObject> mappedResults = results.getMappedResults();

DBObject firstItem = mappedResults.get(0);

assertThat(firstItem, isBsonObject().containing("foreignKey", "u1"));
assertThat(firstItem, isBsonObject().containing("linkedPerson.[0].firstname", "u1"));
}

private void createUsersWithReferencedPersons() {

mongoTemplate.dropCollection(User.class);
mongoTemplate.dropCollection(Person.class);

User user1 = new User("u1");
User user2 = new User("u2");
User user3 = new User("u3");

mongoTemplate.save(user1);
mongoTemplate.save(user2);
mongoTemplate.save(user3);

Person person1 = new Person("u1", "User 1");
Person person2 = new Person("u2", "User 2");

mongoTemplate.save(person1);
mongoTemplate.save(person2);
mongoTemplate.save(user3);
}

private void assertLikeStats(LikeStats like, String id, long count) {

assertThat(like, is(notNullValue()));

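The raw pipeline those two lookup integration tests aim at on MongoDB 3.2+ looks roughly like the sketch below, built with the plain driver API rather than taken from the test class; that the mapped property "id" ends up as "_id" is an assumption about the converter:

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

class LookupPipelineSketch {

    static DBObject[] lookupUsersToPersons() {

        // $lookup joining the "person" collection on user _id == person.firstname
        DBObject lookup = new BasicDBObject("$lookup",
                new BasicDBObject("from", "person")
                        .append("localField", "_id")
                        .append("foreignField", "firstname")
                        .append("as", "linkedPerson"));

        // $sort on the (assumed) mapped name of the "id" property
        DBObject sort = new BasicDBObject("$sort", new BasicDBObject("_id", 1));

        return new DBObject[] { lookup, sort };
    }
}
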
@@ -0,0 +1,166 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.aggregation;

import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import static org.springframework.data.mongodb.test.util.IsBsonObject.*;

import org.junit.Test;
import org.springframework.data.mongodb.core.DBObjectTestUtils;

import com.mongodb.DBObject;

/**
* Unit tests for {@link LookupOperation}.
*
* @author Alessio Fachechi
* @author Christoph Strobl
* @author Mark Paluch
*/
public class LookupOperationUnitTests {

/**
* @see DATAMONGO-1326
*/
@Test(expected = IllegalArgumentException.class)
public void rejectsNullForFrom() {
new LookupOperation(null, Fields.field("localField"), Fields.field("foreignField"), Fields.field("as"));
}

/**
* @see DATAMONGO-1326
*/
@Test(expected = IllegalArgumentException.class)
public void rejectsNullLocalFieldField() {
new LookupOperation(Fields.field("from"), null, Fields.field("foreignField"), Fields.field("as"));
}

/**
* @see DATAMONGO-1326
*/
@Test(expected = IllegalArgumentException.class)
public void rejectsNullForeignField() {
new LookupOperation(Fields.field("from"), Fields.field("localField"), null, Fields.field("as"));
}

/**
* @see DATAMONGO-1326
*/
@Test(expected = IllegalArgumentException.class)
public void rejectsNullForAs() {
new LookupOperation(Fields.field("from"), Fields.field("localField"), Fields.field("foreignField"), null);
}

/**
* @see DATAMONGO-1326
*/
@Test
public void lookupOperationWithValues() {

LookupOperation lookupOperation = Aggregation.lookup("a", "b", "c", "d");

DBObject lookupClause = extractDbObjectFromLookupOperation(lookupOperation);

assertThat(lookupClause,
isBsonObject().containing("from", "a") //
.containing("localField", "b") //
.containing("foreignField", "c") //
.containing("as", "d"));
}

/**
* @see DATAMONGO-1326
*/
@Test
public void lookupOperationExposesAsField() {

LookupOperation lookupOperation = Aggregation.lookup("a", "b", "c", "d");

assertThat(lookupOperation.getFields().exposesNoFields(), is(false));
assertThat(lookupOperation.getFields().exposesSingleFieldOnly(), is(true));
assertThat(lookupOperation.getFields().getField("d"), notNullValue());
}

private DBObject extractDbObjectFromLookupOperation(LookupOperation lookupOperation) {

DBObject dbObject = lookupOperation.toDBObject(Aggregation.DEFAULT_CONTEXT);
DBObject lookupClause = DBObjectTestUtils.getAsDBObject(dbObject, "$lookup");
return lookupClause;
}

/**
* @see DATAMONGO-1326
*/
@Test(expected = IllegalArgumentException.class)
public void builderRejectsNullFromField() {
LookupOperation.newLookup().from(null);
}

/**
* @see DATAMONGO-1326
*/
@Test(expected = IllegalArgumentException.class)
public void builderRejectsNullLocalField() {
LookupOperation.newLookup().from("a").localField(null);
}

/**
* @see DATAMONGO-1326
*/
@Test(expected = IllegalArgumentException.class)
public void builderRejectsNullForeignField() {
LookupOperation.newLookup().from("a").localField("b").foreignField(null);
}

/**
* @see DATAMONGO-1326
*/
@Test(expected = IllegalArgumentException.class)
public void builderRejectsNullAsField() {
LookupOperation.newLookup().from("a").localField("b").foreignField("c").as(null);
}

/**
* @see DATAMONGO-1326
*/
@Test
public void lookupBuilderBuildsCorrectClause() {

LookupOperation lookupOperation = LookupOperation.newLookup().from("a").localField("b").foreignField("c").as("d");

DBObject lookupClause = extractDbObjectFromLookupOperation(lookupOperation);

assertThat(lookupClause,
isBsonObject().containing("from", "a") //
.containing("localField", "b") //
.containing("foreignField", "c") //
.containing("as", "d"));
}

/**
* @see DATAMONGO-1326
*/
@Test
public void lookupBuilderExposesFields() {

LookupOperation lookupOperation = LookupOperation.newLookup().from("a").localField("b").foreignField("c").as("d");

assertThat(lookupOperation.getFields().exposesNoFields(), is(false));
assertThat(lookupOperation.getFields().exposesSingleFieldOnly(), is(true));
assertThat(lookupOperation.getFields().getField("d"), notNullValue());
}
}
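As a usage note, the two construction paths exercised above are expected to render the same stage; a minimal sketch using only the API visible in these tests:

import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.LookupOperation;

class LookupUsageSketch {

    static void buildLookups() {

        // Static factory, as in lookupOperationWithValues()
        LookupOperation viaFactory = Aggregation.lookup("a", "b", "c", "d");

        // Step builder, as in lookupBuilderBuildsCorrectClause()
        LookupOperation viaBuilder = LookupOperation.newLookup().from("a").localField("b").foreignField("c").as("d");

        // Both should render { "$lookup" : { "from" : "a", "localField" : "b", "foreignField" : "c", "as" : "d" } }
        // and expose exactly one field, "d", to later pipeline stages.
    }
}
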
@@ -1,5 +1,5 @@
/*
* Copyright 2013-2015 the original author or authors.
* Copyright 2013-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -32,6 +32,7 @@ import org.springframework.core.convert.converter.Converter;
import org.springframework.core.convert.support.GenericConversionService;
import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.PersistenceConstructor;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mapping.model.MappingException;
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
@@ -51,6 +52,7 @@ import com.mongodb.DBObject;
*
* @author Oliver Gierke
* @author Thomas Darimont
* @author Mark Paluch
*/
@RunWith(MockitoJUnitRunner.class)
public class TypeBasedAggregationOperationContextUnitTests {
@@ -187,6 +189,96 @@ public class TypeBasedAggregationOperationContextUnitTests {
assertThat(definition.get("_id"), is(equalTo((Object) "$counter_name")));
}

/**
* @see DATAMONGO-1326
*/
@Test
public void lookupShouldInheritFieldsFromInheritingAggregationOperation() {

TypeBasedAggregationOperationContext context = getContext(MeterData.class);
TypedAggregation<MeterData> agg = newAggregation(MeterData.class,
lookup("OtherCollection", "resourceId", "otherId", "lookup"), sort(Direction.ASC, "resourceId"));

DBObject dbo = agg.toDbObject("meterData", context);
DBObject sort = getPipelineElementFromAggregationAt(dbo, 1);

DBObject definition = (DBObject) sort.get("$sort");

assertThat(definition.get("resourceId"), is(equalTo((Object) 1)));
}

/**
* @see DATAMONGO-1326
*/
@Test
public void groupLookupShouldInheritFieldsFromPreviousAggregationOperation() {

TypeBasedAggregationOperationContext context = getContext(MeterData.class);
TypedAggregation<MeterData> agg = newAggregation(MeterData.class, group().min("resourceId").as("foreignKey"),
lookup("OtherCollection", "foreignKey", "otherId", "lookup"), sort(Direction.ASC, "foreignKey"));

DBObject dbo = agg.toDbObject("meterData", context);
DBObject sort = getPipelineElementFromAggregationAt(dbo, 2);

DBObject definition = (DBObject) sort.get("$sort");

assertThat(definition.get("foreignKey"), is(equalTo((Object) 1)));
}

/**
* @see DATAMONGO-1326
*/
@Test
public void lookupGroupAggregationShouldUseCorrectGroupField() {

TypeBasedAggregationOperationContext context = getContext(MeterData.class);
TypedAggregation<MeterData> agg = newAggregation(MeterData.class,
lookup("OtherCollection", "resourceId", "otherId", "lookup"),
group().min("lookup.otherkey").as("something_totally_different"));

DBObject dbo = agg.toDbObject("meterData", context);
DBObject group = getPipelineElementFromAggregationAt(dbo, 1);

DBObject definition = (DBObject) group.get("$group");
DBObject field = (DBObject) definition.get("something_totally_different");

assertThat(field.get("$min"), is(equalTo((Object) "$lookup.otherkey")));
}

/**
* @see DATAMONGO-1326
*/
@Test
public void lookupGroupAggregationShouldOverwriteExposedFields() {

TypeBasedAggregationOperationContext context = getContext(MeterData.class);
TypedAggregation<MeterData> agg = newAggregation(MeterData.class,
lookup("OtherCollection", "resourceId", "otherId", "lookup"),
group().min("lookup.otherkey").as("something_totally_different"),
sort(Direction.ASC, "something_totally_different"));

DBObject dbo = agg.toDbObject("meterData", context);
DBObject sort = getPipelineElementFromAggregationAt(dbo, 2);

DBObject definition = (DBObject) sort.get("$sort");

assertThat(definition.get("something_totally_different"), is(equalTo((Object) 1)));
}

/**
* @see DATAMONGO-1326
*/
@Test(expected = IllegalArgumentException.class)
public void lookupGroupAggregationShouldFailInvalidFieldReference() {

TypeBasedAggregationOperationContext context = getContext(MeterData.class);
TypedAggregation<MeterData> agg = newAggregation(MeterData.class,
lookup("OtherCollection", "resourceId", "otherId", "lookup"),
group().min("lookup.otherkey").as("something_totally_different"), sort(Direction.ASC, "resourceId"));

agg.toDbObject("meterData", context);
}

@Document(collection = "person")
public static class FooPerson {

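For reference, the group + lookup + sort aggregation in groupLookupShouldInheritFieldsFromPreviousAggregationOperation() should render to documents along these lines (hand-written illustration; only the asserted keys are taken from the test, the $group id and overall shape are assumptions):

import java.util.Arrays;
import java.util.List;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

class ExpectedPipelineSketch {

    static List<DBObject> groupLookupSortPipeline() {

        DBObject group = new BasicDBObject("$group",
                new BasicDBObject("_id", null).append("foreignKey", new BasicDBObject("$min", "$resourceId")));

        DBObject lookup = new BasicDBObject("$lookup",
                new BasicDBObject("from", "OtherCollection")
                        .append("localField", "foreignKey")
                        .append("foreignField", "otherId")
                        .append("as", "lookup"));

        // The sort has to reference the exposed alias "foreignKey", not the original "resourceId",
        // which is exactly why lookupGroupAggregationShouldFailInvalidFieldReference() expects an exception.
        DBObject sort = new BasicDBObject("$sort", new BasicDBObject("foreignKey", 1));

        return Arrays.asList(group, lookup, sort);
    }
}
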
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2015 the original author or authors.
* Copyright 2011-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -24,6 +24,7 @@ import java.text.Format;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Collections;
import java.util.Currency;
import java.util.Date;
import java.util.Locale;
import java.util.UUID;
@@ -271,6 +272,18 @@ public class CustomConversionsUnitTests {
assertThat(customConversions.getCustomWriteTarget(String.class, SimpleDateFormat.class), notNullValue());
}

/**
* @see DATAMONGO-1372
*/
@Test
public void registersConvertersForCurrency() {

CustomConversions customConversions = new CustomConversions();

assertThat(customConversions.hasCustomWriteTarget(Currency.class), is(true));
assertThat(customConversions.hasCustomReadTarget(String.class, Currency.class), is(true));
}

private static Class<?> createProxyTypeFor(Class<?> type) {

ProxyFactory factory = new ProxyFactory();

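The Currency support asserted above amounts to a String round-trip via the ISO 4217 code; a bare-bones sketch of that conversion (using getCurrencyCode() is an assumption about the converter implementation, the round-trip itself is what the test requires):

import java.util.Currency;

class CurrencyMappingSketch {

    static String write(Currency currency) {
        return currency.getCurrencyCode(); // e.g. Currency.getInstance("USD") -> "USD"
    }

    static Currency read(String code) {
        return Currency.getInstance(code); // "USD" -> java.util.Currency
    }
}
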
@@ -101,6 +101,20 @@ public class DBObjectAccessorUnitTests {
assertThat(nestedA.get("c"), is((Object) "c"));
}

/**
* @see DATAMONGO-1471
*/
@Test
public void exposesAvailabilityOfFields() {

DBObjectAccessor accessor = new DBObjectAccessor(new BasicDBObject("a", new BasicDBObject("c", "d")));
MongoPersistentEntity<?> entity = context.getPersistentEntity(ProjectingType.class);

assertThat(accessor.hasValue(entity.getPersistentProperty("foo")), is(false));
assertThat(accessor.hasValue(entity.getPersistentProperty("a")), is(true));
assertThat(accessor.hasValue(entity.getPersistentProperty("name")), is(false));
}

static class ProjectingType {

String name;

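Roughly, hasValue(...) answers whether the backing document already carries a value for a mapped property; a hypothetical dot-path variant against the raw DBObject (the real DBObjectAccessor works from MongoPersistentProperty metadata and mapped field names):

import com.mongodb.DBObject;

class FieldPresenceSketch {

    static boolean hasValue(DBObject source, String path) {

        String[] parts = path.split("\\.");
        Object current = source;

        for (String part : parts) {
            if (!(current instanceof DBObject) || !((DBObject) current).containsField(part)) {
                return false; // e.g. "foo" or "name" in the test above
            }
            current = ((DBObject) current).get(part);
        }
        return true; // e.g. "a" (and "a.c") in the test above
    }
}
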
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2013 the original author or authors.
* Copyright 2011-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -48,9 +48,9 @@ public class DefaultMongoTypeMapperUnitTests {
@Before
public void setUp() {

configurableTypeInformationMapper = new ConfigurableTypeInformationMapper(Collections.singletonMap(String.class,
"1"));
simpleTypeInformationMapper = SimpleTypeInformationMapper.INSTANCE;
configurableTypeInformationMapper = new ConfigurableTypeInformationMapper(
Collections.singletonMap(String.class, "1"));
simpleTypeInformationMapper = new SimpleTypeInformationMapper();

typeMapper = new DefaultMongoTypeMapper();
}
@@ -81,8 +81,8 @@ public class DefaultMongoTypeMapperUnitTests {
@Test
public void writesClassNamesForUnmappedValuesIfConfigured() {

typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, Arrays.asList(
configurableTypeInformationMapper, simpleTypeInformationMapper));
typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY,
Arrays.asList(configurableTypeInformationMapper, simpleTypeInformationMapper));

writesTypeToField(new BasicDBObject(), String.class, "1");
writesTypeToField(new BasicDBObject(), Object.class, Object.class.getName());
@@ -101,11 +101,12 @@ public class DefaultMongoTypeMapperUnitTests {
@Test
public void readsTypeLoadingClassesForUnmappedTypesIfConfigured() {

typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, Arrays.asList(
configurableTypeInformationMapper, simpleTypeInformationMapper));
typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY,
Arrays.asList(configurableTypeInformationMapper, simpleTypeInformationMapper));

readsTypeFromField(new BasicDBObject(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, "1"), String.class);
readsTypeFromField(new BasicDBObject(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, Object.class.getName()), Object.class);
readsTypeFromField(new BasicDBObject(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, Object.class.getName()),
Object.class);
}

/**
@@ -144,7 +145,8 @@ public class DefaultMongoTypeMapperUnitTests {

@Test
public void readsTypeFromDefaultKeyByDefault() {
readsTypeFromField(new BasicDBObject(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, String.class.getName()), String.class);
readsTypeFromField(new BasicDBObject(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, String.class.getName()),
String.class);
}

@Test

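Conceptually, the two mappers wired up in setUp() produce the behaviour asserted around them: a registered alias wins, otherwise the fully qualified class name is used. A simplified, framework-free sketch of that decision:

import java.util.Collections;
import java.util.Map;

class TypeAliasSketch {

    // Mirrors the test fixture: String is registered under the alias "1".
    private final Map<Class<?>, String> aliases = Collections.<Class<?>, String> singletonMap(String.class, "1");

    String aliasFor(Class<?> type) {
        String alias = aliases.get(type);
        return alias != null ? alias : type.getName(); // String -> "1", Object -> "java.lang.Object"
    }
}
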
@@ -0,0 +1,73 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.convert;

import static org.hamcrest.core.Is.*;
import static org.hamcrest.core.IsEqual.*;
import static org.mockito.Mockito.*;

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.LazyLoadingException;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver.LazyLoadingInterceptor;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;

import com.mongodb.DBRef;

/**
* Unit tests for {@link LazyLoadingInterceptor}.
*
* @author Christoph Strobl
*/
@RunWith(MockitoJUnitRunner.class)
public class LazyLoadingInterceptorUnitTests {

public @Rule ExpectedException exception = ExpectedException.none();

@Mock MongoPersistentProperty propertyMock;
@Mock DBRef dbrefMock;
@Mock DbRefResolverCallback callbackMock;

/**
* @see DATAMONGO-1437
*/
@Test
public void shouldPreserveCauseForNonTranslatableExceptions() throws Throwable {

NullPointerException npe = new NullPointerException("Some Exception we did not think about.");
when(callbackMock.resolve(propertyMock)).thenThrow(npe);

exception.expect(LazyLoadingException.class);
exception.expectCause(is(equalTo(npe)));

new LazyLoadingInterceptor(propertyMock, dbrefMock, new NullExceptionTranslator(), callbackMock).intercept(null,
LazyLoadingProxy.class.getMethod("getTarget"), null, null);
}

static class NullExceptionTranslator implements PersistenceExceptionTranslator {

@Override
public DataAccessException translateExceptionIfPossible(RuntimeException ex) {
return null;
}
}
}
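The behaviour pinned down by shouldPreserveCauseForNonTranslatableExceptions() is a translate-or-wrap decision; a simplified sketch (a plain RuntimeException stands in for LazyLoadingException so the snippet stays self-contained, and the message text is made up):

import org.springframework.dao.DataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;

class ExceptionWrappingSketch {

    static RuntimeException translateOrWrap(RuntimeException ex, PersistenceExceptionTranslator translator) {

        DataAccessException translated = translator.translateExceptionIfPossible(ex);

        // If the translator cannot help (returns null), the original exception must
        // still be preserved as the cause of whatever gets thrown instead.
        return translated != null ? translated : new RuntimeException("Unable to lazily resolve DBRef", ex);
    }
}
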
@@ -2066,6 +2066,14 @@ public class MappingMongoConverterUnitTests {
assertThat(target.map.get(FooBarEnum.FOO), is("spring"));
}

/**
* @see DATAMONGO-1471
*/
@Test
public void readsDocumentWithPrimitiveIdButNoValue() {
assertThat(converter.read(ClassWithIntId.class, new BasicDBObject()), is(notNullValue()));
}

static class GenericType<T> {
T content;
}

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2011-2014 by the original author(s).
* Copyright 2011-2016 by the original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,10 +15,13 @@
*/
package org.springframework.data.mongodb.core.convert;

import static org.hamcrest.CoreMatchers.*;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;

import java.math.BigDecimal;
import java.util.Currency;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

import org.junit.Test;
import org.springframework.data.geo.Box;
@@ -26,8 +29,14 @@ import org.springframework.data.geo.Circle;
import org.springframework.data.geo.Point;
import org.springframework.data.geo.Polygon;
import org.springframework.data.geo.Shape;
import org.springframework.data.mongodb.core.convert.MongoConverters.AtomicIntegerToIntegerConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.AtomicLongToLongConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.BigDecimalToStringConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.CurrencyToStringConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.IntegerToAtomicIntegerConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.LongToAtomicLongConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigDecimalConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToCurrencyConverter;
import org.springframework.data.mongodb.core.geo.Sphere;

import com.mongodb.DBObject;
@@ -37,6 +46,7 @@ import com.mongodb.DBObject;
*
* @author Oliver Gierke
* @author Thomas Darimont
* @author Christoph Strobl
*/
public class MongoConvertersUnitTests {

@@ -120,4 +130,52 @@ public class MongoConvertersUnitTests {

assertThat(converted, is((org.springframework.data.geo.Point) point));
}

/**
* @see DATAMONGO-1372
*/
@Test
public void convertsCurrencyToStringCorrectly() {
assertThat(CurrencyToStringConverter.INSTANCE.convert(Currency.getInstance("USD")), is("USD"));
}

/**
* @see DATAMONGO-1372
*/
@Test
public void convertsStringToCurrencyCorrectly() {
assertThat(StringToCurrencyConverter.INSTANCE.convert("USD"), is(Currency.getInstance("USD")));
}

/**
* @see DATAMONGO-1416
*/
@Test
public void convertsAtomicLongToLongCorrectly() {
assertThat(AtomicLongToLongConverter.INSTANCE.convert(new AtomicLong(100L)), is(100L));
}

/**
* @see DATAMONGO-1416
*/
@Test
public void convertsAtomicIntegerToIntegerCorrectly() {
assertThat(AtomicIntegerToIntegerConverter.INSTANCE.convert(new AtomicInteger(100)), is(100));
}

/**
* @see DATAMONGO-1416
*/
@Test
public void convertsLongToAtomicLongCorrectly() {
assertThat(LongToAtomicLongConverter.INSTANCE.convert(100L), is(instanceOf(AtomicLong.class)));
}

/**
* @see DATAMONGO-1416
*/
@Test
public void convertsIntegerToAtomicIntegerCorrectly() {
assertThat(IntegerToAtomicIntegerConverter.INSTANCE.convert(100), is(instanceOf(AtomicInteger.class)));
}
}
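The converters asserted above follow Spring's stateless enum-singleton Converter idiom; a sketch of that shape for the AtomicLong case (analogous to, but not, the framework's actual nested enum):

import java.util.concurrent.atomic.AtomicLong;

import org.springframework.core.convert.converter.Converter;

enum AtomicLongToLongSketch implements Converter<AtomicLong, Long> {

    INSTANCE;

    @Override
    public Long convert(AtomicLong source) {
        return source.get(); // unwraps the current value, matching convertsAtomicLongToLongCorrectly()
    }
}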