Compare commits
68 Commits
| SHA1 |
|---|
| c70d18a9bb |
| 465ed26fa5 |
| c4a53bcf65 |
| fde38dae3c |
| 008ca72888 |
| c50dfba2cf |
| 701153ac8f |
| 6b394e4da6 |
| 7007f484a8 |
| 72a24a923d |
| 7f5591c20c |
| bf3b5dee70 |
| 11838839cf |
| 116d384bc5 |
| 6ce5a26dc6 |
| 2716d1d503 |
| 32223fc00a |
| 1884b7a97a |
| 1f670bb5ed |
| 3e97d47248 |
| c02840ca30 |
| 975768c0d6 |
| 323ec3f1d6 |
| e480ceb2b7 |
| 9fedd8d8c3 |
| baddae25da |
| 5064cf3e9a |
| 81271a4f2f |
| c36f9988c7 |
| b49beb08b6 |
| 18b0946879 |
| cf3681f7c2 |
| 49ef3fbc74 |
| 01141502a0 |
| 5d8f3d5c8b |
| 5cf801ff8e |
| 2f3fb4aea9 |
| 82e05e7e8e |
| 5be2e3eea2 |
| 54f55e04de |
| 9ef1386784 |
| 696fd725c3 |
| e6fda2ccdd |
| 7a24bab9a2 |
| 38b7fb7105 |
| d42d06e058 |
| e2709abfe0 |
| 12b4aab834 |
| db06756c8f |
| b319b8a589 |
| a516795759 |
| bab08502a5 |
| 3e1f95bc94 |
| 5c153dc76e |
| 8f4e207d97 |
| 5000a40d72 |
| fb59f49dae |
| f3c1e014e9 |
| f52cc3be1f |
| 1bda93858c |
| 1808970daf |
| 558fc28cce |
| 16bef54f11 |
| d68a812e1b |
| ccb9f111d9 |
| f64b177c8f |
| c0c7ba767f |
| 7639701f3f |
Jenkinsfile (vendored, 24 lines changed)

@@ -76,6 +76,9 @@ pipeline {
 				label 'data'
 			}
 			options { timeout(time: 30, unit: 'MINUTES') }
+			environment {
+				ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
+			}
 			steps {
 				script {
 					docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
@@ -85,7 +88,7 @@ pipeline {
 						sh 'sleep 10'
 						sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
 						sh 'sleep 15'
-						sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
+						sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
 					}
 				}
 			}
@@ -105,6 +108,9 @@ pipeline {
 				label 'data'
 			}
 			options { timeout(time: 30, unit: 'MINUTES') }
+			environment {
+				ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
+			}
 			steps {
 				script {
 					docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
@@ -114,7 +120,7 @@ pipeline {
 						sh 'sleep 10'
 						sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
 						sh 'sleep 15'
-						sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
+						sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
 					}
 				}
 			}
@@ -126,6 +132,9 @@ pipeline {
 				label 'data'
 			}
 			options { timeout(time: 30, unit: 'MINUTES') }
+			environment {
+				ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
+			}
 			steps {
 				script {
 					docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
@@ -135,7 +144,7 @@ pipeline {
 						sh 'sleep 10'
 						sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
 						sh 'sleep 15'
-						sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
+						sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
 					}
 				}
 			}
@@ -147,6 +156,9 @@ pipeline {
 				label 'data'
 			}
 			options { timeout(time: 30, unit: 'MINUTES') }
+			environment {
+				ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
+			}
 			steps {
 				script {
 					docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
@@ -156,7 +168,7 @@ pipeline {
 						sh 'sleep 10'
 						sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
 						sh 'sleep 15'
-						sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pjava11 clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
+						sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pjava11 clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
 					}
 				}
 			}
@@ -185,7 +197,7 @@ pipeline {
 				script {
 					docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
 						docker.image('adoptopenjdk/openjdk8:latest').inside('-v $HOME:/tmp/jenkins-home') {
-							sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,artifactory ' +
+							sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pci,artifactory ' +
 									'-Dartifactory.server=https://repo.spring.io ' +
 									"-Dartifactory.username=${ARTIFACTORY_USR} " +
 									"-Dartifactory.password=${ARTIFACTORY_PSW} " +
@@ -216,7 +228,7 @@ pipeline {
 				script {
 					docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
 						docker.image('adoptopenjdk/openjdk8:latest').inside('-v $HOME:/tmp/jenkins-home') {
-							sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,distribute ' +
+							sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pci,distribute ' +
 									'-Dartifactory.server=https://repo.spring.io ' +
 									"-Dartifactory.username=${ARTIFACTORY_USR} " +
 									"-Dartifactory.password=${ARTIFACTORY_PSW} " +
LICENSE.txt (new file, 202 lines)

@@ -0,0 +1,202 @@

                                 Apache License
                           Version 2.0, January 2004
                        https://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "{}"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright {yyyy} {name of copyright owner}

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       https://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
pom.xml (11 lines changed)

@@ -5,7 +5,7 @@

 	<groupId>org.springframework.data</groupId>
 	<artifactId>spring-data-mongodb-parent</artifactId>
-	<version>3.1.6</version>
+	<version>3.1.14</version>
 	<packaging>pom</packaging>

 	<name>Spring Data MongoDB</name>
@@ -15,7 +15,7 @@
 	<parent>
 		<groupId>org.springframework.data.build</groupId>
 		<artifactId>spring-data-parent</artifactId>
-		<version>2.4.6</version>
+		<version>2.4.14</version>
 	</parent>

 	<modules>
@@ -26,7 +26,7 @@
 	<properties>
 		<project.type>multi</project.type>
 		<dist.id>spring-data-mongodb</dist.id>
-		<springdata.commons>2.4.6</springdata.commons>
+		<springdata.commons>2.4.14</springdata.commons>
 		<mongo>4.1.2</mongo>
 		<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
 		<jmh.version>1.19</jmh.version>
@@ -158,11 +158,6 @@
 			<id>spring-libs-milestone</id>
 			<url>https://repo.spring.io/libs-milestone</url>
 		</pluginRepository>
-		<pluginRepository>
-			<id>bintray-plugins</id>
-			<name>bintray-plugins</name>
-			<url>https://jcenter.bintray.com</url>
-		</pluginRepository>
 	</pluginRepositories>

</project>
settings.xml (new file, 29 lines)

@@ -0,0 +1,29 @@
<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
		  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
		  xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0
						https://maven.apache.org/xsd/settings-1.0.0.xsd">

	<servers>
		<server>
			<id>spring-plugins-release</id>
			<username>${env.ARTIFACTORY_USR}</username>
			<password>${env.ARTIFACTORY_PSW}</password>
		</server>
		<server>
			<id>spring-libs-snapshot</id>
			<username>${env.ARTIFACTORY_USR}</username>
			<password>${env.ARTIFACTORY_PSW}</password>
		</server>
		<server>
			<id>spring-libs-milestone</id>
			<username>${env.ARTIFACTORY_USR}</username>
			<password>${env.ARTIFACTORY_PSW}</password>
		</server>
		<server>
			<id>spring-libs-release</id>
			<username>${env.ARTIFACTORY_USR}</username>
			<password>${env.ARTIFACTORY_PSW}</password>
		</server>
	</servers>

</settings>
The module pom.xml files receive the same parent version bump:

@@ -7,7 +7,7 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>3.1.6</version>
+		<version>3.1.14</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -14,7 +14,7 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>3.1.6</version>
+		<version>3.1.14</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -11,7 +11,7 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>3.1.6</version>
+		<version>3.1.14</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
MappingMongoConverter.java:

@@ -75,6 +75,7 @@ import org.springframework.util.Assert;
 import org.springframework.util.ClassUtils;
 import org.springframework.util.CollectionUtils;
 import org.springframework.util.ObjectUtils;
+import org.springframework.util.StringUtils;

 import com.mongodb.BasicDBList;
 import com.mongodb.BasicDBObject;
@@ -182,6 +183,9 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
 	 * any translation but rather reject a {@link Map} with keys containing dots causing the conversion for the entire
 	 * object to fail. If further customization of the translation is needed, have a look at
 	 * {@link #potentiallyEscapeMapKey(String)} as well as {@link #potentiallyUnescapeMapKey(String)}.
+	 * <p>
+	 * {@code mapKeyDotReplacement} is used as-is during replacement operations without further processing (i.e. regex or
+	 * normalization).
 	 *
 	 * @param mapKeyDotReplacement the mapKeyDotReplacement to set. Can be {@literal null}.
 	 */
@@ -900,7 +904,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
 					source));
 		}

-		return source.replaceAll("\\.", mapKeyDotReplacement);
+		return StringUtils.replace(source, ".", mapKeyDotReplacement);
 	}

 	/**
@@ -928,7 +932,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
 	 * @return
 	 */
 	protected String potentiallyUnescapeMapKey(String source) {
-		return mapKeyDotReplacement == null ? source : source.replaceAll(mapKeyDotReplacement, "\\.");
+		return mapKeyDotReplacement == null ? source : StringUtils.replace(source, mapKeyDotReplacement, ".");
 	}

 	/**
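The switch from `String.replaceAll` to `StringUtils.replace` matters because `replaceAll` compiles its first argument as a regular expression and treats the second as a replacement pattern, so a `mapKeyDotReplacement` containing regex metacharacters could corrupt keys or throw outright. A JDK-only sketch of the difference (the diff uses Spring's `StringUtils.replace`, which is likewise a literal replacement):

```java
public class LiteralVsRegexReplaceDemo {

	public static void main(String[] args) {

		String key = "version.number";

		// Literal replacement, as the fixed code does: safe for any replacement string.
		System.out.println(key.replace(".", "$")); // version$number

		// Regex-based replacement, as the old code did: "$" is a group reference
		// in the replacement pattern, so this throws IllegalArgumentException.
		try {
			key.replaceAll("\\.", "$");
		} catch (IllegalArgumentException e) {
			System.out.println("replaceAll rejected the replacement: " + e.getMessage());
		}
	}
}
```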
MongoConverter.java:

@@ -39,13 +39,14 @@ import com.mongodb.DBRef;
 * @author Thomas Darimont
 * @author Christoph Strobl
 * @author Mark Paluch
+ * @author Ryan Gibb
 */
public interface MongoConverter
		extends EntityConverter<MongoPersistentEntity<?>, MongoPersistentProperty, Object, Bson>, MongoWriter<Object>,
		EntityReader<Object, Bson> {

	/**
-	 * Returns thw {@link TypeMapper} being used to write type information into {@link Document}s created with that
+	 * Returns the {@link TypeMapper} being used to write type information into {@link Document}s created with that
	 * converter.
	 *
	 * @return will never be {@literal null}.
QueryMapper.java:

@@ -19,11 +19,14 @@ import java.util.*;
 import java.util.Map.Entry;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import java.util.stream.Collectors;

 import org.bson.BsonValue;
 import org.bson.Document;
 import org.bson.conversions.Bson;
 import org.bson.types.ObjectId;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.core.convert.ConversionService;
 import org.springframework.core.convert.converter.Converter;
 import org.springframework.data.domain.Example;
@@ -63,9 +66,12 @@ import com.mongodb.DBRef;
 * @author Thomas Darimont
 * @author Christoph Strobl
 * @author Mark Paluch
+ * @author David Julia
 */
public class QueryMapper {

+	protected static final Logger LOGGER = LoggerFactory.getLogger(QueryMapper.class);
+
 	private static final List<String> DEFAULT_ID_NAMES = Arrays.asList("id", "_id");
 	private static final Document META_TEXT_SCORE = new Document("$meta", "textScore");
 	static final ClassTypeInformation<?> NESTED_DOCUMENT = ClassTypeInformation.from(NestedDocument.class);
@@ -378,6 +384,10 @@ public class QueryMapper {
 			}
 		}

+		if (value == null) {
+			return null;
+		}
+
 		if (isNestedKeyword(value)) {
 			return getMappedKeyword(new Keyword((Bson) value), documentField.getPropertyEntity());
 		}
@@ -635,7 +645,7 @@ public class QueryMapper {
 	 * @param candidate
 	 * @return
 	 */
-	protected boolean isNestedKeyword(Object candidate) {
+	protected boolean isNestedKeyword(@Nullable Object candidate) {

 		if (!(candidate instanceof Document)) {
 			return false;
@@ -680,12 +690,14 @@ public class QueryMapper {
 	 * converted one by one.
 	 *
 	 * @param documentField the field and its meta data
-	 * @param value the actual value
+	 * @param value the actual value. Can be {@literal null}.
 	 * @return the potentially converted target value.
 	 */
+	@Nullable
-	private Object applyFieldTargetTypeHintToValue(Field documentField, Object value) {
+	private Object applyFieldTargetTypeHintToValue(Field documentField, @Nullable Object value) {

-		if (documentField.getProperty() == null || !documentField.getProperty().hasExplicitWriteTarget()) {
+		if (value == null || documentField.getProperty() == null || !documentField.getProperty().hasExplicitWriteTarget()
+				|| value instanceof Document || value instanceof DBObject) {
 			return value;
 		}
@@ -716,7 +728,6 @@ public class QueryMapper {
 	 */
 	static class Keyword {

-		private static final String N_OR_PATTERN = "\\$.*or";
 		private static final Set<String> NON_DBREF_CONVERTING_KEYWORDS = new HashSet<>(
 				Arrays.asList("$", "$size", "$slice", "$gt", "$lt"));

@@ -747,7 +758,7 @@ public class QueryMapper {
 		}

 		public boolean isOrOrNor() {
-			return key.matches(N_OR_PATTERN);
+			return key.equalsIgnoreCase("$or") || key.equalsIgnoreCase("$nor");
 		}

 		/**
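The `isOrOrNor()` fix closes a false positive: the removed `\$.*or` pattern matched any keyword starting with `$` and ending in `or`, so operators such as `$floor` were mistaken for `$or`/`$nor` (compare the `$floorKeywordDoesNotMatch$or$norPattern` test further down). A JDK-only sketch of the mismatch:

```java
public class OrNorPatternDemo {

	public static void main(String[] args) {

		String nOrPattern = "\\$.*or"; // the removed N_OR_PATTERN

		// Old check: $floor happens to start with '$' and end with 'or'.
		System.out.println("$floor".matches(nOrPattern)); // true (wrong)

		// New check: exact, case-insensitive comparison against the two keywords.
		System.out.println("$floor".equalsIgnoreCase("$or")
				|| "$floor".equalsIgnoreCase("$nor")); // false (correct)
	}
}
```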
@@ -1086,8 +1097,8 @@ public class QueryMapper {
 				removePlaceholders(DOT_POSITIONAL_PATTERN, pathExpression));

 		if (sourceProperty != null && sourceProperty.getOwner().equals(entity)) {
-			return mappingContext
-					.getPersistentPropertyPath(PropertyPath.from(sourceProperty.getName(), entity.getTypeInformation()));
+			return mappingContext.getPersistentPropertyPath(
+					PropertyPath.from(Pattern.quote(sourceProperty.getName()), entity.getTypeInformation()));
 		}

 		PropertyPath path = forName(rawPath);
@@ -1095,29 +1106,47 @@ public class QueryMapper {
 			return null;
 		}

-		try {
+		PersistentPropertyPath<MongoPersistentProperty> propertyPath = tryToResolvePersistentPropertyPath(path);

-			PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext.getPersistentPropertyPath(path);
+		if (propertyPath == null) {

-			Iterator<MongoPersistentProperty> iterator = propertyPath.iterator();
-			boolean associationDetected = false;
+			if (QueryMapper.LOGGER.isInfoEnabled()) {

-			while (iterator.hasNext()) {
+				String types = StringUtils.collectionToDelimitedString(
+						path.stream().map(it -> it.getType().getSimpleName()).collect(Collectors.toList()), " -> ");
+				QueryMapper.LOGGER.info(
+						"Could not map '{}'. Maybe a fragment in '{}' is considered a simple type. Mapper continues with {}.",
+						path, types, pathExpression);
+			}
+			return null;
+		}

-				MongoPersistentProperty property = iterator.next();
+		Iterator<MongoPersistentProperty> iterator = propertyPath.iterator();
+		boolean associationDetected = false;

-				if (property.isAssociation()) {
-					associationDetected = true;
-					continue;
-				}
+		while (iterator.hasNext()) {

-				if (associationDetected && !property.isIdProperty()) {
-					throw new MappingException(String.format(INVALID_ASSOCIATION_REFERENCE, pathExpression));
-				}
-			}
+			MongoPersistentProperty property = iterator.next();

-			return propertyPath;
-		} catch (InvalidPersistentPropertyPath e) {
+			if (property.isAssociation()) {
+				associationDetected = true;
+				continue;
+			}

+			if (associationDetected && !property.isIdProperty()) {
+				throw new MappingException(String.format(INVALID_ASSOCIATION_REFERENCE, pathExpression));
+			}
+		}
+
+		return propertyPath;
+	}
+
+	@Nullable
+	private PersistentPropertyPath<MongoPersistentProperty> tryToResolvePersistentPropertyPath(PropertyPath path) {
+
+		try {
+			return mappingContext.getPersistentPropertyPath(path);
+		} catch (MappingException e) {
+			return null;
+		}
+	}
@@ -1146,6 +1175,13 @@ public class QueryMapper {
 			return forName(path.substring(0, path.length() - 3) + "id");
 		}

+		// Ok give it another try quoting
+		try {
+			return PropertyPath.from(Pattern.quote(path), entity.getTypeInformation());
+		} catch (PropertyReferenceException | InvalidPersistentPropertyPath ex) {
+
+		}
+
 		return null;
 	}
 }
@@ -1239,12 +1275,17 @@ public class QueryMapper {
 	static class KeyMapper {

 		private final Iterator<String> iterator;
+		private int currentIndex;
+		private String currentPropertyRoot;
+		private final List<String> pathParts;

 		public KeyMapper(String key,
 				MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {

-			this.iterator = Arrays.asList(key.split("\\.")).iterator();
-			this.iterator.next();
+			this.pathParts = Arrays.asList(key.split("\\."));
+			this.iterator = pathParts.iterator();
+			this.currentPropertyRoot = iterator.next();
+			this.currentIndex = 0;
 		}

 		/**
@@ -1256,21 +1297,31 @@ public class QueryMapper {
 		protected String mapPropertyName(MongoPersistentProperty property) {

 			StringBuilder mappedName = new StringBuilder(PropertyToFieldNameConverter.INSTANCE.convert(property));

 			boolean inspect = iterator.hasNext();

 			while (inspect) {

 				String partial = iterator.next();
+				currentIndex++;

-				boolean isPositional = isPositionalParameter(partial) && property.isCollectionLike();
+				boolean isPositional = isPositionalParameter(partial) && property.isCollectionLike() ;
+				if(property.isMap() && currentPropertyRoot.equals(partial) && iterator.hasNext()){
+					partial = iterator.next();
+					currentIndex++;
+				}

-				if (isPositional || property.isMap()) {
+				if (isPositional || property.isMap() && !currentPropertyRoot.equals(partial)) {
 					mappedName.append(".").append(partial);
 				}

 				inspect = isPositional && iterator.hasNext();
 			}

+			if(currentIndex + 1 < pathParts.size()) {
+				currentIndex++;
+				currentPropertyRoot = pathParts.get(currentIndex);
+			}
 			return mappedName.toString();
 		}
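The reworked `KeyMapper` tracks its position in the split path (`currentIndex`/`currentPropertyRoot`), which is what lets paths containing several numeric map keys survive mapping; previously only the first numeric key was retained. A sketch mirroring the new unit tests further down (the entity fixtures `EntityWithIntKeyedMap` and `EntityWithIntKeyedMapOfMap` come from those tests):

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

class NumericMapKeyPaths {

	// An update path with more than one numeric map key now maps through unchanged;
	// expected mapped form: {"$set": {"intKeyedMap.12345.map.0": "testing"}}
	Update update = new Update().set("intKeyedMap.12345.map.0", "testing");

	// Likewise for queries: the mapped key keeps both numeric parts,
	// "outerMap.1.map.2.stringProperty".
	Query query = query(where("outerMap.1.map.2.stringProperty").is("ba'alzamon"));
}
```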
GeoJSON interface (org.springframework.data.mongodb.core.geo):

@@ -16,7 +16,7 @@
 package org.springframework.data.mongodb.core.geo;

 /**
- * Interface definition for structures defined in <a href="https://geojson.org/>GeoJSON</a> format.
+ * Interface definition for structures defined in <a href="https://geojson.org/">GeoJSON</a> format.
 *
 * @author Christoph Strobl
 * @since 1.7
Criteria.java:

@@ -20,8 +20,10 @@ import static org.springframework.util.ObjectUtils.*;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
@@ -58,6 +60,7 @@ import com.mongodb.BasicDBList;
 * @author Christoph Strobl
 * @author Mark Paluch
 * @author Andreas Zink
+ * @author Clément Petit
 */
public class Criteria implements CriteriaDefinition {

@@ -895,15 +898,15 @@ public class Criteria implements CriteriaDefinition {
 	 * @param right
 	 * @return
 	 */
-	private boolean isEqual(Object left, Object right) {
+	private boolean isEqual(@Nullable Object left, @Nullable Object right) {

 		if (left == null) {
 			return right == null;
 		}

-		if (Pattern.class.isInstance(left)) {
+		if (left instanceof Pattern) {

-			if (!Pattern.class.isInstance(right)) {
+			if (!(right instanceof Pattern)) {
 				return false;
 			}
@@ -914,6 +917,52 @@ public class Criteria implements CriteriaDefinition {
 					&& leftPattern.flags() == rightPattern.flags();
 		}

+		if (left instanceof Document) {
+
+			if (!(right instanceof Document)) {
+				return false;
+			}
+
+			Document leftDocument = (Document) left;
+			Document rightDocument = (Document) right;
+			Iterator<Entry<String, Object>> leftIterator = leftDocument.entrySet().iterator();
+			Iterator<Entry<String, Object>> rightIterator = rightDocument.entrySet().iterator();
+
+			while (leftIterator.hasNext() && rightIterator.hasNext()) {
+
+				Map.Entry<String, Object> leftEntry = leftIterator.next();
+				Map.Entry<String, Object> rightEntry = rightIterator.next();
+
+				if (!isEqual(leftEntry.getKey(), rightEntry.getKey())
+						|| !isEqual(leftEntry.getValue(), rightEntry.getValue())) {
+					return false;
+				}
+			}
+
+			return !leftIterator.hasNext() && !rightIterator.hasNext();
+		}
+
+		if (Collection.class.isAssignableFrom(left.getClass())) {
+
+			if (!Collection.class.isAssignableFrom(right.getClass())) {
+				return false;
+			}
+
+			Collection<?> leftCollection = (Collection<?>) left;
+			Collection<?> rightCollection = (Collection<?>) right;
+			Iterator<?> leftIterator = leftCollection.iterator();
+			Iterator<?> rightIterator = rightCollection.iterator();
+
+			while (leftIterator.hasNext() && rightIterator.hasNext()) {
+
+				if (!isEqual(leftIterator.next(), rightIterator.next())) {
+					return false;
+				}
+			}
+
+			return !leftIterator.hasNext() && !rightIterator.hasNext();
+		}
+
 		return ObjectUtils.nullSafeEquals(left, right);
 	}
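Criteria needs this hand-rolled, element-wise comparison because criteria values may be `java.util.regex.Pattern` instances, and `Pattern` does not override `equals()`; plain `Map`/`Collection` equality would therefore report two identical regex criteria as different. A JDK-only sketch of the underlying issue:

```java
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;

public class PatternEqualityDemo {

	public static void main(String[] args) {

		// Pattern compares by identity, not by pattern text.
		System.out.println(Pattern.compile("foo").equals(Pattern.compile("foo"))); // false

		// So containers holding equal regexes also compare unequal, which is why
		// isEqual() walks Documents and Collections recursively, comparing Patterns
		// by pattern() and flags() instead of by reference.
		List<Pattern> left = Arrays.asList(Pattern.compile("foo"));
		List<Pattern> right = Arrays.asList(Pattern.compile("foo"));
		System.out.println(left.equals(right)); // false
	}
}
```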
Field.java:

@@ -15,6 +15,7 @@
 */
package org.springframework.data.mongodb.core.query;

+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -136,7 +137,7 @@ public class Field {
 	 */
 	public Field slice(String field, int offset, int size) {

-		slices.put(field, new Integer[] { offset, size });
+		slices.put(field, Arrays.asList(offset, size));
 		return this;
 	}
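With a `List` value the two-argument slice projection renders as a proper BSON array; the previous `Integer[]` value failed to encode (presumably because the default codec registry handles `List` but has no codec registered for `Integer[]`). Usage, mirroring the new `sliceShouldLimitCollectionValues` template test:

```java
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

class SliceProjectionExample {

	Query sliceFirstValue() {

		// Project only the first element of the "values" array:
		// { "values": { "$slice": [0, 1] } }
		Query query = Query.query(Criteria.where("id").is("id-1"));
		query.fields().slice("values", 0, 1); // offset 0, size 1
		return query;
	}
}
```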
AggregationUtils.java:

@@ -33,6 +33,7 @@ import org.springframework.data.repository.query.QueryMethodEvaluationContextPro
 import org.springframework.expression.ExpressionParser;
 import org.springframework.lang.Nullable;
 import org.springframework.util.ClassUtils;
+import org.springframework.util.ObjectUtils;
 import org.springframework.util.StringUtils;

 /**
@@ -163,9 +164,9 @@ abstract class AggregationUtils {
 	 * @throws IllegalArgumentException when none of the above rules is met.
 	 */
 	@Nullable
-	static <T> T extractSimpleTypeResult(Document source, Class<T> targetType, MongoConverter converter) {
+	static <T> T extractSimpleTypeResult(@Nullable Document source, Class<T> targetType, MongoConverter converter) {

-		if (source.isEmpty()) {
+		if (ObjectUtils.isEmpty(source)) {
 			return null;
 		}
QuerydslMongoPredicateExecutor.java:

@@ -212,6 +212,10 @@ public class QuerydslMongoPredicateExecutor<T> extends QuerydslPredicateExecutor
 	 */
 	private SpringDataMongodbQuery<T> applyPagination(SpringDataMongodbQuery<T> query, Pageable pageable) {

+		if (pageable.isUnpaged()) {
+			return query;
+		}
+
 		query = query.offset(pageable.getOffset()).limit(pageable.getPageSize());
 		return applySorting(query, pageable.getSort());
 	}
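The guard is needed because an unpaged `Pageable` carries no offset or page size; in Spring Data Commons, `Pageable.unpaged().getOffset()` and `getPageSize()` throw `UnsupportedOperationException`, so pagination must be skipped before those accessors are touched. A minimal sketch:

```java
import org.springframework.data.domain.Pageable;

public class UnpagedGuardDemo {

	public static void main(String[] args) {

		Pageable pageable = Pageable.unpaged();

		// Without the isUnpaged() check, getOffset()/getPageSize() would throw
		// UnsupportedOperationException for an unpaged instance.
		if (!pageable.isUnpaged()) {
			System.out.println("paged: offset=" + pageable.getOffset()
					+ ", size=" + pageable.getPageSize());
		} else {
			System.out.println("unpaged: leave the query as-is");
		}
	}
}
```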
SimpleMongoRepository.java:

@@ -18,6 +18,7 @@ package org.springframework.data.mongodb.repository.support;
 import static org.springframework.data.mongodb.core.query.Criteria.*;

 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 import java.util.Optional;
@@ -215,7 +216,7 @@ public class SimpleMongoRepository<T, ID> implements MongoRepository<T, ID> {
 		Assert.notNull(ids, "The given Ids of entities not be null!");

 		return findAll(new Query(new Criteria(entityInformation.getIdAttribute())
-				.in(Streamable.of(ids).stream().collect(StreamUtils.toUnmodifiableList()))));
+				.in(toCollection(ids))));
 	}

 	/*
@@ -266,10 +267,10 @@ public class SimpleMongoRepository<T, ID> implements MongoRepository<T, ID> {

 		Assert.notNull(entities, "The given Iterable of entities not be null!");

-		List<S> list = Streamable.of(entities).stream().collect(StreamUtils.toUnmodifiableList());
+		Collection<S> list = toCollection(entities);

 		if (list.isEmpty()) {
-			return list;
+			return Collections.emptyList();
 		}

 		return new ArrayList<>(mongoOperations.insertAll(list));
@@ -374,6 +375,11 @@ public class SimpleMongoRepository<T, ID> implements MongoRepository<T, ID> {
 		return where(entityInformation.getIdAttribute()).is(id);
 	}

+	private static <E> Collection<E> toCollection(Iterable<E> ids) {
+		return ids instanceof Collection ? (Collection<E>) ids
+				: StreamUtils.createStreamFromIterator(ids.iterator()).collect(Collectors.toList());
+	}
+
 	private List<T> findAll(@Nullable Query query) {

 		if (query == null) {
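The new `toCollection` helper avoids copying when the caller already passes a `Collection` (the common case) and materializes any other `Iterable` exactly once, instead of re-streaming the ids or entities per use. A JDK-only sketch of the same idea (the diff itself uses Spring's `StreamUtils.createStreamFromIterator`):

```java
import java.util.Collection;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

final class Iterables {

	// Pass Collections through untouched; collect anything else into a List once.
	@SuppressWarnings("unchecked")
	static <E> Collection<E> toCollection(Iterable<E> source) {
		return source instanceof Collection //
				? (Collection<E>) source
				: StreamSupport.stream(source.spliterator(), false).collect(Collectors.toList());
	}
}
```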
SimpleReactiveMongoRepository.java:

@@ -21,10 +21,11 @@ import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;

 import java.io.Serializable;
-import java.util.List;
+import java.util.Collection;
+import java.util.stream.Collectors;

 import org.reactivestreams.Publisher;

 import org.springframework.dao.IncorrectResultSizeDataAccessException;
 import org.springframework.dao.OptimisticLockingFailureException;
 import org.springframework.data.domain.Example;
@@ -47,6 +48,7 @@ import com.mongodb.client.result.DeleteResult;
 * @author Oliver Gierke
 * @author Christoph Strobl
 * @author Ruben J Garcia
+ * @author Clément Petit
 * @since 2.0
 */
public class SimpleReactiveMongoRepository<T, ID extends Serializable> implements ReactiveMongoRepository<T, ID> {
@@ -173,7 +175,7 @@ public class SimpleReactiveMongoRepository<T, ID extends Serializable> implement
 		Assert.notNull(ids, "The given Iterable of Id's must not be null!");

 		return findAll(new Query(new Criteria(entityInformation.getIdAttribute())
-				.in(Streamable.of(ids).stream().collect(StreamUtils.toUnmodifiableList()))));
+				.in(toCollection(ids))));
 	}

 	/*
@@ -274,9 +276,9 @@ public class SimpleReactiveMongoRepository<T, ID extends Serializable> implement

 		Assert.notNull(entities, "The given Iterable of entities must not be null!");

-		List<S> source = Streamable.of(entities).stream().collect(StreamUtils.toUnmodifiableList());
+		Collection<S> source = toCollection(entities);

-		return source.isEmpty() ? Flux.empty() : Flux.from(mongoOperations.insertAll(source));
+		return source.isEmpty() ? Flux.empty() : mongoOperations.insertAll(source);
 	}

 	/*
@@ -333,8 +335,8 @@ public class SimpleReactiveMongoRepository<T, ID extends Serializable> implement
 		Assert.notNull(entityStream, "The given Publisher of entities must not be null!");

 		return Flux.from(entityStream).flatMap(entity -> entityInformation.isNew(entity) ? //
-				mongoOperations.insert(entity, entityInformation.getCollectionName()).then(Mono.just(entity)) : //
-				mongoOperations.save(entity, entityInformation.getCollectionName()).then(Mono.just(entity)));
+				mongoOperations.insert(entity, entityInformation.getCollectionName()) : //
+				mongoOperations.save(entity, entityInformation.getCollectionName()));
 	}

 	/*
@@ -436,8 +438,12 @@ public class SimpleReactiveMongoRepository<T, ID extends Serializable> implement
 		return where(entityInformation.getIdAttribute()).is(id);
 	}

-	private Flux<T> findAll(Query query) {
+	private static <E> Collection<E> toCollection(Iterable<E> ids) {
+		return ids instanceof Collection ? (Collection<E>) ids
+				: StreamUtils.createStreamFromIterator(ids.iterator()).collect(Collectors.toList());
+	}

+	private Flux<T> findAll(Query query) {
 		return mongoOperations.find(query, entityInformation.getJavaType(), entityInformation.getCollectionName());
 	}
}
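Dropping `.then(Mono.just(entity))` in `saveAll` matters because `then(...)` discards whatever the source publisher emits and substitutes its argument; the value actually produced by the insert/save operation (e.g. with server-generated fields populated) was thrown away in favor of the original input object. A minimal Reactor sketch of that operator behavior:

```java
import reactor.core.publisher.Mono;

public class ThenDiscardsEmissionsDemo {

	public static void main(String[] args) {

		Mono<String> operation = Mono.just("entity-as-persisted");

		// then(...) ignores the operation's emission and emits its argument instead.
		operation.then(Mono.just("original-input"))
				.subscribe(System.out::println); // original-input

		// Returning the operation's own publisher propagates the produced value.
		operation.subscribe(System.out::println); // entity-as-persisted
	}
}
```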
MongoTemplateTests.java:

@@ -3703,6 +3703,23 @@ public class MongoTemplateTests {
 		assertThat(template.find(new BasicQuery("{}").with(Sort.by("id")), WithIdAndFieldAnnotation.class)).isNotEmpty();
 	}

+	@Test // GH-3811
+	public void sliceShouldLimitCollectionValues() {
+
+		DocumentWithCollectionOfSimpleType source = new DocumentWithCollectionOfSimpleType();
+		source.id = "id-1";
+		source.values = Arrays.asList("spring", "data", "mongodb");
+
+		template.save(source);
+
+		Criteria criteria = Criteria.where("id").is(source.id);
+		Query query = Query.query(criteria);
+		query.fields().slice("values", 0, 1);
+		DocumentWithCollectionOfSimpleType target = template.findOne(query, DocumentWithCollectionOfSimpleType.class);
+
+		assertThat(target.values).containsExactly("spring");
+	}
+
 	private AtomicReference<ImmutableVersioned> createAfterSaveReference() {

 		AtomicReference<ImmutableVersioned> saved = new AtomicReference<>();
QueryMapperUnitTests.java:

@@ -33,14 +33,13 @@ import org.bson.types.Code;
 import org.bson.types.ObjectId;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.Mock;
 import org.mockito.junit.jupiter.MockitoExtension;

 import org.springframework.core.convert.converter.Converter;
 import org.springframework.data.annotation.Id;
 import org.springframework.data.convert.WritingConverter;
 import org.springframework.data.domain.Sort;
 import org.springframework.data.domain.Sort.Direction;
 import org.springframework.data.geo.Point;
 import org.springframework.data.mongodb.MongoDatabaseFactory;
 import org.springframework.data.mongodb.core.DocumentTestUtils;
 import org.springframework.data.mongodb.core.Person;
 import org.springframework.data.mongodb.core.geo.GeoJsonPoint;
@@ -50,6 +49,7 @@ import org.springframework.data.mongodb.core.mapping.DBRef;
 import org.springframework.data.mongodb.core.mapping.Document;
 import org.springframework.data.mongodb.core.mapping.Field;
+import org.springframework.data.mongodb.core.mapping.FieldType;
 import org.springframework.data.mongodb.core.mapping.MongoId;
 import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
 import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
 import org.springframework.data.mongodb.core.mapping.TextScore;
@@ -70,22 +70,23 @@ import com.mongodb.client.model.Filters;
 * @author Thomas Darimont
 * @author Christoph Strobl
 * @author Mark Paluch
+ * @author David Julia
 */
@ExtendWith(MockitoExtension.class)
public class QueryMapperUnitTests {

	private QueryMapper mapper;
	private MongoMappingContext context;
	private MappingMongoConverter converter;

	@Mock MongoDatabaseFactory factory;

	@BeforeEach
	void beforeEach() {

		MongoCustomConversions conversions = new MongoCustomConversions();
		this.context = new MongoMappingContext();
		this.context.setSimpleTypeHolder(conversions.getSimpleTypeHolder());

-		this.converter = new MappingMongoConverter(new DefaultDbRefResolver(factory), context);
+		this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context);
		this.converter.setCustomConversions(conversions);
		this.converter.afterPropertiesSet();

		this.mapper = new QueryMapper(converter);
@@ -731,6 +732,28 @@ public class QueryMapperUnitTests {
 		assertThat(document).containsKey("map.1.stringProperty");
 	}

+	@Test // GH-3688
+	void mappingShouldRetainNestedNumericMapKeys() {
+
+		Query query = query(where("outerMap.1.map.2.stringProperty").is("ba'alzamon"));
+
+		org.bson.Document document = mapper.getMappedObject(query.getQueryObject(),
+				context.getPersistentEntity(EntityWithIntKeyedMapOfMap.class));
+
+		assertThat(document).containsKey("outerMap.1.map.2.stringProperty");
+	}
+
+	@Test // GH-3688
+	void mappingShouldAllowSettingEntireNestedNumericKeyedMapValue() {
+
+		Query query = query(where("outerMap.1.map").is(null)); //newEntityWithComplexValueTypeMap()
+
+		org.bson.Document document = mapper.getMappedObject(query.getQueryObject(),
+				context.getPersistentEntity(EntityWithIntKeyedMapOfMap.class));
+
+		assertThat(document).containsKey("outerMap.1.map");
+	}
+
 	@Test // DATAMONGO-1269
 	void mappingShouldRetainNumericPositionInList() {

@@ -781,7 +804,8 @@ public class QueryMapperUnitTests {
 		Query query = query(byExample(probe).and("listOfItems").exists(true));
 		org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(Foo.class));

-		assertThat(document).containsEntry("embedded\\._id", "conflux").containsEntry("my_items", new org.bson.Document("$exists", true));
+		assertThat(document).containsEntry("embedded\\._id", "conflux").containsEntry("my_items",
+				new org.bson.Document("$exists", true));
 	}

 	@Test // DATAMONGO-1988
@@ -1011,6 +1035,136 @@ public class QueryMapperUnitTests {
 		assertThat(target).isEqualTo(org.bson.Document.parse("{\"$text\" : { \"$search\" : \"test\" }}"));
 	}

+	@Test // GH-3601
+	void resolvesFieldnameWithUnderscoresCorrectly() {
+
+		Query query = query(where("fieldname_with_underscores").exists(true));
+
+		org.bson.Document document = mapper.getMappedObject(query.getQueryObject(),
+				context.getPersistentEntity(WithPropertyUsingUnderscoreInName.class));
+
+		assertThat(document)
+				.isEqualTo(new org.bson.Document("fieldname_with_underscores", new org.bson.Document("$exists", true)));
+	}
+
+	@Test // GH-3601
+	void resolvesMappedFieldnameWithUnderscoresCorrectly() {
+
+		Query query = query(where("renamed_fieldname_with_underscores").exists(true));
+
+		org.bson.Document document = mapper.getMappedObject(query.getQueryObject(),
+				context.getPersistentEntity(WithPropertyUsingUnderscoreInName.class));
+
+		assertThat(document).isEqualTo(new org.bson.Document("renamed", new org.bson.Document("$exists", true)));
+	}
+
+	@Test // GH-3601
+	void resolvesSimpleNestedFieldnameWithUnderscoresCorrectly() {
+
+		Query query = query(where("simple.fieldname_with_underscores").exists(true));
+
+		org.bson.Document document = mapper.getMappedObject(query.getQueryObject(),
+				context.getPersistentEntity(WrapperAroundWithPropertyUsingUnderscoreInName.class));
+
+		assertThat(document)
+				.isEqualTo(new org.bson.Document("simple.fieldname_with_underscores", new org.bson.Document("$exists", true)));
+	}
+
+	@Test // GH-3601
+	void resolvesSimpleNestedMappedFieldnameWithUnderscoresCorrectly() {
+
+		Query query = query(where("simple.renamed_fieldname_with_underscores").exists(true));
+
+		org.bson.Document document = mapper.getMappedObject(query.getQueryObject(),
+				context.getPersistentEntity(WrapperAroundWithPropertyUsingUnderscoreInName.class));
+
+		assertThat(document).isEqualTo(new org.bson.Document("simple.renamed", new org.bson.Document("$exists", true)));
+	}
+
+	@Test // GH-3601
+	void resolvesFieldNameWithUnderscoreOnNestedFieldnameWithUnderscoresCorrectly() {
+
+		Query query = query(where("double_underscore.fieldname_with_underscores").exists(true));
+
+		org.bson.Document document = mapper.getMappedObject(query.getQueryObject(),
+				context.getPersistentEntity(WrapperAroundWithPropertyUsingUnderscoreInName.class));
+
+		assertThat(document).isEqualTo(
+				new org.bson.Document("double_underscore.fieldname_with_underscores", new org.bson.Document("$exists", true)));
+	}
+
+	@Test // GH-3601
+	void resolvesFieldNameWithUnderscoreOnNestedMappedFieldnameWithUnderscoresCorrectly() {
+
+		Query query = query(where("double_underscore.renamed_fieldname_with_underscores").exists(true));
+
+		org.bson.Document document = mapper.getMappedObject(query.getQueryObject(),
+				context.getPersistentEntity(WrapperAroundWithPropertyUsingUnderscoreInName.class));
+
+		assertThat(document)
+				.isEqualTo(new org.bson.Document("double_underscore.renamed", new org.bson.Document("$exists", true)));
+	}
+
+	@Test // GH-3633
+	void mapsNullValueForFieldWithCustomTargetType() {
+
+		Query query = query(where("stringAsOid").is(null));
+
+		org.bson.Document document = mapper.getMappedObject(query.getQueryObject(),
+				context.getPersistentEntity(NonIdFieldWithObjectIdTargetType.class));
+
+		assertThat(document).isEqualTo(new org.bson.Document("stringAsOid", null));
+	}
+
+	@Test // GH-3635
+	void $floorKeywordDoesNotMatch$or$norPattern() {
+
+		Query query = new BasicQuery(" { $expr: { $gt: [ \"$spent\" , { $floor : \"$budget\" } ] } }");
+		assertThatNoException()
+				.isThrownBy(() -> mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(Foo.class)));
+	}
+
+	@Test // GH-3659
+	void allowsUsingFieldPathsForPropertiesHavingCustomConversionRegistered() {
+
+		Query query = query(where("address.street").is("1007 Mountain Drive"));
+
+		MongoCustomConversions mongoCustomConversions = new MongoCustomConversions(
+				Collections.singletonList(new MyAddressToDocumentConverter()));
+
+		this.context = new MongoMappingContext();
+		this.context.setSimpleTypeHolder(mongoCustomConversions.getSimpleTypeHolder());
+		this.context.afterPropertiesSet();
+
+		this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context);
+		this.converter.setCustomConversions(mongoCustomConversions);
+		this.converter.afterPropertiesSet();
+
+		this.mapper = new QueryMapper(converter);
+
+		assertThat(mapper.getMappedSort(query.getQueryObject(), context.getPersistentEntity(Customer.class)))
+				.isEqualTo(new org.bson.Document("address.street", "1007 Mountain Drive"));
+	}
+
+	@Test // GH-3783
+	void retainsId$InWithStringArray() {
+
+		org.bson.Document mappedQuery = mapper.getMappedObject(
+				org.bson.Document.parse("{ _id : { $in: [\"5b8bedceb1e0bfc07b008828\"]}}"),
+				context.getPersistentEntity(WithExplicitStringId.class));
+
+		assertThat(mappedQuery.get("_id")).isEqualTo(org.bson.Document.parse("{ $in: [\"5b8bedceb1e0bfc07b008828\"]}"));
+	}
+
+	@Test // GH-3783
+	void mapsId$InInToObjectIds() {
+
+		org.bson.Document mappedQuery = mapper.getMappedObject(
+				org.bson.Document.parse("{ _id : { $in: [\"5b8bedceb1e0bfc07b008828\"]}}"),
+				context.getPersistentEntity(ClassWithDefaultId.class));
+
+		assertThat(mappedQuery.get("_id"))
+				.isEqualTo(org.bson.Document.parse("{ $in: [ {$oid: \"5b8bedceb1e0bfc07b008828\" } ]}"));
+	}
+
 	class WithDeepArrayNesting {

 		List<WithNestedArray> level0;
@@ -1074,6 +1228,18 @@ public class QueryMapperUnitTests {
 		@Id private String foo;
 	}

+	class WithStringId {
+
+		@MongoId String id;
+		String name;
+	}
+
+	class WithExplicitStringId {
+
+		@MongoId(FieldType.STRING) String id;
+		String name;
+	}
+
 	class BigIntegerId {

 		@Id private BigInteger id;
@@ -1150,18 +1316,22 @@ public class QueryMapperUnitTests {
 		@Field("geoJsonPointWithNameViaFieldAnnotation") GeoJsonPoint namedGeoJsonPoint;
 	}

-	static class SimpeEntityWithoutId {
+	static class SimpleEntityWithoutId {

 		String stringProperty;
 		Integer integerProperty;
 	}

 	static class EntityWithComplexValueTypeMap {
-		Map<Integer, SimpeEntityWithoutId> map;
+		Map<Integer, SimpleEntityWithoutId> map;
 	}

+	static class EntityWithIntKeyedMapOfMap{
+		Map<Integer, EntityWithComplexValueTypeMap> outerMap;
+	}
+
 	static class EntityWithComplexValueTypeList {
-		List<SimpeEntityWithoutId> list;
+		List<SimpleEntityWithoutId> list;
 	}

 	static class WithExplicitTargetTypes {
@@ -1194,4 +1364,40 @@ public class QueryMapperUnitTests {
 			this.value = value;
 		}
 	}
+
+	static class WrapperAroundWithPropertyUsingUnderscoreInName {
+
+		WithPropertyUsingUnderscoreInName simple;
+		WithPropertyUsingUnderscoreInName double_underscore;
+	}
+
+	static class WithPropertyUsingUnderscoreInName {
+
+		String fieldname_with_underscores;
+
+		@Field("renamed") String renamed_fieldname_with_underscores;
+	}
+
+	@Document
+	static class Customer {
+
+		@Id private ObjectId id;
+		private String name;
+		private MyAddress address;
+	}
+
+	static class MyAddress {
+		private String street;
+	}
+
+	@WritingConverter
+	public static class MyAddressToDocumentConverter implements Converter<MyAddress, org.bson.Document> {
+
+		@Override
+		public org.bson.Document convert(MyAddress address) {
+
+			org.bson.Document doc = new org.bson.Document();
+			doc.put("street", address.street);
+			return doc;
+		}
+	}
}
UpdateMapperUnitTests.java:

@@ -65,6 +65,7 @@ import com.mongodb.DBRef;
 * @author Thomas Darimont
 * @author Mark Paluch
 * @author Pavel Vodrazka
+ * @author David Julia
 */
@ExtendWith(MockitoExtension.class)
class UpdateMapperUnitTests {
@@ -1110,6 +1111,16 @@ class UpdateMapperUnitTests {
 				.isEqualTo("{\"$set\": {\"map.601218778970110001827396.value\": \"testing\"}}");
 	}

+	@Test // GH-3688
+	void multipleNumericKeysInNestedPath() {
+
+		Update update = new Update().set("intKeyedMap.12345.map.0", "testing");
+		Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
+				context.getPersistentEntity(EntityWithIntKeyedMap.class));
+
+		assertThat(mappedUpdate).isEqualTo("{\"$set\": {\"intKeyedMap.12345.map.0\": \"testing\"}}");
+	}
+
 	@Test // GH-3566
 	void mapsObjectClassPropertyFieldInMapValueTypeAsKey() {

@@ -1357,6 +1368,10 @@ class UpdateMapperUnitTests {
 		Map<Object, NestedDocument> concreteMap;
 	}

+	static class EntityWithIntKeyedMap{
+		Map<Integer, EntityWithObjectMap> intKeyedMap;
+	}
+
 	static class ClassWithEnum {

 		Allocation allocation;
@@ -34,6 +34,8 @@ import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
 * @author Thomas Darimont
 * @author Christoph Strobl
 * @author Andreas Zink
 * @author Clément Petit
 * @author Mark Paluch
 */
public class CriteriaUnitTests {

@@ -310,9 +312,72 @@ public class CriteriaUnitTests {
    @Test // DATAMONGO-2002
    public void shouldEqualForSamePattern() {

        Criteria left = new Criteria("field").regex("foo");
        Criteria right = new Criteria("field").regex("foo");

        assertThat(left).isEqualTo(right);
    }

    @Test // DATAMONGO-2002
    public void shouldEqualForDocument() {

        assertThat(new Criteria("field").is(new Document("one", 1).append("two", "two").append("null", null)))
                .isEqualTo(new Criteria("field").is(new Document("one", 1).append("two", "two").append("null", null)));

        assertThat(new Criteria("field").is(new Document("one", 1).append("two", "two").append("null", null)))
                .isNotEqualTo(new Criteria("field").is(new Document("one", 1).append("two", "two")));

        assertThat(new Criteria("field").is(new Document("one", 1).append("two", "two")))
                .isNotEqualTo(new Criteria("field").is(new Document("one", 1).append("two", "two").append("null", null)));

        assertThat(new Criteria("field").is(new Document("one", 1).append("null", null).append("two", "two")))
                .isNotEqualTo(new Criteria("field").is(new Document("one", 1).append("two", "two").append("null", null)));

        assertThat(new Criteria("field").is(new Document())).isNotEqualTo(new Criteria("field").is("foo"));
        assertThat(new Criteria("field").is("foo")).isNotEqualTo(new Criteria("field").is(new Document()));
    }

    @Test // DATAMONGO-2002
    public void shouldEqualForCollection() {

        assertThat(new Criteria("field").is(Arrays.asList("foo", "bar")))
                .isEqualTo(new Criteria("field").is(Arrays.asList("foo", "bar")));

        assertThat(new Criteria("field").is(Arrays.asList("foo", 1)))
                .isNotEqualTo(new Criteria("field").is(Arrays.asList("foo", "bar")));

        assertThat(new Criteria("field").is(Collections.singletonList("foo")))
                .isNotEqualTo(new Criteria("field").is(Arrays.asList("foo", "bar")));

        assertThat(new Criteria("field").is(Arrays.asList("foo", "bar")))
                .isNotEqualTo(new Criteria("field").is(Collections.singletonList("foo")));

        assertThat(new Criteria("field").is(Arrays.asList("foo", "bar"))).isNotEqualTo(new Criteria("field").is("foo"));

        assertThat(new Criteria("field").is("foo")).isNotEqualTo(new Criteria("field").is(Arrays.asList("foo", "bar")));
    }

    @Test // GH-3414
    public void shouldEqualForSamePatternAndFlags() {

        Criteria left = new Criteria("field").regex("foo", "iu");
        Criteria right = new Criteria("field").regex("foo");

        assertThat(left).isNotEqualTo(right);
    }

    @Test // GH-3414
    public void shouldEqualForNestedPattern() {

        Criteria left = new Criteria("a").orOperator(
                new Criteria("foo").regex("value", "i"),
                new Criteria("bar").regex("value")
        );
        Criteria right = new Criteria("a").orOperator(
                new Criteria("foo").regex("value", "i"),
                new Criteria("bar").regex("value")
        );

        assertThat(left).isEqualTo(right);
    }
}

@@ -60,7 +60,9 @@ import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.geo.GeoJsonPoint;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.repository.Person.Sex;
import org.springframework.data.mongodb.repository.SampleEvaluationContextExtension.SampleSecurityContextHolder;
import org.springframework.data.querydsl.QSort;
@@ -1378,4 +1380,14 @@ public abstract class AbstractPersonRepositoryIntegrationTests {
    void caseSensitiveInClauseIgnoresExpressions() {
        assertThat(repository.findByFirstnameIn(".*")).isEmpty();
    }

    @Test // GH-3633
    void annotatedQueryWithNullEqualityCheckShouldWork() {

        operations.updateFirst(Query.query(Criteria.where("id").is(dave.getId())), Update.update("age", null), Person.class);

        Person byQueryWithNullEqualityCheck = repository.findByQueryWithNullEqualityCheck();
        assertThat(byQueryWithNullEqualityCheck.getId()).isEqualTo(dave.getId());
    }

}

@@ -403,4 +403,7 @@ public interface PersonRepository extends MongoRepository<Person, String>, Query
    Person findPersonByManyArguments(String firstname, String lastname, String email, Integer age, Sex sex,
            Date createdAt, List<String> skills, String street, String zipCode, //
            String city, UUID uniqueId, String username, String password);

    @Query("{ 'age' : null }")
    Person findByQueryWithNullEqualityCheck();
}

@@ -20,15 +20,19 @@ import static org.springframework.data.domain.ExampleMatcher.*;

import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.Value;
import lombok.With;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;

import java.util.Arrays;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import javax.annotation.Nullable;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

import org.springframework.beans.BeansException;
import org.springframework.beans.factory.BeanClassLoaderAware;
@@ -44,10 +48,9 @@ import org.springframework.data.domain.Sort.Order;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactory;
import org.springframework.data.mongodb.repository.support.SimpleReactiveMongoRepository;
import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider;
import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.util.ClassUtils;

/**
@@ -56,19 +59,22 @@ import org.springframework.util.ClassUtils;
 * @author Mark Paluch
 * @author Christoph Strobl
 * @author Ruben J Garcia
 * @author Clément Petit
 */
@RunWith(SpringRunner.class)
@ExtendWith(SpringExtension.class)
@ContextConfiguration("classpath:reactive-infrastructure.xml")
public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware, BeanFactoryAware {

    @Autowired private ReactiveMongoTemplate template;

    ReactiveMongoRepositoryFactory factory;
    ClassLoader classLoader;
    BeanFactory beanFactory;
    ReactivePersonRepostitory repository;
    private ReactiveMongoRepositoryFactory factory;
    private ClassLoader classLoader;
    private BeanFactory beanFactory;
    private ReactivePersonRepository repository;
    private ReactiveImmutablePersonRepository immutableRepository;

    private ReactivePerson dave, oliver, carter, boyd, stefan, leroi, alicia;
    private ImmutableReactivePerson keith, james, mariah;

    @Override
    public void setBeanClassLoader(ClassLoader classLoader) {
@@ -80,8 +86,8 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
        this.beanFactory = beanFactory;
    }

    @Before
    public void setUp() {
    @BeforeEach
    void setUp() {

        factory = new ReactiveMongoRepositoryFactory(template);
        factory.setRepositoryBaseClass(SimpleReactiveMongoRepository.class);
@@ -89,9 +95,11 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
        factory.setBeanFactory(beanFactory);
        factory.setEvaluationContextProvider(ReactiveQueryMethodEvaluationContextProvider.DEFAULT);

        repository = factory.getRepository(ReactivePersonRepostitory.class);
        repository = factory.getRepository(ReactivePersonRepository.class);
        immutableRepository = factory.getRepository(ReactiveImmutablePersonRepository.class);

        repository.deleteAll().as(StepVerifier::create).verifyComplete();
        immutableRepository.deleteAll().as(StepVerifier::create).verifyComplete();

        dave = new ReactivePerson("Dave", "Matthews", 42);
        oliver = new ReactivePerson("Oliver August", "Matthews", 4);
@@ -100,6 +108,9 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
        stefan = new ReactivePerson("Stefan", "Lessard", 34);
        leroi = new ReactivePerson("Leroi", "Moore", 41);
        alicia = new ReactivePerson("Alicia", "Keys", 30);
        keith = new ImmutableReactivePerson(null, "Keith", "Urban", 53);
        james = new ImmutableReactivePerson(null, "James", "Arthur", 33);
        mariah = new ImmutableReactivePerson(null, "Mariah", "Carey", 51);

        repository.saveAll(Arrays.asList(oliver, dave, carter, boyd, stefan, leroi, alicia)).as(StepVerifier::create) //
                .expectNextCount(7) //
@@ -107,78 +118,78 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1444
    public void existsByIdShouldReturnTrueForExistingObject() {
    void existsByIdShouldReturnTrueForExistingObject() {
        repository.existsById(dave.id).as(StepVerifier::create).expectNext(true).verifyComplete();
    }

    @Test // DATAMONGO-1444
    public void existsByIdShouldReturnFalseForAbsentObject() {
    void existsByIdShouldReturnFalseForAbsentObject() {
        repository.existsById("unknown").as(StepVerifier::create).expectNext(false).verifyComplete();
    }

    @Test // DATAMONGO-1444
    public void existsByMonoOfIdShouldReturnTrueForExistingObject() {
    void existsByMonoOfIdShouldReturnTrueForExistingObject() {
        repository.existsById(Mono.just(dave.id)).as(StepVerifier::create).expectNext(true).verifyComplete();
    }

    @Test // DATAMONGO-1712
    public void existsByFluxOfIdShouldReturnTrueForExistingObject() {
    void existsByFluxOfIdShouldReturnTrueForExistingObject() {
        repository.existsById(Flux.just(dave.id, oliver.id)).as(StepVerifier::create).expectNext(true).verifyComplete();
    }

    @Test // DATAMONGO-1444
    public void existsByEmptyMonoOfIdShouldReturnEmptyMono() {
    void existsByEmptyMonoOfIdShouldReturnEmptyMono() {
        repository.existsById(Mono.empty()).as(StepVerifier::create).verifyComplete();
    }

    @Test // DATAMONGO-1444
    public void findByIdShouldReturnObject() {
    void findByIdShouldReturnObject() {
        repository.findById(dave.id).as(StepVerifier::create).expectNext(dave).verifyComplete();
    }

    @Test // DATAMONGO-1444
    public void findByIdShouldCompleteWithoutValueForAbsentObject() {
    void findByIdShouldCompleteWithoutValueForAbsentObject() {
        repository.findById("unknown").as(StepVerifier::create).verifyComplete();
    }

    @Test // DATAMONGO-1444
    public void findByIdByMonoOfIdShouldReturnTrueForExistingObject() {
    void findByIdByMonoOfIdShouldReturnTrueForExistingObject() {
        repository.findById(Mono.just(dave.id)).as(StepVerifier::create).expectNext(dave).verifyComplete();
    }

    @Test // DATAMONGO-1712
    public void findByIdByFluxOfIdShouldReturnTrueForExistingObject() {
    void findByIdByFluxOfIdShouldReturnTrueForExistingObject() {
        repository.findById(Flux.just(dave.id, oliver.id)).as(StepVerifier::create).expectNext(dave).verifyComplete();
    }

    @Test // DATAMONGO-1444
    public void findByIdByEmptyMonoOfIdShouldReturnEmptyMono() {
    void findByIdByEmptyMonoOfIdShouldReturnEmptyMono() {
        repository.findById(Mono.empty()).as(StepVerifier::create).verifyComplete();
    }

    @Test // DATAMONGO-1444
    public void findAllShouldReturnAllResults() {
    void findAllShouldReturnAllResults() {
        repository.findAll().as(StepVerifier::create).expectNextCount(7).verifyComplete();
    }

    @Test // DATAMONGO-1444
    public void findAllByIterableOfIdShouldReturnResults() {
    void findAllByIterableOfIdShouldReturnResults() {
        repository.findAllById(Arrays.asList(dave.id, boyd.id)).as(StepVerifier::create).expectNextCount(2)
                .verifyComplete();
    }

    @Test // DATAMONGO-1444
    public void findAllByPublisherOfIdShouldReturnResults() {
    void findAllByPublisherOfIdShouldReturnResults() {
        repository.findAllById(Flux.just(dave.id, boyd.id)).as(StepVerifier::create).expectNextCount(2).verifyComplete();
    }

    @Test // DATAMONGO-1444
    public void findAllByEmptyPublisherOfIdShouldReturnResults() {
    void findAllByEmptyPublisherOfIdShouldReturnResults() {
        repository.findAllById(Flux.empty()).as(StepVerifier::create).verifyComplete();
    }

    @Test // DATAMONGO-1444
    public void findAllWithSortShouldReturnResults() {
    void findAllWithSortShouldReturnResults() {

        repository.findAll(Sort.by(new Order(Direction.ASC, "age"))).as(StepVerifier::create) //
                .expectNextCount(7) //
@@ -186,12 +197,12 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1444
    public void countShouldReturnNumberOfRecords() {
    void countShouldReturnNumberOfRecords() {
        repository.count().as(StepVerifier::create).expectNext(7L).verifyComplete();
    }

    @Test // DATAMONGO-1444
    public void insertEntityShouldInsertEntity() {
    void insertEntityShouldInsertEntity() {

        repository.deleteAll().as(StepVerifier::create).verifyComplete();

@@ -203,7 +214,7 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1444
    public void insertShouldDeferredWrite() {
    void insertShouldDeferredWrite() {

        ReactivePerson person = new ReactivePerson("Homer", "Simpson", 36);

@@ -213,7 +224,7 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1444
    public void insertIterableOfEntitiesShouldInsertEntity() {
    void insertIterableOfEntitiesShouldInsertEntity() {

        repository.deleteAll().as(StepVerifier::create).verifyComplete();

@@ -231,7 +242,7 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1444
    public void insertPublisherOfEntitiesShouldInsertEntity() {
    void insertPublisherOfEntitiesShouldInsertEntity() {

        repository.deleteAll().as(StepVerifier::create).verifyComplete();

@@ -247,7 +258,7 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1444
    public void saveEntityShouldUpdateExistingEntity() {
    void saveEntityShouldUpdateExistingEntity() {

        dave.setFirstname("Hello, Dave");
        dave.setLastname("Bowman");
@@ -264,7 +275,7 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1444
    public void saveEntityShouldInsertNewEntity() {
    void saveEntityShouldInsertNewEntity() {

        ReactivePerson person = new ReactivePerson("Homer", "Simpson", 36);

@@ -278,7 +289,7 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1444
    public void saveIterableOfNewEntitiesShouldInsertEntity() {
    void saveIterableOfNewEntitiesShouldInsertEntity() {

        repository.deleteAll().as(StepVerifier::create).verifyComplete();

@@ -294,7 +305,7 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1444
    public void saveIterableOfMixedEntitiesShouldInsertEntity() {
    void saveIterableOfMixedEntitiesShouldInsertEntity() {

        ReactivePerson person = new ReactivePerson("Homer", "Simpson", 36);

@@ -310,7 +321,7 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1444
    public void savePublisherOfEntitiesShouldInsertEntity() {
    void savePublisherOfEntitiesShouldInsertEntity() {

        repository.deleteAll().as(StepVerifier::create).verifyComplete();

@@ -325,8 +336,20 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
        assertThat(boyd.getId()).isNotNull();
    }

    @Test // GH-3609
    void savePublisherOfImmutableEntitiesShouldInsertEntity() {

        immutableRepository.deleteAll().as(StepVerifier::create).verifyComplete();

        immutableRepository.saveAll(Flux.just(keith)).as(StepVerifier::create) //
                .consumeNextWith(actual -> {
                    assertThat(actual.id).isNotNull();
                }) //
                .verifyComplete();
    }

    @Test // DATAMONGO-1444
    public void deleteAllShouldRemoveEntities() {
    void deleteAllShouldRemoveEntities() {

        repository.deleteAll().as(StepVerifier::create).verifyComplete();

@@ -334,7 +357,7 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1444
    public void deleteByIdShouldRemoveEntity() {
    void deleteByIdShouldRemoveEntity() {

        repository.deleteById(dave.id).as(StepVerifier::create).verifyComplete();

@@ -342,7 +365,7 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1712
    public void deleteByIdUsingMonoShouldRemoveEntity() {
    void deleteByIdUsingMonoShouldRemoveEntity() {

        repository.deleteById(Mono.just(dave.id)).as(StepVerifier::create).verifyComplete();

@@ -350,7 +373,7 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1712
    public void deleteByIdUsingFluxShouldRemoveEntity() {
    void deleteByIdUsingFluxShouldRemoveEntity() {

        repository.deleteById(Flux.just(dave.id, oliver.id)).as(StepVerifier::create).verifyComplete();

@@ -359,7 +382,7 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1444
    public void deleteShouldRemoveEntity() {
    void deleteShouldRemoveEntity() {

        repository.delete(dave).as(StepVerifier::create).verifyComplete();

@@ -368,7 +391,7 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1444
    public void deleteIterableOfEntitiesShouldRemoveEntities() {
    void deleteIterableOfEntitiesShouldRemoveEntities() {

        repository.deleteAll(Arrays.asList(dave, boyd)).as(StepVerifier::create).verifyComplete();

@@ -378,7 +401,7 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1444
    public void deletePublisherOfEntitiesShouldRemoveEntities() {
    void deletePublisherOfEntitiesShouldRemoveEntities() {

        repository.deleteAll(Flux.just(dave, boyd)).as(StepVerifier::create).verifyComplete();

@@ -388,7 +411,7 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1619
    public void findOneByExampleShouldReturnObject() {
    void findOneByExampleShouldReturnObject() {

        Example<ReactivePerson> example = Example.of(dave);

@@ -396,7 +419,7 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1619
    public void findAllByExampleShouldReturnObjects() {
    void findAllByExampleShouldReturnObjects() {

        Example<ReactivePerson> example = Example.of(dave, matching().withIgnorePaths("id", "age", "firstname"));

@@ -404,7 +427,7 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1619
    public void findAllByExampleAndSortShouldReturnObjects() {
    void findAllByExampleAndSortShouldReturnObjects() {

        Example<ReactivePerson> example = Example.of(dave, matching().withIgnorePaths("id", "age", "firstname"));

@@ -413,7 +436,7 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1619
    public void countByExampleShouldCountObjects() {
    void countByExampleShouldCountObjects() {

        Example<ReactivePerson> example = Example.of(dave, matching().withIgnorePaths("id", "age", "firstname"));

@@ -421,7 +444,7 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1619
    public void existsByExampleShouldReturnExisting() {
    void existsByExampleShouldReturnExisting() {

        Example<ReactivePerson> example = Example.of(dave, matching().withIgnorePaths("id", "age", "firstname"));

@@ -429,7 +452,7 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1619
    public void existsByExampleShouldReturnNonExisting() {
    void existsByExampleShouldReturnNonExisting() {

        Example<ReactivePerson> example = Example.of(new ReactivePerson("foo", "bar", -1));

@@ -437,7 +460,7 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1619
    public void findOneShouldEmitIncorrectResultSizeDataAccessExceptionWhenMoreThanOneElementFound() {
    void findOneShouldEmitIncorrectResultSizeDataAccessExceptionWhenMoreThanOneElementFound() {

        Example<ReactivePerson> example = Example.of(new ReactivePerson(null, "Matthews", -1),
                matching().withIgnorePaths("age"));
@@ -446,19 +469,23 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
    }

    @Test // DATAMONGO-1907
    public void findOneByExampleWithoutResultShouldCompleteEmpty() {
    void findOneByExampleWithoutResultShouldCompleteEmpty() {

        Example<ReactivePerson> example = Example.of(new ReactivePerson("foo", "bar", -1));

        repository.findOne(example).as(StepVerifier::create).verifyComplete();
    }

    interface ReactivePersonRepostitory extends ReactiveMongoRepository<ReactivePerson, String> {
    interface ReactivePersonRepository extends ReactiveMongoRepository<ReactivePerson, String> {

        Flux<ReactivePerson> findByLastname(String lastname);

    }

    interface ReactiveImmutablePersonRepository extends ReactiveMongoRepository<ImmutableReactivePerson, String> {

    }

    @Data
    @NoArgsConstructor
    static class ReactivePerson {
@@ -469,11 +496,30 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
        String lastname;
        int age;

        public ReactivePerson(String firstname, String lastname, int age) {
        ReactivePerson(String firstname, String lastname, int age) {

            this.firstname = firstname;
            this.lastname = lastname;
            this.age = age;
        }
    }

    @With
    @Value
    static class ImmutableReactivePerson {

        @Id String id;

        String firstname;
        String lastname;
        int age;

        ImmutableReactivePerson(@Nullable String id, String firstname, String lastname, int age) {
            this.id = id;
            this.firstname = firstname;
            this.lastname = lastname;
            this.age = age;
        }
    }

}

@@ -159,6 +159,14 @@ public class StringBasedAggregationUnitTests {
    assertThat(executeAggregation("returnCollection").result).isEqualTo(expected);
}

@Test // GH-3623
public void returnNullWhenSingleResultIsNotPresent() {

    when(aggregationResults.getMappedResults()).thenReturn(Collections.emptyList());

    assertThat(executeAggregation("simpleReturnType").result).isNull();
}

@Test // DATAMONGO-2153
public void returnRawResultType() {
    assertThat(executeAggregation("returnRawResultType").result).isEqualTo(aggregationResults);
@@ -312,6 +320,9 @@ public class StringBasedAggregationUnitTests {

    @Aggregation(RAW_GROUP_BY_LASTNAME_STRING)
    Page<Person> invalidPageReturnType(Pageable page);

    @Aggregation(RAW_GROUP_BY_LASTNAME_STRING)
    String simpleReturnType();
}

static class PersonAggregate {

@@ -27,6 +27,8 @@ import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.IncorrectResultSizeDataAccessException;
import org.springframework.dao.PermissionDeniedDataAccessException;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.MongoDatabaseFactory;
@@ -122,6 +124,20 @@ public class QuerydslMongoPredicateExecutorIntegrationTests {
        .containsExactly(dave);
}

@Test // GH-3751
public void findPage() {

    assertThat(repository
            .findAll(person.lastname.startsWith(oliver.getLastname()).and(person.firstname.startsWith(dave.getFirstname())),
                    PageRequest.of(0, 10))
            .getContent()).containsExactly(dave);

    assertThat(repository
            .findAll(person.lastname.startsWith(oliver.getLastname()).and(person.firstname.startsWith(dave.getFirstname())),
                    Pageable.unpaged())
            .getContent()).containsExactly(dave);
}

@Test // DATAMONGO-362, DATAMONGO-1848
public void springDataMongodbQueryShouldAllowJoinOnDBref() {

@@ -119,6 +119,9 @@ public class MongoTestTemplateConfiguration {
    mappingContext = new MongoMappingContext();
    mappingContext.setInitialEntitySet(mappingContextConfigurer.initialEntitySet());
    mappingContext.setAutoIndexCreation(mappingContextConfigurer.autocreateIndex);
    if (mongoConverterConfigurer.customConversions != null) {
        mappingContext.setSimpleTypeHolder(mongoConverterConfigurer.customConversions.getSimpleTypeHolder());
    }
    mappingContext.afterPropertiesSet();
}

@@ -383,6 +383,13 @@ class ParameterBindingJsonReaderUnitTests {
        .parse("{ 'stores.location' : { $geoWithin: { $centerSphere: [ [ 1.948516, 48.799029 ] , 0.004 ] } } }"));
}

@Test // GH-3633
void parsesNullValue() {

    Document target = parse("{ 'parent' : null }");
    assertThat(target).isEqualTo(new Document("parent", null));
}

private static Document parse(String json, Object... args) {

    ParameterBindingJsonReader reader = new ParameterBindingJsonReader(json, args);

650
src/main/asciidoc/reference/aggregation-framework.adoc
Normal file
@@ -0,0 +1,650 @@
[[mongo.aggregation]]
== Aggregation Framework Support

Spring Data MongoDB provides support for the Aggregation Framework introduced to MongoDB in version 2.2.

For further information, see the full https://docs.mongodb.org/manual/aggregation/[reference documentation] of the aggregation framework and other data aggregation tools for MongoDB.

[[mongo.aggregation.basic-concepts]]
=== Basic Concepts

The Aggregation Framework support in Spring Data MongoDB is based on the following key abstractions: `Aggregation`, `AggregationOperation`, and `AggregationResults`.

* `Aggregation`
+
An `Aggregation` represents a MongoDB `aggregate` operation and holds the description of the aggregation pipeline instructions. Aggregations are created by invoking the appropriate `newAggregation(…)` static factory method of the `Aggregation` class, which takes a list of `AggregationOperation` and an optional input class.
+
The actual aggregate operation is run by the `aggregate` method of the `MongoTemplate`, which takes the desired output class as a parameter.
+
* `TypedAggregation`
+
A `TypedAggregation`, just like an `Aggregation`, holds the instructions of the aggregation pipeline and a reference to the input type, which is used for mapping domain properties to actual document fields.
+
At runtime, field references get checked against the given input type, considering potential `@Field` annotations and raising errors when referencing nonexistent properties.
+
* `AggregationOperation`
+
An `AggregationOperation` represents a MongoDB aggregation pipeline operation and describes the processing that should be performed in this aggregation step. Although you could manually create an `AggregationOperation`, we recommend using the static factory methods provided by the `Aggregation` class to construct an `AggregationOperation`.
+
* `AggregationResults`
+
`AggregationResults` is the container for the result of an aggregate operation. It provides access to the raw aggregation result (in the form of a `Document`), to the mapped objects, and to other information about the aggregation.
+
The following listing shows the canonical example for using the Spring Data MongoDB support for the MongoDB Aggregation Framework:
+
[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

Aggregation agg = newAggregation(
    pipelineOP1(),
    pipelineOP2(),
    pipelineOPn()
);

AggregationResults<OutputType> results = mongoTemplate.aggregate(agg, "INPUT_COLLECTION_NAME", OutputType.class);
List<OutputType> mappedResult = results.getMappedResults();
----

Note that, if you provide an input class as the first parameter to the `newAggregation` method, the `MongoTemplate` derives the name of the input collection from this class. Otherwise, if you do not specify an input class, you must provide the name of the input collection explicitly. If both an input class and an input collection are provided, the latter takes precedence.
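
To make these rules concrete, the following sketch contrasts the two ways of naming the input collection. The `Product` input class, the `products` collection name, and the `match` stage are illustrative assumptions, not part of the canonical example above:

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
import static org.springframework.data.mongodb.core.query.Criteria.*;

// Input class given: the collection name is derived from Product.
TypedAggregation<Product> typedAgg = newAggregation(Product.class, match(where("name").is("bread")));
AggregationResults<OutputType> byClass = mongoTemplate.aggregate(typedAgg, OutputType.class);

// No input class given: the collection name must be stated explicitly.
Aggregation agg = newAggregation(match(where("name").is("bread")));
AggregationResults<OutputType> byName = mongoTemplate.aggregate(agg, "products", OutputType.class);
----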

[[mongo.aggregation.supported-aggregation-operations]]
=== Supported Aggregation Operations

The MongoDB Aggregation Framework provides the following types of aggregation operations:

* Pipeline Aggregation Operators
* Group/Accumulator Aggregation Operators
* Boolean Aggregation Operators
* Comparison Aggregation Operators
* Arithmetic Aggregation Operators
* String Aggregation Operators
* Date Aggregation Operators
* Array Aggregation Operators
* Conditional Aggregation Operators
* Lookup Aggregation Operators
* Convert Aggregation Operators
* Object Aggregation Operators
* Script Aggregation Operators

At the time of this writing, we provide support for the following Aggregation Operations in Spring Data MongoDB:

.Aggregation Operations currently supported by Spring Data MongoDB
[cols="2*"]
|===
| Pipeline Aggregation Operators
| `bucket`, `bucketAuto`, `count`, `facet`, `geoNear`, `graphLookup`, `group`, `limit`, `lookup`, `match`, `project`, `replaceRoot`, `skip`, `sort`, `unwind`

| Set Aggregation Operators
| `setEquals`, `setIntersection`, `setUnion`, `setDifference`, `setIsSubset`, `anyElementTrue`, `allElementsTrue`

| Group/Accumulator Aggregation Operators
| `addToSet`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `count` (+++*+++), `stdDevPop`, `stdDevSamp`

| Arithmetic Aggregation Operators
| `abs`, `add` (+++*+++ via `plus`), `ceil`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `trunc`

| String Aggregation Operators
| `concat`, `substr`, `toLower`, `toUpper`, `strcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtrim`

| Comparison Aggregation Operators
| `eq` (+++*+++ via `is`), `gt`, `gte`, `lt`, `lte`, `ne`

| Array Aggregation Operators
| `arrayElementAt`, `arrayToObject`, `concatArrays`, `filter`, `in`, `indexOfArray`, `isArray`, `range`, `reverseArray`, `reduce`, `size`, `slice`, `zip`

| Literal Operators
| `literal`

| Date Aggregation Operators
| `dayOfYear`, `dayOfMonth`, `dayOfWeek`, `year`, `month`, `week`, `hour`, `minute`, `second`, `millisecond`, `dateToString`, `dateFromString`, `dateFromParts`, `dateToParts`, `isoDayOfWeek`, `isoWeek`, `isoWeekYear`

| Variable Operators
| `map`

| Conditional Aggregation Operators
| `cond`, `ifNull`, `switch`

| Type Aggregation Operators
| `type`

| Convert Aggregation Operators
| `convert`, `toBool`, `toDate`, `toDecimal`, `toDouble`, `toInt`, `toLong`, `toObjectId`, `toString`

| Object Aggregation Operators
| `objectToArray`, `mergeObjects`

| Script Aggregation Operators
| `function`, `accumulator`
|===

+++*+++ The operation is mapped or added by Spring Data MongoDB.

Note that the aggregation operations not listed here are currently not supported by Spring Data MongoDB. Comparison aggregation operators are expressed as `Criteria` expressions.
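
For example, a `$gt` comparison inside a `$match` stage is written as a `Criteria`; a minimal sketch (the `netPrice` field is an illustrative assumption):

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
import static org.springframework.data.mongodb.core.query.Criteria.*;

// generates { $match: { netPrice: { $gt: 10 } } }
match(where("netPrice").gt(10));
----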

[[mongo.aggregation.projection]]
=== Projection Expressions

Projection expressions are used to define the fields that are the outcome of a particular aggregation step. Projection expressions can be defined through the `project` method of the `Aggregation` class, either by passing a list of `String` objects or an aggregation framework `Fields` object. The projection can be extended with additional fields through a fluent API by using the `and(String)` method and aliased by using the `as(String)` method.
Note that you can also define fields with aliases by using the `Fields.field` static factory method of the aggregation framework, which you can then use to construct a new `Fields` instance. References to projected fields in later aggregation stages are valid only for the field names of included fields or their aliases (including newly defined fields and their aliases). Fields not included in the projection cannot be referenced in later aggregation stages. The following listings show examples of projection expressions:

.Projection expression examples
====
[source,java]
----
// generates {$project: {name: 1, netPrice: 1}}
project("name", "netPrice")

// generates {$project: {thing2: $thing1}}
project().and("thing1").as("thing2")

// generates {$project: {a: 1, b: 1, thing2: $thing1}}
project("a","b").and("thing1").as("thing2")
----
====

.Multi-Stage Aggregation using Projection and Sorting
====
[source,java]
----
// generates {$project: {name: 1, netPrice: 1}}, {$sort: {name: 1}}
project("name", "netPrice"), sort(ASC, "name")

// generates {$project: {name: $firstname}}, {$sort: {name: 1}}
project().and("firstname").as("name"), sort(ASC, "name")

// does not work
project().and("firstname").as("name"), sort(ASC, "firstname")
----
====

More examples for project operations can be found in the `AggregationTests` class. Note that further details regarding the projection expressions can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/project/#pipe._S_project[corresponding section] of the MongoDB Aggregation Framework reference documentation.

[[mongo.aggregation.facet]]
=== Faceted Classification

As of Version 3.4, MongoDB supports faceted classification by using the Aggregation Framework. A faceted classification uses semantic categories (either general or subject-specific) that are combined to create the full classification entry. Documents flowing through the aggregation pipeline are classified into buckets. A multi-faceted classification enables various aggregations on the same set of input documents, without needing to retrieve the input documents multiple times.

==== Buckets

Bucket operations categorize incoming documents into groups, called buckets, based on a specified expression and bucket boundaries. Bucket operations require a grouping field or a grouping expression. You can define them by using the `bucket()` and `bucketAuto()` methods of the `Aggregation` class. `BucketOperation` and `BucketAutoOperation` can expose accumulations based on aggregation expressions for input documents. You can extend the bucket operation with additional parameters through a fluent API by using the `with…()` methods and the `andOutput(String)` method. You can alias the operation by using the `as(String)` method. Each bucket is represented as a document in the output.

`BucketOperation` takes a defined set of boundaries to group incoming documents into these categories. Boundaries are required to be sorted. The following listing shows some examples of bucket operations:

.Bucket operation examples
====
[source,java]
----
// generates {$bucket: {groupBy: $price, boundaries: [0, 100, 400]}}
bucket("price").withBoundaries(0, 100, 400);

// generates {$bucket: {groupBy: $price, default: "Other", boundaries: [0, 100]}}
bucket("price").withBoundaries(0, 100).withDefault("Other");

// generates {$bucket: {groupBy: $price, boundaries: [0, 100], output: { count: { $sum: 1}}}}
bucket("price").withBoundaries(0, 100).andOutputCount().as("count");

// generates {$bucket: {groupBy: $price, boundaries: [0, 100], output: { titles: { $push: "$title"}}}}
bucket("price").withBoundaries(0, 100).andOutput("title").push().as("titles");
----
====

`BucketAutoOperation` determines boundaries in an attempt to evenly distribute documents into a specified number of buckets. `BucketAutoOperation` optionally takes a granularity value that specifies the https://en.wikipedia.org/wiki/Preferred_number[preferred number] series to use to ensure that the calculated boundary edges end on preferred round numbers or on powers of 10. The following listing shows examples of bucket operations:

.Bucket operation examples
====
[source,java]
----
// generates {$bucketAuto: {groupBy: $price, buckets: 5}}
bucketAuto("price", 5)

// generates {$bucketAuto: {groupBy: $price, buckets: 5, granularity: "E24"}}
bucketAuto("price", 5).withGranularity(Granularities.E24);

// generates {$bucketAuto: {groupBy: $price, buckets: 5, output: { titles: { $push: "$title"}}}}
bucketAuto("price", 5).andOutput("title").push().as("titles");
----
====

To create output fields in buckets, bucket operations can use `AggregationExpression` through `andOutput()` and <<mongo.aggregation.projection.expressions, SpEL expressions>> through `andOutputExpression()`.
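
A minimal sketch of `andOutputExpression()`, assuming the input documents carry `netPrice` and `tax` fields (both illustrative):

[source,java]
----
// generates {$bucket: {groupBy: $price, boundaries: [0, 100],
//   output: { total: { $sum: { $add: ["$netPrice", "$tax"] } } } }}
bucket("price").withBoundaries(0, 100)
    .andOutputExpression("netPrice + tax").sum().as("total");
----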

Note that further details regarding bucket expressions can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/bucket/[`$bucket` section] and
https://docs.mongodb.org/manual/reference/operator/aggregation/bucketAuto/[`$bucketAuto` section] of the MongoDB Aggregation Framework reference documentation.

==== Multi-faceted Aggregation

Multiple aggregation pipelines can be used to create multi-faceted aggregations that characterize data across multiple dimensions (or facets) within a single aggregation stage. Multi-faceted aggregations provide multiple filters and categorizations to guide data browsing and analysis. A common implementation of faceting is how many online retailers provide ways to narrow down search results by applying filters on product price, manufacturer, size, and other factors.

You can define a `FacetOperation` by using the `facet()` method of the `Aggregation` class. You can customize it with multiple aggregation pipelines by using the `and()` method. Each sub-pipeline has its own field in the output document where its results are stored as an array of documents.

Sub-pipelines can project and filter input documents prior to grouping. Common use cases include extraction of date parts or calculations before categorization. The following listing shows facet operation examples:

.Facet operation examples
====
[source,java]
----
// generates {$facet: {categorizedByPrice: [ { $match: { price: {$exists : true}}}, { $bucketAuto: {groupBy: $price, buckets: 5}}]}}
facet(match(Criteria.where("price").exists(true)), bucketAuto("price", 5)).as("categorizedByPrice")

// generates {$facet: {categorizedByCountry: [ { $match: { country: {$exists : true}}}, { $sortByCount: "$country"}]}}
facet(match(Criteria.where("country").exists(true)), sortByCount("country")).as("categorizedByCountry")

// generates {$facet: {categorizedByYear: [
//     { $project: { title: 1, publicationYear: { $year: "publicationDate"}}},
//     { $bucketAuto: {groupBy: $price, buckets: 5, output: { titles: {$push:"$title"}}}}
// ]}}
facet(project("title").and("publicationDate").extractYear().as("publicationYear"),
      bucketAuto("publicationYear", 5).andOutput("title").push().as("titles"))
    .as("categorizedByYear")
----
====

Note that further details regarding facet operation can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/facet/[`$facet` section] of the MongoDB Aggregation Framework reference documentation.

[[mongo.aggregation.sort-by-count]]
==== Sort By Count

Sort by count operations group incoming documents based on the value of a specified expression, compute the count of documents in each distinct group, and sort the results by count. It offers a handy shortcut to apply sorting when using <<mongo.aggregation.facet>>. Sort by count operations require a grouping field or grouping expression. The following listing shows a sort by count example:

.Sort by count example
====
[source,java]
----
// generates { $sortByCount: "$country" }
sortByCount("country");
----
====

A sort by count operation is equivalent to the following BSON (Binary JSON):

----
{ $group: { _id: <expression>, count: { $sum: 1 } } },
{ $sort: { count: -1 } }
----
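
The same pipeline can also be spelled out manually with `group` and `sort`; a short sketch:

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

// { $group: { _id: "$country", count: { $sum: 1 } } },
// { $sort: { count: -1 } }
group("country").count().as("count"),
sort(DESC, "count")
----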

[[mongo.aggregation.projection.expressions]]
==== Spring Expression Support in Projection Expressions

We support the use of SpEL expressions in projection expressions through the `andExpression` method of the `ProjectionOperation` and `BucketOperation` classes. This feature lets you define the desired expression as a SpEL expression. On running a query, the SpEL expression is translated into a corresponding MongoDB projection expression part. This arrangement makes it much easier to express complex calculations.

===== Complex Calculations with SpEL expressions

Consider the following SpEL expression:

[source,java]
----
1 + (q + 1) / (q - 1)
----

The preceding expression is translated into the following projection expression part:

[source,javascript]
----
{ "$add" : [ 1, {
    "$divide" : [ {
        "$add":["$q", 1]}, {
        "$subtract":[ "$q", 1]}
    ]
}]}
----
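
In code, you would attach this expression to a projection stage; a minimal sketch, assuming a field `q` on the input documents:

[source,java]
----
// emits the $add/$divide/$subtract tree shown above as the "result" field
project().andExpression("1 + (q + 1) / (q - 1)").as("result");
----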

You can see examples in more context in <<mongo.aggregation.examples.example5>> and <<mongo.aggregation.examples.example6>>. You can find more usage examples for supported SpEL expression constructs in `SpelExpressionTransformerUnitTests`. The following table shows the SpEL transformations supported by Spring Data MongoDB:

.Supported SpEL transformations
[%header,cols="2"]
|===
| SpEL Expression
| Mongo Expression Part
| a == b
| { $eq : [$a, $b] }
| a != b
| { $ne : [$a , $b] }
| a > b
| { $gt : [$a, $b] }
| a >= b
| { $gte : [$a, $b] }
| a < b
| { $lt : [$a, $b] }
| a <= b
| { $lte : [$a, $b] }
| a + b
| { $add : [$a, $b] }
| a - b
| { $subtract : [$a, $b] }
| a * b
| { $multiply : [$a, $b] }
| a / b
| { $divide : [$a, $b] }
| a^b
| { $pow : [$a, $b] }
| a % b
| { $mod : [$a, $b] }
| a && b
| { $and : [$a, $b] }
| a \|\| b
| { $or : [$a, $b] }
| !a
| { $not : [$a] }
|===

In addition to the transformations shown in the preceding table, you can use standard SpEL operations such as `new` to (for example) create arrays and reference expressions through their names (followed by the arguments to use in brackets). The following example shows how to create an array in this fashion:

[source,java]
----
// { $setEquals : [$a, [5, 8, 13] ] }
.andExpression("setEquals(a, new int[]{5, 8, 13})");
----

[[mongo.aggregation.examples]]
==== Aggregation Framework Examples

The examples in this section demonstrate the usage patterns for the MongoDB Aggregation Framework with Spring Data MongoDB.

[[mongo.aggregation.examples.example1]]
===== Aggregation Framework Example 1

In this introductory example, we want to aggregate a list of tags to get the occurrence count of a particular tag from a MongoDB collection (called `tags`), sorted by the occurrence count in descending order. This example demonstrates the usage of grouping, sorting, projections (selection), and unwinding (result splitting).

[source,java]
----
class TagCount {
    String tag;
    int n;
}
----

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

Aggregation agg = newAggregation(
    project("tags"),
    unwind("tags"),
    group("tags").count().as("n"),
    project("n").and("tag").previousOperation(),
    sort(DESC, "n")
);

AggregationResults<TagCount> results = mongoTemplate.aggregate(agg, "tags", TagCount.class);
List<TagCount> tagCount = results.getMappedResults();
----

The preceding listing uses the following algorithm:

. Create a new aggregation by using the `newAggregation` static factory method, to which we pass a list of aggregation operations. These aggregate operations define the aggregation pipeline of our `Aggregation`.
. Use the `project` operation to select the `tags` field (which is an array of strings) from the input collection.
. Use the `unwind` operation to generate a new document for each tag within the `tags` array.
. Use the `group` operation to define a group for each `tags` value for which we aggregate the occurrence count (by using the `count` aggregation operator and collecting the result in a new field called `n`).
. Select the `n` field and create an alias for the ID field generated from the previous group operation (hence the call to `previousOperation()`) with a name of `tag`.
. Use the `sort` operation to sort the resulting list of tags by their occurrence count in descending order.
. Call the `aggregate` method on `MongoTemplate` to let MongoDB perform the actual aggregation operation, with the created `Aggregation` as an argument.

Note that the input collection is explicitly specified as the `tags` parameter to the `aggregate` method. If the name of the input collection is not specified explicitly, it is derived from the input class passed as the first parameter to the `newAggregation` method.

[[mongo.aggregation.examples.example2]]
|
||||
===== Aggregation Framework Example 2
|
||||
|
||||
This example is based on the https://docs.mongodb.org/manual/tutorial/aggregation-examples/#largest-and-smallest-cities-by-state[Largest and Smallest Cities by State] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions. Here we want to return the smallest and largest cities by population for each state by using the aggregation framework. This example demonstrates grouping, sorting, and projections (selection).
|
||||
|
||||
[source,java]
|
||||
----
|
||||
class ZipInfo {
|
||||
String id;
|
||||
String city;
|
||||
String state;
|
||||
@Field("pop") int population;
|
||||
@Field("loc") double[] location;
|
||||
}
|
||||
|
||||
class City {
|
||||
String name;
|
||||
int population;
|
||||
}
|
||||
|
||||
class ZipInfoStats {
|
||||
String id;
|
||||
String state;
|
||||
City biggestCity;
|
||||
City smallestCity;
|
||||
}
|
||||
----
|
||||
|
||||
[source,java]
|
||||
----
|
||||
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
|
||||
|
||||
TypedAggregation<ZipInfo> aggregation = newAggregation(ZipInfo.class,
|
||||
group("state", "city")
|
||||
.sum("population").as("pop"),
|
||||
sort(ASC, "pop", "state", "city"),
|
||||
group("state")
|
||||
.last("city").as("biggestCity")
|
||||
.last("pop").as("biggestPop")
|
||||
.first("city").as("smallestCity")
|
||||
.first("pop").as("smallestPop"),
|
||||
project()
|
||||
.and("state").previousOperation()
|
||||
.and("biggestCity")
|
||||
.nested(bind("name", "biggestCity").and("population", "biggestPop"))
|
||||
.and("smallestCity")
|
||||
.nested(bind("name", "smallestCity").and("population", "smallestPop")),
|
||||
sort(ASC, "state")
|
||||
);
|
||||
|
||||
AggregationResults<ZipInfoStats> result = mongoTemplate.aggregate(aggregation, ZipInfoStats.class);
|
||||
ZipInfoStats firstZipInfoStats = result.getMappedResults().get(0);
|
||||
----
|
||||
|
||||
115 src/main/asciidoc/reference/gridfs.adoc (new file)
@@ -0,0 +1,115 @@
[[gridfs]]
== GridFS Support

MongoDB supports storing binary files inside its filesystem, GridFS. Spring Data MongoDB provides a `GridFsOperations` interface as well as the corresponding implementation, `GridFsTemplate`, to let you interact with the filesystem. You can set up a `GridFsTemplate` instance by handing it a `MongoDatabaseFactory` as well as a `MongoConverter`, as the following example shows:

.JavaConfig setup for a GridFsTemplate
====
[source,java]
----
class GridFsConfiguration extends AbstractMongoClientConfiguration {

    // … further configuration omitted

    @Bean
    public GridFsTemplate gridFsTemplate() {
        return new GridFsTemplate(mongoDbFactory(), mappingMongoConverter());
    }
}
----
====

The corresponding XML configuration follows:

.XML configuration for a GridFsTemplate
====
[source,xml]
----
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:mongo="http://www.springframework.org/schema/data/mongo"
       xsi:schemaLocation="http://www.springframework.org/schema/data/mongo
                           https://www.springframework.org/schema/data/mongo/spring-mongo.xsd
                           http://www.springframework.org/schema/beans
                           https://www.springframework.org/schema/beans/spring-beans.xsd">

    <mongo:db-factory id="mongoDbFactory" dbname="database" />
    <mongo:mapping-converter id="converter" />

    <bean class="org.springframework.data.mongodb.gridfs.GridFsTemplate">
        <constructor-arg ref="mongoDbFactory" />
        <constructor-arg ref="converter" />
    </bean>

</beans>
----
====

The template can now be injected and used to perform storage and retrieval operations, as the following example shows:

.Using GridFsTemplate to store files
====
[source,java]
----
class GridFsClient {

    @Autowired
    GridFsOperations operations;

    @Test
    public void storeFileToGridFs() {

        FileMetadata metadata = new FileMetadata();
        // populate metadata
        Resource file = … // lookup File or Resource

        operations.store(file.getInputStream(), "filename.txt", metadata);
    }
}
----
====

The `store(…)` operations take an `InputStream`, a filename, and (optionally) metadata information about the file to store. The metadata can be an arbitrary object, which will be marshaled by the `MongoConverter` configured with the `GridFsTemplate`. Alternatively, you can also provide a `Document`.
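The `Document` variant might look like the following sketch (the `category` metadata key is illustrative, not part of the original text):

[source,java]
----
import org.bson.Document;

// arbitrary metadata expressed directly as a BSON document
Document metadata = new Document("category", "text");
operations.store(file.getInputStream(), "filename.txt", metadata);
----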

You can read files from the filesystem through either the `find(…)` or the `getResources(…)` methods. Let's have a look at the `find(…)` methods first. You can either find a single file or multiple files that match a `Query`. You can use the `GridFsCriteria` helper class to define queries. It provides static factory methods to encapsulate default metadata fields (such as `whereFilename()` and `whereContentType()`) or a custom one through `whereMetaData()`. The following example shows how to use `GridFsTemplate` to query for files:

.Using GridFsTemplate to query for files
====
[source,java]
----
class GridFsClient {

    @Autowired
    GridFsOperations operations;

    @Test
    public void findFilesInGridFs() {
        GridFSFindIterable result = operations.find(query(whereFilename().is("filename.txt")));
    }
}
----
====
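
Querying by custom metadata works through `whereMetaData(…)` (a minimal sketch, assuming files were stored with a `category` metadata key as in the earlier sketch):

[source,java]
----
GridFSFindIterable result = operations.find(query(whereMetaData("category").is("text")));
----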

NOTE: Currently, MongoDB does not support defining sort criteria when retrieving files from GridFS. For this reason, any sort criteria defined on the `Query` instance handed into the `find(…)` method are disregarded.

The other option to read files from the GridFs is to use the methods introduced by the `ResourcePatternResolver` interface. They allow handing an Ant path into the method and can thus retrieve files matching the given pattern. The following example shows how to use `GridFsTemplate` to read files:

.Using GridFsTemplate to read files
====
[source,java]
----
class GridFsClient {

    @Autowired
    GridFsOperations operations;

    @Test
    public void readFilesFromGridFs() {
        GridFsResource[] txtFiles = operations.getResources("*.txt");
    }
}
----
====

`GridFsOperations` extends `ResourcePatternResolver` and lets the `GridFsTemplate` (for example) be plugged into an `ApplicationContext` to read Spring Config files from a MongoDB database.

@@ -214,7 +214,7 @@ public class AppConfig {
----
====

To access the `com.mongodb.client.MongoClient` object created by the `MongoClientFactoryBean` in other `@Configuration` classes or your own classes, use a `private @Autowired Mongo mongo;` field.
To access the `com.mongodb.client.MongoClient` object created by the `MongoClientFactoryBean` in other `@Configuration` classes or your own classes, use a `private @Autowired MongoClient mongoClient;` field.

[[mongo.mongo-xml-config]]
=== Registering a Mongo Instance by Using XML-based Metadata

@@ -2338,656 +2338,7 @@ GroupByResults<XObject> results = mongoTemplate.group(where("x").gt(0),
    keyFunction("classpath:keyFunction.js").initialDocument("{ count: 0 }").reduceFunction("classpath:groupReduce.js"), XObject.class);
----

[[mongo.aggregation]]
== Aggregation Framework Support

Spring Data MongoDB provides support for the Aggregation Framework introduced to MongoDB in version 2.2.

For further information, see the full https://docs.mongodb.org/manual/aggregation/[reference documentation] of the aggregation framework and other data aggregation tools for MongoDB.

[[mongo.aggregation.basic-concepts]]
=== Basic Concepts

The Aggregation Framework support in Spring Data MongoDB is based on the following key abstractions: `Aggregation`, `AggregationOperation`, and `AggregationResults`.

* `Aggregation`
+
An `Aggregation` represents a MongoDB `aggregate` operation and holds the description of the aggregation pipeline instructions. Aggregations are created by invoking the appropriate `newAggregation(…)` static factory method of the `Aggregation` class, which takes a list of `AggregationOperation` and an optional input class.
+
The actual aggregate operation is run by the `aggregate` method of the `MongoTemplate`, which takes the desired output class as a parameter.
+
* `TypedAggregation`
+
A `TypedAggregation`, just like an `Aggregation`, holds the instructions of the aggregation pipeline and a reference to the input type that is used for mapping domain properties to actual document fields.
+
At runtime, field references get checked against the given input type, considering potential `@Field` annotations and raising errors when referencing nonexistent properties.
+
* `AggregationOperation`
+
An `AggregationOperation` represents a MongoDB aggregation pipeline operation and describes the processing that should be performed in this aggregation step. Although you could manually create an `AggregationOperation`, we recommend using the static factory methods provided by the `Aggregation` class to construct an `AggregationOperation`.
+
* `AggregationResults`
+
`AggregationResults` is the container for the result of an aggregate operation. It provides access to the raw aggregation result (in the form of a `Document`), to the mapped objects, and to other information about the aggregation.
+
The following listing shows the canonical example for using the Spring Data MongoDB support for the MongoDB Aggregation Framework:
+
[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

Aggregation agg = newAggregation(
    pipelineOP1(),
    pipelineOP2(),
    pipelineOPn()
);

AggregationResults<OutputType> results = mongoTemplate.aggregate(agg, "INPUT_COLLECTION_NAME", OutputType.class);
List<OutputType> mappedResult = results.getMappedResults();
----

Note that, if you provide an input class as the first parameter to the `newAggregation` method, the `MongoTemplate` derives the name of the input collection from this class. Otherwise, if you do not specify an input class, you must provide the name of the input collection explicitly. If both an input class and an input collection are provided, the latter takes precedence.
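
The following sketch contrasts the two variants (it reuses the `Product` class from the examples below; the `products` collection name is illustrative):

[source,java]
----
TypedAggregation<Product> typedAgg = newAggregation(Product.class, project("name"));

// collection name derived from the Product class
AggregationResults<Document> byClass = mongoTemplate.aggregate(typedAgg, Document.class);

// an explicitly given collection name takes precedence over the input class
AggregationResults<Document> byName = mongoTemplate.aggregate(typedAgg, "products", Document.class);
----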

[[mongo.aggregation.supported-aggregation-operations]]
=== Supported Aggregation Operations

The MongoDB Aggregation Framework provides the following types of aggregation operations:

* Pipeline Aggregation Operators
* Group Aggregation Operators
* Boolean Aggregation Operators
* Comparison Aggregation Operators
* Arithmetic Aggregation Operators
* String Aggregation Operators
* Date Aggregation Operators
* Array Aggregation Operators
* Conditional Aggregation Operators
* Lookup Aggregation Operators
* Convert Aggregation Operators
* Object Aggregation Operators
* Script Aggregation Operators

At the time of this writing, we provide support for the following Aggregation Operations in Spring Data MongoDB:

.Aggregation Operations currently supported by Spring Data MongoDB
[cols="2*"]
|===
| Pipeline Aggregation Operators
| `bucket`, `bucketAuto`, `count`, `facet`, `geoNear`, `graphLookup`, `group`, `limit`, `lookup`, `match`, `project`, `replaceRoot`, `skip`, `sort`, `unwind`

| Set Aggregation Operators
| `setEquals`, `setIntersection`, `setUnion`, `setDifference`, `setIsSubset`, `anyElementTrue`, `allElementsTrue`

| Group Aggregation Operators
| `addToSet`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `(*count)`, `stdDevPop`, `stdDevSamp`

| Arithmetic Aggregation Operators
| `abs`, `add` (*via `plus`), `ceil`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (*via `minus`), `trunc`

| String Aggregation Operators
| `concat`, `substr`, `toLower`, `toUpper`, `strcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtrim`

| Comparison Aggregation Operators
| `eq` (*via: `is`), `gt`, `gte`, `lt`, `lte`, `ne`

| Array Aggregation Operators
| `arrayElementAt`, `arrayToObject`, `concatArrays`, `filter`, `in`, `indexOfArray`, `isArray`, `range`, `reverseArray`, `reduce`, `size`, `slice`, `zip`

| Literal Operators
| `literal`

| Date Aggregation Operators
| `dayOfYear`, `dayOfMonth`, `dayOfWeek`, `year`, `month`, `week`, `hour`, `minute`, `second`, `millisecond`, `dateToString`, `dateFromString`, `dateFromParts`, `dateToParts`, `isoDayOfWeek`, `isoWeek`, `isoWeekYear`

| Variable Operators
| `map`

| Conditional Aggregation Operators
| `cond`, `ifNull`, `switch`

| Type Aggregation Operators
| `type`

| Convert Aggregation Operators
| `convert`, `toBool`, `toDate`, `toDecimal`, `toDouble`, `toInt`, `toLong`, `toObjectId`, `toString`

| Object Aggregation Operators
| `objectToArray`, `mergeObjects`

| Script Aggregation Operators
| `function`, `accumulator`
|===

* The operation is mapped or added by Spring Data MongoDB.

Note that the aggregation operations not listed here are currently not supported by Spring Data MongoDB. Comparison aggregation operators are expressed as `Criteria` expressions.
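
For example, a comparison inside a `match` stage goes through `Criteria` (a minimal sketch; the field name is borrowed from Example 3 below):

[source,java]
----
// expressed as a Criteria rather than a $gte aggregation operator
match(where("totalPop").gte(10 * 1000 * 1000));
----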

[[mongo.aggregation.projection]]
=== Projection Expressions

Projection expressions are used to define the fields that are the outcome of a particular aggregation step. Projection expressions can be defined through the `project` method of the `Aggregation` class, either by passing a list of `String` objects or an aggregation framework `Fields` object. The projection can be extended with additional fields through a fluent API by using the `and(String)` method and aliased by using the `as(String)` method.
Note that you can also define fields with aliases by using the `Fields.field` static factory method of the aggregation framework, which you can then use to construct a new `Fields` instance. References to projected fields in later aggregation stages are valid only for the field names of included fields or their aliases (including newly defined fields and their aliases). Fields not included in the projection cannot be referenced in later aggregation stages. The following listings show examples of projection expressions:

.Projection expression examples
====
[source,java]
----
// generates {$project: {name: 1, netPrice: 1}}
project("name", "netPrice")

// generates {$project: {thing2: $thing1}}
project().and("thing1").as("thing2")

// generates {$project: {a: 1, b: 1, thing2: $thing1}}
project("a","b").and("thing1").as("thing2")
----
====
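
The `Fields.field` factory mentioned above can be used as in the following sketch (the `net_price` document field and its `netPrice` alias are illustrative):

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Fields.*;

// alias the raw "net_price" document field as "netPrice" and reuse the Fields instance
Fields fields = from(field("name"), field("netPrice", "net_price"));

// generates {$project: {name: 1, netPrice: "$net_price"}}
project(fields)
----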

.Multi-Stage Aggregation using Projection and Sorting
====
[source,java]
----
// generates {$project: {name: 1, netPrice: 1}}, {$sort: {name: 1}}
project("name", "netPrice"), sort(ASC, "name")

// generates {$project: {name: $firstname}}, {$sort: {name: 1}}
project().and("firstname").as("name"), sort(ASC, "name")

// does not work
project().and("firstname").as("name"), sort(ASC, "firstname")
----
====

More examples for project operations can be found in the `AggregationTests` class. Note that further details regarding the projection expressions can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/project/#pipe._S_project[corresponding section] of the MongoDB Aggregation Framework reference documentation.

[[mongo.aggregation.facet]]
=== Faceted Classification

As of Version 3.4, MongoDB supports faceted classification by using the Aggregation Framework. A faceted classification uses semantic categories (either general or subject-specific) that are combined to create the full classification entry. Documents flowing through the aggregation pipeline are classified into buckets. A multi-faceted classification enables various aggregations on the same set of input documents, without needing to retrieve the input documents multiple times.

==== Buckets

Bucket operations categorize incoming documents into groups, called buckets, based on a specified expression and bucket boundaries. Bucket operations require a grouping field or a grouping expression. You can define them by using the `bucket()` and `bucketAuto()` methods of the `Aggregation` class. `BucketOperation` and `BucketAutoOperation` can expose accumulations based on aggregation expressions for input documents. You can extend the bucket operation with additional parameters through a fluent API by using the `with…()` methods and the `andOutput(String)` method. You can alias the operation by using the `as(String)` method. Each bucket is represented as a document in the output.

`BucketOperation` takes a defined set of boundaries to group incoming documents into these categories. Boundaries are required to be sorted. The following listing shows some examples of bucket operations:

.Bucket operation examples
====
[source,java]
----
// generates {$bucket: {groupBy: $price, boundaries: [0, 100, 400]}}
bucket("price").withBoundaries(0, 100, 400);

// generates {$bucket: {groupBy: $price, default: "Other", boundaries: [0, 100]}}
bucket("price").withBoundaries(0, 100).withDefault("Other");

// generates {$bucket: {groupBy: $price, boundaries: [0, 100], output: { count: { $sum: 1}}}}
bucket("price").withBoundaries(0, 100).andOutputCount().as("count");

// generates {$bucket: {groupBy: $price, boundaries: [0, 100], output: { titles: { $push: "$title"}}}}
bucket("price").withBoundaries(0, 100).andOutput("title").push().as("titles");
----
====

`BucketAutoOperation` determines boundaries in an attempt to evenly distribute documents into a specified number of buckets. `BucketAutoOperation` optionally takes a granularity value that specifies the https://en.wikipedia.org/wiki/Preferred_number[preferred number] series to use to ensure that the calculated boundary edges end on preferred round numbers or on powers of 10. The following listing shows examples of bucket operations:

.Bucket operation examples
====
[source,java]
----
// generates {$bucketAuto: {groupBy: $price, buckets: 5}}
bucketAuto("price", 5)

// generates {$bucketAuto: {groupBy: $price, buckets: 5, granularity: "E24"}}
bucketAuto("price", 5).withGranularity(Granularities.E24);

// generates {$bucketAuto: {groupBy: $price, buckets: 5, output: { titles: { $push: "$title"}}}}
bucketAuto("price", 5).andOutput("title").push().as("titles");
----
====

To create output fields in buckets, bucket operations can use `AggregationExpression` through `andOutput()` and <<mongo.aggregation.projection.expressions, SpEL expressions>> through `andOutputExpression()`.
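
A short sketch of the expression-based variant (the `price` and `stock` field names and the `revenue` output field are illustrative):

[source,java]
----
// a SpEL expression computes the output field from document fields
bucket("price").withBoundaries(0, 100).andOutputExpression("price * stock").as("revenue");
----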

Note that further details regarding bucket expressions can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/bucket/[`$bucket` section] and
https://docs.mongodb.org/manual/reference/operator/aggregation/bucketAuto/[`$bucketAuto` section] of the MongoDB Aggregation Framework reference documentation.

==== Multi-faceted Aggregation

Multiple aggregation pipelines can be used to create multi-faceted aggregations that characterize data across multiple dimensions (or facets) within a single aggregation stage. Multi-faceted aggregations provide multiple filters and categorizations to guide data browsing and analysis. A common implementation of faceting is how many online retailers provide ways to narrow down search results by applying filters on product price, manufacturer, size, and other factors.

You can define a `FacetOperation` by using the `facet()` method of the `Aggregation` class. You can customize it with multiple aggregation pipelines by using the `and()` method. Each sub-pipeline has its own field in the output document where its results are stored as an array of documents.

Sub-pipelines can project and filter input documents prior to grouping. Common use cases include extraction of date parts or calculations before categorization. The following listing shows facet operation examples:

.Facet operation examples
====
[source,java]
----
// generates {$facet: {categorizedByPrice: [ { $match: { price: {$exists : true}}}, { $bucketAuto: {groupBy: $price, buckets: 5}}]}}
facet(match(Criteria.where("price").exists(true)), bucketAuto("price", 5)).as("categorizedByPrice")

// generates {$facet: {categorizedByCountry: [ { $match: { country: {$exists : true}}}, { $sortByCount: "$country"}]}}
facet(match(Criteria.where("country").exists(true)), sortByCount("country")).as("categorizedByCountry")

// generates {$facet: {categorizedByYear: [
//   { $project: { title: 1, publicationYear: { $year: "$publicationDate"}}},
//   { $bucketAuto: {groupBy: $publicationYear, buckets: 5, output: { titles: {$push:"$title"}}}
// ]}}
facet(project("title").and("publicationDate").extractYear().as("publicationYear"),
      bucketAuto("publicationYear", 5).andOutput("title").push().as("titles"))
    .as("categorizedByYear")
----
====

Note that further details regarding facet operation can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/facet/[`$facet` section] of the MongoDB Aggregation Framework reference documentation.

[[mongo.aggregation.sort-by-count]]
==== Sort By Count

Sort by count operations group incoming documents based on the value of a specified expression, compute the count of documents in each distinct group, and sort the results by count. They offer a handy shortcut to apply sorting when using <<mongo.aggregation.facet>>. Sort by count operations require a grouping field or grouping expression. The following listing shows a sort by count example:

.Sort by count example
====
[source,java]
----
// generates { $sortByCount: "$country" }
sortByCount("country");
----
====

A sort by count operation is equivalent to the following BSON (Binary JSON):

----
{ $group: { _id: <expression>, count: { $sum: 1 } } },
{ $sort: { count: -1 } }
----
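
For context, here is a sketch of the shortcut inside a complete pipeline (the `orders` collection name is illustrative):

[source,java]
----
Aggregation agg = newAggregation(
    match(Criteria.where("country").exists(true)), // keep only documents that carry a country
    sortByCount("country")                         // group by country, count, and sort by count descending
);
AggregationResults<Document> results = mongoTemplate.aggregate(agg, "orders", Document.class);
----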

[[mongo.aggregation.projection.expressions]]
==== Spring Expression Support in Projection Expressions

We support the use of SpEL expressions in projection expressions through the `andExpression` method of the `ProjectionOperation` and `BucketOperation` classes. This feature lets you define the desired expression as a SpEL expression. On running a query, the SpEL expression is translated into a corresponding MongoDB projection expression part. This arrangement makes it much easier to express complex calculations.

===== Complex Calculations with SpEL expressions

Consider the following SpEL expression:

[source,java]
----
1 + (q + 1) / (q - 1)
----

The preceding expression is translated into the following projection expression part:

[source,javascript]
----
{ "$add" : [ 1, {
    "$divide" : [ {
        "$add":["$q", 1]}, {
        "$subtract":[ "$q", 1]}
    ]
}]}
----

You can see examples in more context in <<mongo.aggregation.examples.example5>> and <<mongo.aggregation.examples.example6>>. You can find more usage examples for supported SpEL expression constructs in `SpelExpressionTransformerUnitTests`. The following table shows the SpEL transformations supported by Spring Data MongoDB:

.Supported SpEL transformations
[%header,cols="2"]
|===
| SpEL Expression
| Mongo Expression Part
| a == b
| { $eq : [$a, $b] }
| a != b
| { $ne : [$a , $b] }
| a > b
| { $gt : [$a, $b] }
| a >= b
| { $gte : [$a, $b] }
| a < b
| { $lt : [$a, $b] }
| a <= b
| { $lte : [$a, $b] }
| a + b
| { $add : [$a, $b] }
| a - b
| { $subtract : [$a, $b] }
| a * b
| { $multiply : [$a, $b] }
| a / b
| { $divide : [$a, $b] }
| a^b
| { $pow : [$a, $b] }
| a % b
| { $mod : [$a, $b] }
| a && b
| { $and : [$a, $b] }
| a \|\| b
| { $or : [$a, $b] }
| !a
| { $not : [$a] }
|===

In addition to the transformations shown in the preceding table, you can use standard SpEL operations such as `new` to (for example) create arrays and reference expressions through their names (followed by the arguments to use in brackets). The following example shows how to create an array in this fashion:

[source,java]
----
// { $setEquals : [$a, [5, 8, 13] ] }
.andExpression("setEquals(a, new int[]{5, 8, 13})");
----

[[mongo.aggregation.examples]]
==== Aggregation Framework Examples

The examples in this section demonstrate the usage patterns for the MongoDB Aggregation Framework with Spring Data MongoDB.

[[mongo.aggregation.examples.example1]]
===== Aggregation Framework Example 1

In this introductory example, we want to aggregate a list of tags to get the occurrence count of a particular tag from a MongoDB collection (called `tags`) sorted by the occurrence count in descending order. This example demonstrates the usage of grouping, sorting, projections (selection), and unwinding (result splitting).

[source,java]
----
class TagCount {
    String tag;
    int n;
}
----

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

Aggregation agg = newAggregation(
    project("tags"),
    unwind("tags"),
    group("tags").count().as("n"),
    project("n").and("tag").previousOperation(),
    sort(DESC, "n")
);

AggregationResults<TagCount> results = mongoTemplate.aggregate(agg, "tags", TagCount.class);
List<TagCount> tagCount = results.getMappedResults();
----

The preceding listing uses the following algorithm:

. Create a new aggregation by using the `newAggregation` static factory method, to which we pass a list of aggregation operations. These aggregate operations define the aggregation pipeline of our `Aggregation`.
. Use the `project` operation to select the `tags` field (which is an array of strings) from the input collection.
. Use the `unwind` operation to generate a new document for each tag within the `tags` array.
. Use the `group` operation to define a group for each `tags` value for which we aggregate the occurrence count (by using the `count` aggregation operator and collecting the result in a new field called `n`).
. Select the `n` field and create an alias for the ID field generated from the previous group operation (hence the call to `previousOperation()`) with a name of `tag`.
. Use the `sort` operation to sort the resulting list of tags by their occurrence count in descending order.
. Call the `aggregate` method on `MongoTemplate` to let MongoDB perform the actual aggregation operation, with the created `Aggregation` as an argument.

Note that the input collection is explicitly specified as the `tags` parameter to the `aggregate` method. If the name of the input collection is not specified explicitly, it is derived from the input class passed as the first parameter to the `newAggregation` method.

[[mongo.aggregation.examples.example2]]
===== Aggregation Framework Example 2

This example is based on the https://docs.mongodb.org/manual/tutorial/aggregation-examples/#largest-and-smallest-cities-by-state[Largest and Smallest Cities by State] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions. Here we want to return the smallest and largest cities by population for each state by using the aggregation framework. This example demonstrates grouping, sorting, and projections (selection).

[source,java]
----
class ZipInfo {
    String id;
    String city;
    String state;
    @Field("pop") int population;
    @Field("loc") double[] location;
}

class City {
    String name;
    int population;
}

class ZipInfoStats {
    String id;
    String state;
    City biggestCity;
    City smallestCity;
}
----

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

TypedAggregation<ZipInfo> aggregation = newAggregation(ZipInfo.class,
    group("state", "city")
        .sum("population").as("pop"),
    sort(ASC, "pop", "state", "city"),
    group("state")
        .last("city").as("biggestCity")
        .last("pop").as("biggestPop")
        .first("city").as("smallestCity")
        .first("pop").as("smallestPop"),
    project()
        .and("state").previousOperation()
        .and("biggestCity")
            .nested(bind("name", "biggestCity").and("population", "biggestPop"))
        .and("smallestCity")
            .nested(bind("name", "smallestCity").and("population", "smallestPop")),
    sort(ASC, "state")
);

AggregationResults<ZipInfoStats> result = mongoTemplate.aggregate(aggregation, ZipInfoStats.class);
ZipInfoStats firstZipInfoStats = result.getMappedResults().get(0);
----

Note that the `ZipInfo` class maps the structure of the given input-collection. The `ZipInfoStats` class defines the structure in the desired output format.

The preceding listings use the following algorithm:

. Use the `group` operation to define a group from the input-collection. The grouping criteria is the combination of the `state` and `city` fields, which forms the ID structure of the group. We aggregate the value of the `population` property from the grouped elements by using the `sum` operator and save the result in the `pop` field.
. Use the `sort` operation to sort the intermediate-result by the `pop`, `state` and `city` fields, in ascending order, such that the smallest city is at the top and the biggest city is at the bottom of the result. Note that the sorting on `state` and `city` is implicitly performed against the group ID fields (which Spring Data MongoDB handles).
. Use a `group` operation again to group the intermediate result by `state`. Note that `state` again implicitly references a group ID field. We select the name and the population count of the biggest and smallest city with calls to the `last(…)` and `first(…)` operators, respectively.
. Select the `state` field from the previous `group` operation in the `project` operation. Note that `state` again implicitly references a group ID field. Because we do not want an implicitly generated ID to appear, we exclude the ID from the previous operation by using `and(previousOperation()).exclude()`. Because we want to populate the nested `City` structures in our output class, we have to emit appropriate sub-documents by using the nested method.
. Sort the resulting list of `ZipInfoStats` by their state name in ascending order in the `sort` operation.

Note that we derive the name of the input collection from the `ZipInfo` class passed as the first parameter to the `newAggregation` method.

[[mongo.aggregation.examples.example3]]
===== Aggregation Framework Example 3

This example is based on the https://docs.mongodb.org/manual/tutorial/aggregation-examples/#states-with-populations-over-10-million[States with Populations Over 10 Million] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions. Here we want to return all states with a population greater than 10 million, using the aggregation framework. This example demonstrates grouping, sorting, and matching (filtering).

[source,java]
----
class StateStats {
    @Id String id;
    String state;
    @Field("totalPop") int totalPopulation;
}
----

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

TypedAggregation<ZipInfo> agg = newAggregation(ZipInfo.class,
    group("state").sum("population").as("totalPop"),
    sort(ASC, previousOperation(), "totalPop"),
    match(where("totalPop").gte(10 * 1000 * 1000))
);

AggregationResults<StateStats> result = mongoTemplate.aggregate(agg, StateStats.class);
List<StateStats> stateStatsList = result.getMappedResults();
----

The preceding listings use the following algorithm:

. Group the input collection by the `state` field, calculate the sum of the `population` field, and store the result in the new field `"totalPop"`.
. Sort the intermediate result by the id-reference of the previous group operation in addition to the `"totalPop"` field in ascending order.
. Filter the intermediate result by using a `match` operation which accepts a `Criteria` query as an argument.

Note that we derive the name of the input collection from the `ZipInfo` class passed as the first parameter to the `newAggregation` method.

[[mongo.aggregation.examples.example4]]
===== Aggregation Framework Example 4

This example demonstrates the use of simple arithmetic operations in the projection operation.

[source,java]
----
class Product {
    String id;
    String name;
    double netPrice;
    int spaceUnits;
}
----

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

TypedAggregation<Product> agg = newAggregation(Product.class,
    project("name", "netPrice")
        .and("netPrice").plus(1).as("netPricePlus1")
        .and("netPrice").minus(1).as("netPriceMinus1")
        .and("netPrice").multiply(1.19).as("grossPrice")
        .and("netPrice").divide(2).as("netPriceDiv2")
        .and("spaceUnits").mod(2).as("spaceUnitsMod2")
);

AggregationResults<Document> result = mongoTemplate.aggregate(agg, Document.class);
List<Document> resultList = result.getMappedResults();
----

Note that we derive the name of the input collection from the `Product` class passed as the first parameter to the `newAggregation` method.

[[mongo.aggregation.examples.example5]]
===== Aggregation Framework Example 5

This example demonstrates the use of simple arithmetic operations derived from SpEL Expressions in the projection operation.

[source,java]
----
class Product {
    String id;
    String name;
    double netPrice;
    int spaceUnits;
}
----

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

TypedAggregation<Product> agg = newAggregation(Product.class,
    project("name", "netPrice")
        .andExpression("netPrice + 1").as("netPricePlus1")
        .andExpression("netPrice - 1").as("netPriceMinus1")
        .andExpression("netPrice / 2").as("netPriceDiv2")
        .andExpression("netPrice * 1.19").as("grossPrice")
        .andExpression("spaceUnits % 2").as("spaceUnitsMod2")
        .andExpression("(netPrice * 0.8 + 1.2) * 1.19").as("grossPriceIncludingDiscountAndCharge")
);

AggregationResults<Document> result = mongoTemplate.aggregate(agg, Document.class);
List<Document> resultList = result.getMappedResults();
----

[[mongo.aggregation.examples.example6]]
===== Aggregation Framework Example 6

This example demonstrates the use of complex arithmetic operations derived from SpEL Expressions in the projection operation.

Note: The additional parameters passed to the `andExpression` method can be referenced with indexer expressions according to their position. In this example, we reference the first parameter of the parameters array with `[0]`. When the SpEL expression is transformed into a MongoDB aggregation framework expression, external parameter expressions are replaced with their respective values.

[source,java]
----
class Product {
    String id;
    String name;
    double netPrice;
    int spaceUnits;
}
----

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

double shippingCosts = 1.2;

TypedAggregation<Product> agg = newAggregation(Product.class,
    project("name", "netPrice")
        .andExpression("(netPrice * (1-discountRate) + [0]) * (1+taxRate)", shippingCosts).as("salesPrice")
);

AggregationResults<Document> result = mongoTemplate.aggregate(agg, Document.class);
List<Document> resultList = result.getMappedResults();
----

Note that we can also refer to other fields of the document within the SpEL expression.

[[mongo.aggregation.examples.example7]]
===== Aggregation Framework Example 7

This example uses conditional projection. It is derived from the https://docs.mongodb.com/manual/reference/operator/aggregation/cond/[$cond reference documentation].

[source,java]
----
public class InventoryItem {

    @Id int id;
    String item;
    String description;
    int qty;
}

public class InventoryItemProjection {

    @Id int id;
    String item;
    String description;
    int qty;
    int discount;
}
----

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

TypedAggregation<InventoryItem> agg = newAggregation(InventoryItem.class,
    project("item").and("discount")
        .applyCondition(ConditionalOperators.when(Criteria.where("qty").gte(250))
            .then(30)
            .otherwise(20))
        .and(ifNull("description", "Unspecified")).as("description")
);

AggregationResults<InventoryItemProjection> result = mongoTemplate.aggregate(agg, "inventory", InventoryItemProjection.class);
List<InventoryItemProjection> stateStatsList = result.getMappedResults();
----

This one-step aggregation uses a projection operation with the `inventory` collection. We project the `discount` field by using a conditional operation for all inventory items that have a `qty` greater than or equal to `250`. A second conditional projection is performed for the `description` field. We apply the `Unspecified` description to all items that either do not have a `description` field or have a `null` description.

As of MongoDB 3.6, it is possible to exclude fields from the projection by using a conditional expression.

.Conditional aggregation projection
====
[source,java]
----
TypedAggregation<Book> agg = Aggregation.newAggregation(Book.class,
    project("title")
        .and(ConditionalOperators.when(ComparisonOperators.valueOf("author.middle") <1>
                .equalToValue("")) <2>
            .then("$$REMOVE") <3>
            .otherwiseValueOf("author.middle") <4>
        )
        .as("author.middle"));
----
<1> If the value of the field `author.middle`
<2> does not contain a value,
<3> then use https://docs.mongodb.com/manual/reference/aggregation-variables/#variable.REMOVE[``$$REMOVE``] to exclude the field.
<4> Otherwise, add the field value of `author.middle`.
====

include::aggregation-framework.adoc[]

[[mongo-template.index-and-collections]]
== Index and Collection Management

@@ -3180,121 +2531,6 @@ boolean hasIndex = template.execute("geolocation", new CollectionCallback<Boolean>() {
});
----

include::gridfs.adoc[]
include::tailable-cursors.adoc[]
include::change-streams.adoc[]

@@ -1,6 +1,99 @@
Spring Data MongoDB Changelog
=============================

Changes in version 3.1.11 (2021-07-16)
--------------------------------------
* #3689 - Fix Regression in generating queries with nested maps with numeric keys.
* #3688 - Multiple maps with numeric keys in a single update produces the wrong query (Regression).


Changes in version 3.2.2 (2021-06-22)
-------------------------------------
* #3677 - Add missing double quote to GeoJson.java JSDoc header.
* #3668 - Projection on the _id field returns wrong result when using `@MongoId` (MongoDB 4.4).
* #3666 - Documentation references outdated `Mongo` client.
* #3660 - MappingMongoConverter problem: ConversionContext#convert does not try to use custom converters first.
* #3659 - [3.2.1] Indexing Class with Custom Converter -> Couldn't find PersistentEntity for property private [...].
* #3635 - $floor isOrOrNor() return true.
* #3633 - NPE in QueryMapper when use Query with `null` as value.


Changes in version 3.1.10 (2021-06-22)
--------------------------------------
* #3677 - Add missing double quote to GeoJson.java JSDoc header.
* #3666 - Documentation references outdated `Mongo` client.
* #3659 - [3.2.1] Indexing Class with Custom Converter -> Couldn't find PersistentEntity for property private [...].
* #3635 - $floor isOrOrNor() return true.
* #3633 - NPE in QueryMapper when use Query with `null` as value.


Changes in version 3.2.1 (2021-05-14)
-------------------------------------
* #3638 - Introduce template method for easier customization of fragments.
* #3632 - Fix bullet points in aggregations framework asciidoc.


Changes in version 3.1.9 (2021-05-14)
-------------------------------------


Changes in version 3.2.0 (2021-04-14)
-------------------------------------
* #3623 - `@Aggregation` repository query method causes `NullPointerException` when the result is empty.
* #3621 - Upgrade to MongoDB Java Drivers 4.2.3.
* #3612 - Upgrade to MongoDB 4.4 on CI.
* #3601 - Criteria object not allowing to use field names with underscore in them.
* #3583 - Support aggregation expression on fields projection.
* #3414 - Criteria or toEquals fail if contains regex [DATAMONGO-2559].


Changes in version 3.1.8 (2021-04-14)
-------------------------------------
* #3623 - `@Aggregation` repository query method causes `NullPointerException` when the result is empty.
* #3601 - Criteria object not allowing to use field names with underscore in them.
* #3414 - Criteria or toEquals fail if contains regex [DATAMONGO-2559].


Changes in version 3.0.9.RELEASE (2021-04-14)
---------------------------------------------
* #3623 - `@Aggregation` repository query method causes `NullPointerException` when the result is empty.
* #3609 - SimpleReactiveMongoRepository#saveAll does not populate @Id property if it is immutable.
* #3414 - Criteria or toEquals fail if contains regex [DATAMONGO-2559].


Changes in version 3.1.7 (2021-03-31)
-------------------------------------
* #3613 - Use StringUtils.replace(…) instead of String.replaceAll(…) for mapKeyDotReplacement.
* #3609 - SimpleReactiveMongoRepository#saveAll does not populate @Id property if it is immutable.


Changes in version 3.2.0-RC1 (2021-03-31)
-----------------------------------------
* #3613 - Use StringUtils.replace(…) instead of String.replaceAll(…) for mapKeyDotReplacement.
* #3609 - SimpleReactiveMongoRepository#saveAll does not populate @Id property if it is immutable.
* #3600 - Rename Embedded annotation -> Unwrapped.
* #3583 - Support aggregation expression on fields projection.


Changes in version 3.2.0-M5 (2021-03-17)
----------------------------------------
* #3592 - Remove @Persistent from entity-scan include filters.
* #3590 - Embedded sharding keys are not correctly picked up from the shardKeySource Document.
* #3580 - Fix CustomConverter conversion lookup.
* #3579 - Upgrade to MongoDB Java Drivers 4.2.2.
* #3575 - Introduce ConversionContext and clean up MappingMongoConverter.
* #3573 - Json Schema section appears twice in reference documentation.
* #3571 - Introduce ConversionContext and clean up MappingMongoConverter.
* #3570 - Incorrect class casting cause ClassCastException when save java.util.Collection using MongoTemplate.
* #3568 - MongoSocketWriteException may be translated into DataAccessResourceFailureException.
* #3566 - Couldn't find PersistentEntity for type java.lang.Object when updating a field with suffix "class".
* #3552 - UpdateMapper drops numeric keys in Maps.
* #3395 - Derived findBy…IgnoreCaseIn query doesn't return expected results [DATAMONGO-2540].
* #3286 - Add possibility to use Collection<Criteria> as parameter in and/or/nor operators [DATAMONGO-2428].
* #2911 - ensureNotIterable in MongoTemplate only checks for array type [DATAMONGO-2044].
* #590 - DATAMONGO-2044 make ensureNotIterable actually check if object is iterable.


Changes in version 3.1.6 (2021-03-17)
-------------------------------------
* #3592 - Remove @Persistent from entity-scan include filters.

@@ -3357,6 +3450,17 @@ Repository

@@ -1,4 +1,4 @@
Spring Data MongoDB 3.1.6 (2020.0.6)
Spring Data MongoDB 3.1.14 (2020.0.14)
Copyright (c) [2010-2019] Pivotal Software, Inc.

This product is licensed to you under the Apache License, Version 2.0 (the "License").

@@ -20,6 +20,14 @@ conditions of the subcomponent's license, as noted in the LICENSE file.