Compare commits
101 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 2b5e2361a8 | |
| | 5737f2d19d | |
| | 60494a6904 | |
| | ceb561e3e4 | |
| | e2d0220cea | |
| | ea33e8b8c6 | |
| | 506b6a2e85 | |
| | 7c0eee9e09 | |
| | 332e5eb715 | |
| | 39ee9b56e2 | |
| | 8fb390ee88 | |
| | df1c4496dc | |
| | b808fd3003 | |
| | ed12298271 | |
| | 682798325b | |
| | 0e69021486 | |
| | ae7e24f1b6 | |
| | 94d4fa613c | |
| | 39c9593b39 | |
| | 6e5f3661a8 | |
| | 2bd78e0bf0 | |
| | dd59cdc59a | |
| | 7e471e2301 | |
| | 0871a43831 | |
| | 710e77dabe | |
| | 9c996617e8 | |
| | eebd49ab8d | |
| | fb979b1734 | |
| | b5c88938e0 | |
| | 4027770701 | |
| | a120ce2bb1 | |
| | a5d40a049d | |
| | f0f12d5296 | |
| | 24e06cf219 | |
| | 1b83ff0382 | |
| | fe41202f96 | |
| | 78235b4799 | |
| | 51ece4353b | |
| | 51bab838b0 | |
| | 361f9daa45 | |
| | 56b23a6dbe | |
| | 9e15c17e26 | |
| | a3c77a43b6 | |
| | 55169e2e11 | |
| | 24672e6bdd | |
| | 1a46abfbb9 | |
| | 61284228dd | |
| | 8cb92de1ee | |
| | 5d3cc3fa04 | |
| | c0b99740dc | |
| | 595bbd3aa7 | |
| | 5d2fc31164 | |
| | a9dc0fae69 | |
| | 0605c7b753 | |
| | 21352a8829 | |
| | 58e1d2dbd9 | |
| | 4f7821e3c2 | |
| | 9dd866e34a | |
| | def6079795 | |
| | f3f537c1a6 | |
| | ad44db386b | |
| | bcc3bf61b6 | |
| | 1a28a294d1 | |
| | 14623a3655 | |
| | 6dcaa31897 | |
| | e57fe346c0 | |
| | 7dd94949d5 | |
| | 966f971bee | |
| | aa23c579e8 | |
| | 6b634d08ce | |
| | b7b61405f9 | |
| | 4d65aa7207 | |
| | c129c706a3 | |
| | 7823385ac7 | |
| | 21fcfe11c2 | |
| | bfe33a446c | |
| | 9be50316c3 | |
| | 30513267af | |
| | d3d480e79b | |
| | c39ad1bbc4 | |
| | fcdc29df49 | |
| | de7120d8dd | |
| | 84df02ae38 | |
| | d6c5907940 | |
| | b2fe54c0a1 | |
| | 47a198c688 | |
| | 5d9dbda03b | |
| | 36d52862bc | |
| | 0afbf6fe19 | |
| | b0bf8cb718 | |
| | 567a8d9d5b | |
| | ceef18d7a4 | |
| | 4f57712f12 | |
| | 478396c503 | |
| | aa80d1ad0a | |
| | fd28ab4d33 | |
| | 187c80dfcc | |
| | 389a3ac066 | |
| | 297bd3e3dd | |
| | b11fba3321 | |
| | 3c68671d86 | |
README.md (10 changes)
@@ -6,7 +6,7 @@ The Spring Data MongoDB project aims to provide a familiar and consistent Spring

## Getting Help

For a comprehensive treatmet of all the Spring Data MongoDB features, please refer to:
For a comprehensive treatment of all the Spring Data MongoDB features, please refer to:

* the [User Guide](http://static.springsource.org/spring-data/data-mongodb/docs/current/reference/html/)
* the [JavaDocs](http://static.springsource.org/spring-data/data-mongodb/docs/current/api/) have extensive comments in them as well.

@@ -26,7 +26,7 @@ Add the Maven dependency:

<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>1.2.1.RELEASE</version>
<version>1.3.2.RELEASE</version>
</dependency>
```

@@ -36,7 +36,7 @@ If you'd rather like the latest snapshots of the upcoming major version, use our

<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>1.2.1.RELEASE</version>
<version>1.4.0.BUILD-SNAPSHOT</version>
</dependency>

<repository>

@@ -57,7 +57,7 @@ MongoTemplate is the central support class for Mongo database operations. It pro

### Spring Data repositories

To simplify the creation of data repositories Sprin Data MongoDB provides a generic repository programming model. It will automatically create a repository proxy for you that adds implementations of finder methods you specify on an interface.
To simplify the creation of data repositories Spring Data MongoDB provides a generic repository programming model. It will automatically create a repository proxy for you that adds implementations of finder methods you specify on an interface.

For example, given a `Person` class with first and last name properties, a `PersonRepository` interface that can query for `Person` by last name and when the first name matches a like expression is shown below:

@@ -70,7 +70,7 @@ public interface PersonRepository extends CrudRepository<Person, Long> {
}
```

The queries issued on execution will be derived from the method name. Exending `CrudRepository` causes CRUD methods being pulled into the interface so that you can easily save and find single entities and collections of them.
The queries issued on execution will be derived from the method name. Extending `CrudRepository` causes CRUD methods being pulled into the interface so that you can easily save and find single entities and collections of them.

You can have Spring automatically create a proxy for the interface by using the following JavaConfig:
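
The JavaConfig block itself is not captured in this excerpt. A minimal sketch of what the repository interface and such a configuration typically look like is shown below; apart from `PersonRepository extends CrudRepository<Person, Long>`, which is visible in the hunk header above, all names are illustrative and not taken from the commits:

```java
// Sketch only: grounded in the hunk header above; other names are illustrative.
package com.example;

import java.util.List;

import org.springframework.context.annotation.Configuration;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;
import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;
import org.springframework.data.repository.CrudRepository;

import com.mongodb.Mongo;

class Person {

  @Id Long id;
  String firstname, lastname;
}

// Queries are derived from the method names, as the README text explains.
interface PersonRepository extends CrudRepository<Person, Long> {

  List<Person> findByLastname(String lastname);

  List<Person> findByFirstnameLike(String firstname);
}

// JavaConfig that bootstraps the MongoDB infrastructure and the repository proxies.
@Configuration
@EnableMongoRepositories
class ApplicationConfig extends AbstractMongoConfiguration {

  @Override
  protected String getDatabaseName() {
    return "example";
  }

  @Override
  public Mongo mongo() throws Exception {
    return new Mongo("localhost", 27017);
  }
}
```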

@@ -73,7 +73,7 @@
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="120"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference" value="do not insert"/>

@@ -102,7 +102,7 @@
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_method_declaration" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try" value="do not insert"/>

@@ -124,7 +124,7 @@
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.compiler.problem.enumIdentifier" value="error"/>
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_ellipsis" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits" value="do not insert"/>

@@ -135,9 +135,9 @@
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.indent_root_tags" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments" value="insert"/>

@@ -150,12 +150,12 @@
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator" value="insert"/>

@@ -182,11 +182,11 @@
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference" value="do not insert"/>

@@ -237,7 +237,7 @@
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for" value="do not insert"/>

@@ -251,12 +251,12 @@
<setting id="org.eclipse.jdt.core.compiler.codegen.targetPlatform" value="1.7"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_resources_in_try" value="80"/>
<setting id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_header" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_block_comments" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression" value="do not insert"/>
pom.xml (45 changes)
@@ -5,7 +5,7 @@
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.3.0.M1</version>
<version>1.4.0.M1</version>
<packaging>pom</packaging>

<name>Spring Data MongoDB</name>

@@ -15,7 +15,7 @@
<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>1.1.0.RELEASE</version>
<version>1.3.0.M1</version>
<relativePath>../spring-data-build/parent/pom.xml</relativePath>
</parent>

@@ -29,19 +29,19 @@
<properties>
<project.type>multi</project.type>
<dist.id>spring-data-mongodb</dist.id>
<springdata.commons>1.6.0.M1</springdata.commons>
<mongo>2.10.1</mongo>
<springdata.commons>1.7.0.M1</springdata.commons>
<mongo>2.11.3</mongo>
</properties>

<developers>
<developer>
<id>ogierke</id>
<name>Oliver Gierke</name>
<email>ogierke at vmware.com</email>
<organization>SpringSource</organization>
<organizationUrl>http://www.springsource.com</organizationUrl>
<email>ogierke at gopivotal.com</email>
<organization>Pivotal</organization>
<organizationUrl>http://www.gopivotal.com</organizationUrl>
<roles>
<role>Project Lean</role>
<role>Project Lead</role>
</roles>
<timezone>+1</timezone>
</developer>

@@ -49,8 +49,8 @@
<id>trisberg</id>
<name>Thomas Risberg</name>
<email>trisberg at vmware.com</email>
<organization>SpringSource</organization>
<organizationUrl>http://www.springsource.com</organizationUrl>
<organization>Pivotal</organization>
<organizationUrl>http://www.gopivotal.com</organizationUrl>
<roles>
<role>Developer</role>
</roles>

@@ -59,9 +59,9 @@
<developer>
<id>mpollack</id>
<name>Mark Pollack</name>
<email>mpollack at vmware.com</email>
<organization>SpringSource</organization>
<organizationUrl>http://www.springsource.com</organizationUrl>
<email>mpollack at gopivotal.com</email>
<organization>Pivotal</organization>
<organizationUrl>http://www.gopivotal.com</organizationUrl>
<roles>
<role>Developer</role>
</roles>

@@ -70,14 +70,25 @@
<developer>
<id>jbrisbin</id>
<name>Jon Brisbin</name>
<email>jbrisbin at vmware.com</email>
<organization>SpringSource</organization>
<organizationUrl>http://www.springsource.com</organizationUrl>
<email>jbrisbin at gopivotal.com</email>
<organization>Pivotal</organization>
<organizationUrl>http://www.gopivotal.com</organizationUrl>
<roles>
<role>Developer</role>
</roles>
<timezone>-6</timezone>
</developer>
<developer>
<id>tdarimont</id>
<name>Thomas Darimont</name>
<email>tdarimont at gopivotal.com</email>
<organization>Pivotal</organization>
<organizationUrl>http://www.gopivotal.com</organizationUrl>
<roles>
<role>Developer</role>
</roles>
<timezone>+1</timezone>
</developer>
</developers>

<dependencies>

@@ -91,7 +102,7 @@

<repositories>
<repository>
<id>spring-lib-milestone</id>
<id>spring-libs-milestone</id>
<url>http://repo.springsource.org/libs-milestone-local</url>
</repository>
</repositories>

@@ -6,7 +6,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.3.0.M1</version>
<version>1.4.0.M1</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -52,7 +52,7 @@
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>1.3.0.M1</version>
<version>1.4.0.M1</version>
</dependency>

<dependency>

@@ -0,0 +1,5 @@
/**
* Infrastructure for Spring Data's MongoDB cross store support.
*/
package org.springframework.data.mongodb.crossstore;

@@ -12,7 +12,7 @@ Import-Template:
org.aspectj.*;version="${aspectj:[1.0.0, 2.0.0)}",
org.bson.*;version="0",
org.slf4j.*;version="${slf4j:[=.=.=,+1.0.0)}",
org.springframework.*;version="${spring30:[=.=.=.=,+1.0.0)}",
org.springframework.*;version="${spring:[=.=.=.=,+1.0.0)}",
org.springframework.data.*;version="${springdata.commons:[=.=.=.=,+1.0.0)}",
org.springframework.data.mongodb.*;version="${project.version:[=.=.=.=,+1.0.0)}",
org.w3c.dom.*;version="0"

@@ -13,7 +13,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.3.0.M1</version>
<version>1.4.0.M1</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -32,7 +32,7 @@ An example log entry might look like:
{
"_id" : ObjectId("4d89341a8ef397e06940d5cd"),
"applicationId" : "my.application",
"name" : "org.springframework.data.mongodb.log4j.AppenderTest",
"name" : "org.springframework.data.mongodb.log4j.MongoLog4jAppenderIntegrationTests",
"level" : "DEBUG",
"timestamp" : ISODate("2011-03-23T16:53:46.778Z"),
"properties" : {

@@ -5,7 +5,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.3.0.M1</version>
<version>1.4.0.M1</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -0,0 +1,5 @@
/**
* Infrastructure for to use MongoDB as a logging sink.
*/
package org.springframework.data.mongodb.log4j;

@@ -1,11 +1,11 @@
/*
* Copyright (c) 2011 by the original author(s).
* Copyright 2011-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,

@@ -13,63 +13,65 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.springframework.data.mongodb.log4j;

import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;

import java.net.UnknownHostException;
import java.util.Calendar;

import com.mongodb.DB;
import com.mongodb.DBCursor;
import com.mongodb.Mongo;
import org.apache.log4j.Logger;
import org.apache.log4j.MDC;
import org.junit.Before;
import org.junit.Test;

/**
* @author Jon Brisbin <jbrisbin@vmware.com>
*/
public class AppenderTest {
import com.mongodb.DB;
import com.mongodb.DBCursor;
import com.mongodb.Mongo;

private static final String NAME = AppenderTest.class.getName();
private Logger log = Logger.getLogger(NAME);
private Mongo mongo;
private DB db;
private String collection;
/**
* Integration tests for {@link MongoLog4jAppender}.
*
* @author Jon Brisbin
* @author Oliver Gierke
*/
public class MongoLog4jAppenderIntegrationTests {

static final String NAME = MongoLog4jAppenderIntegrationTests.class.getName();

Logger log = Logger.getLogger(NAME);
Mongo mongo;
DB db;
String collection;

@Before
public void setup() {
try {
mongo = new Mongo("localhost", 27017);
db = mongo.getDB("logs");
Calendar now = Calendar.getInstance();
collection = String.valueOf(now.get(Calendar.YEAR)) + String.format("%1$02d", now.get(Calendar.MONTH) + 1);
db.getCollection(collection).drop();
} catch (UnknownHostException e) {
throw new RuntimeException(e.getMessage(), e);
}
public void setUp() throws Exception {

mongo = new Mongo("localhost", 27017);
db = mongo.getDB("logs");

Calendar now = Calendar.getInstance();
collection = String.valueOf(now.get(Calendar.YEAR)) + String.format("%1$02d", now.get(Calendar.MONTH) + 1);
db.getCollection(collection).drop();
}

@Test
public void testLogging() {

log.debug("DEBUG message");
log.info("INFO message");
log.warn("WARN message");
log.error("ERROR message");

DBCursor msgs = db.getCollection(collection).find();
assertThat(msgs.count(), is(4));

assertThat(msgs.count(), is(4));
}

@Test
public void testProperties() {

MDC.put("property", "one");
log.debug("DEBUG message");
}

}

@@ -10,11 +10,4 @@ log4j.appender.stdout.collectionPattern = %X{year}%X{month}
log4j.appender.stdout.applicationId = my.application
log4j.appender.stdout.warnOrHigherWriteConcern = FSYNC_SAFE

log4j.category.org.apache.activemq=ERROR
log4j.category.org.springframework.batch=DEBUG
log4j.category.org.springframework.data.document.mongodb=DEBUG
log4j.category.org.springframework.transaction=INFO

log4j.category.org.hibernate.SQL=DEBUG
# for debugging datasource initialization
# log4j.category.test.jdbc=DEBUG
log4j.category.org.springframework.data.mongodb=DEBUG

@@ -1,153 +1,176 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<context version="7.1.7.187">
<scope name="spring-data-mongodb" type="Project">
<element name="Filter" type="TypeFilterReferenceOverridden">
<element name="org.springframework.data.mongodb.**" type="IncludeTypePattern"/>
<context version="7.1.9.205">
<scope type="Project" name="spring-data-mongodb">
<element type="TypeFilterReferenceOverridden" name="Filter">
<element type="IncludeTypePattern" name="org.springframework.data.mongodb.**"/>
</element>
<architecture>
<element name="Config" type="Layer">
<element name="Assignment" type="TypeFilter">
<element name="**.config.**" type="WeakTypePattern"/>
<element type="Layer" name="Config">
<element type="TypeFilter" name="Assignment">
<element type="WeakTypePattern" name="**.config.**"/>
</element>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core"/>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|GridFS"/>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Monitoring"/>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Repositories"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|GridFS" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Monitoring" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Repositories" type="AllowedDependency"/>
</element>
<element name="Repositories" type="Layer">
<element name="Assignment" type="TypeFilter">
<element name="**.repository.**" type="IncludeTypePattern"/>
<element type="Layer" name="Repositories">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.repository.**"/>
</element>
<element name="API" type="Subsystem">
<element name="Assignment" type="TypeFilter">
<element name="**.repository.*" type="IncludeTypePattern"/>
<element type="Subsystem" name="API">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.repository.*"/>
</element>
</element>
<element name="Query" type="Subsystem">
<element name="Assignment" type="TypeFilter">
<element name="**.query.**" type="IncludeTypePattern"/>
<element type="Subsystem" name="Query">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.query.**"/>
</element>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|API"/>
<dependency toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|API" type="AllowedDependency"/>
</element>
<element name="Implementation" type="Subsystem">
<element name="Assignment" type="TypeFilter">
<element name="**.support.**" type="IncludeTypePattern"/>
<element type="Subsystem" name="Implementation">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.support.**"/>
</element>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|API"/>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|Query"/>
<dependency toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|API" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|Query" type="AllowedDependency"/>
</element>
<element name="Config" type="Subsystem">
<element name="Assignment" type="TypeFilter">
<element name="**.config.**" type="IncludeTypePattern"/>
<element type="Subsystem" name="Config">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.config.**"/>
</element>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|Implementation"/>
<dependency toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|Implementation" type="AllowedDependency"/>
</element>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core" type="AllowedDependency"/>
</element>
<element name="Monitoring" type="Layer">
<element name="Assignment" type="TypeFilter">
<element name="**.monitor.**" type="IncludeTypePattern"/>
<element type="Layer" name="Monitoring">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.monitor.**"/>
</element>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core" type="AllowedDependency"/>
</element>
<element name="GridFS" type="Layer">
<element name="Assignment" type="TypeFilter">
<element name="**.gridfs.**" type="IncludeTypePattern"/>
<element type="Layer" name="GridFS">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.gridfs.**"/>
</element>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core" type="AllowedDependency"/>
</element>
<element name="Core" type="Layer">
<element name="Assignment" type="TypeFilter">
<element name="**.core.**" type="IncludeTypePattern"/>
<element type="Layer" name="Core">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.core.**"/>
</element>
<element name="Mapping" type="Subsystem">
<element name="Assignment" type="TypeFilter">
<element name="**.mapping.**" type="IncludeTypePattern"/>
<element type="Subsystem" name="Mapping">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.mapping.**"/>
</element>
</element>
<element name="Geospatial" type="Subsystem">
<element name="Assignment" type="TypeFilter">
<element name="**.geo.**" type="IncludeTypePattern"/>
<element type="Subsystem" name="Geospatial">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.geo.**"/>
</element>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
</element>
<element name="Query" type="Subsystem">
<element name="Assignment" type="TypeFilter">
<element name="**.query.**" type="IncludeTypePattern"/>
<element type="Subsystem" name="Query">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.query.**"/>
</element>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Geospatial"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Geospatial" type="AllowedDependency"/>
</element>
<element name="Index" type="Subsystem">
<element name="Assignment" type="TypeFilter">
<element name="**.index.**" type="IncludeTypePattern"/>
<element type="Subsystem" name="Conversion">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.convert.**"/>
</element>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping"/>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query" type="AllowedDependency"/>
</element>
<element name="Core" type="Subsystem">
<element name="Assignment" type="TypeFilter">
<element name="**.core.**" type="WeakTypePattern"/>
<element type="Subsystem" name="SpEL">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.spel.**"/>
</element>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Geospatial"/>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Index"/>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping"/>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query"/>
</element>
<element type="Subsystem" name="Aggregation">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.aggregation.**"/>
</element>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Conversion" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|SpEL" type="AllowedDependency"/>
</element>
<element type="Subsystem" name="Index">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.index.**"/>
</element>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query" type="AllowedDependency"/>
</element>
<element type="Subsystem" name="Core">
<element type="TypeFilter" name="Assignment">
<element type="WeakTypePattern" name="**.core.**"/>
</element>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Aggregation" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Conversion" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Geospatial" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Index" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query" type="AllowedDependency"/>
</element>
</element>
<element name="API" type="Subsystem">
<element name="Assignment" type="TypeFilter">
<element name="org.springframework.data.mongodb.*" type="IncludeTypePattern"/>
<element type="Subsystem" name="API">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="org.springframework.data.mongodb.*"/>
</element>
<stereotype name="Public"/>
</element>
</architecture>
<workspace>
<element name="src/main/java" type="JavaRootDirectory">
<element type="JavaRootDirectory" name="src/main/java">
<reference name="Project|spring-data-mongodb::BuildUnit|spring-data-mongodb"/>
</element>
<element name="target/classes" type="JavaRootDirectory">
<element type="JavaRootDirectory" name="target/classes">
<reference name="Project|spring-data-mongodb::BuildUnit|spring-data-mongodb"/>
</element>
</workspace>
<physical>
<element name="spring-data-mongodb" type="BuildUnit"/>
<element type="BuildUnit" name="spring-data-mongodb"/>
</physical>
</scope>
<scope name="External" type="External">
<element name="Filter" type="TypeFilter">
<element name="**" type="IncludeTypePattern"/>
<element name="java.**" type="ExcludeTypePattern"/>
<element name="javax.**" type="ExcludeTypePattern"/>
<scope type="External" name="External">
<element type="TypeFilter" name="Filter">
<element type="IncludeTypePattern" name="**"/>
<element type="ExcludeTypePattern" name="java.**"/>
<element type="ExcludeTypePattern" name="javax.**"/>
</element>
<architecture>
<element name="Spring" type="Subsystem">
<element name="Assignment" type="TypeFilter">
<element name="org.springframework.**" type="IncludeTypePattern"/>
<element name="org.springframework.data.**" type="ExcludeTypePattern"/>
<element type="Subsystem" name="Spring">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="org.springframework.**"/>
<element type="ExcludeTypePattern" name="org.springframework.data.**"/>
</element>
</element>
<element name="Spring Data Core" type="Subsystem">
<element name="Assignment" type="TypeFilter">
<element name="org.springframework.data.**" type="IncludeTypePattern"/>
<element type="Subsystem" name="Spring Data Core">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="org.springframework.data.**"/>
</element>
</element>
<element name="Mongo Java Driver" type="Subsystem">
<element name="Assignment" type="TypeFilter">
<element name="com.mongodb.**" type="IncludeTypePattern"/>
<element name="org.bson.**" type="IncludeTypePattern"/>
<element type="Subsystem" name="Mongo Java Driver">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="com.mongodb.**"/>
<element type="IncludeTypePattern" name="org.bson.**"/>
</element>
</element>
<element name="Querydsl" type="Subsystem">
<element name="Assignment" type="TypeFilter">
<element name="com.mysema.query.**" type="IncludeTypePattern"/>
<element type="Subsystem" name="Querydsl">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="com.mysema.query.**"/>
</element>
</element>
</architecture>
</scope>
<scope name="Global" type="Global">
<element name="Configuration" type="Configuration"/>
<element name="Filter" type="TypeFilter">
<element name="**" type="IncludeTypePattern"/>
<scope type="Global" name="Global">
<element type="Configuration" name="Configuration"/>
<element type="TypeFilter" name="Filter">
<element type="IncludeTypePattern" name="**"/>
</element>
</scope>
</context>

@@ -11,12 +11,13 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.3.0.M1</version>
<version>1.4.0.M1</version>
<relativePath>../pom.xml</relativePath>
</parent>

<properties>
<validation>1.0.0.GA</validation>
<objenesis>1.3</objenesis>
</properties>

<dependencies>

@@ -55,7 +56,7 @@
</dependency>

<!-- Spring Data -->
<dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>spring-data-commons</artifactId>
<version>${springdata.commons}</version>

@@ -119,6 +120,13 @@
<version>${validation}</version>
<optional>true</optional>
</dependency>

<dependency>
<groupId>org.objenesis</groupId>
<artifactId>objenesis</artifactId>
<version>${objenesis}</version>
<optional>true</optional>
</dependency>

<dependency>
<groupId>org.hibernate</groupId>

@@ -0,0 +1,34 @@
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb;

import org.springframework.dao.UncategorizedDataAccessException;

/**
* @author Oliver Gierke
*/
public class LazyLoadingException extends UncategorizedDataAccessException {

private static final long serialVersionUID = -7089224903873220037L;

/**
* @param msg
* @param cause
*/
public LazyLoadingException(String msg, Throwable cause) {
super(msg, cause);
}
}

@@ -1,6 +1,23 @@
/*
* Copyright 2011-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb;

import org.springframework.dao.DataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.core.MongoExceptionTranslator;

import com.mongodb.DB;

@@ -8,6 +25,7 @@ import com.mongodb.DB;
* Interface for factories creating {@link DB} instances.
*
* @author Mark Pollack
* @author Thomas Darimont
*/
public interface MongoDbFactory {

@@ -27,4 +45,11 @@ public interface MongoDbFactory {
* @throws DataAccessException
*/
DB getDb(String dbName) throws DataAccessException;

/**
* Exposes a shared {@link MongoExceptionTranslator}.
*
* @return will never be {@literal null}.
*/
PersistenceExceptionTranslator getExceptionTranslator();
}
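
The newly added `getExceptionTranslator()` method exposes a shared `MongoExceptionTranslator` to callers. A hypothetical helper showing how code that talks to the driver directly might use it; the surrounding class, collection, and field names are illustrative, and the no-argument `getDb()` variant of the interface (not shown in this hunk) is assumed:

```java
// Sketch only: illustrates intended usage of the translator exposed by MongoDbFactory.
import org.springframework.dao.DataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.MongoDbFactory;

import com.mongodb.BasicDBObject;

class ExceptionTranslationExample {

  void insertWithTranslation(MongoDbFactory factory) {

    PersistenceExceptionTranslator translator = factory.getExceptionTranslator();

    try {
      factory.getDb().getCollection("people").insert(new BasicDBObject("firstname", "Dave"));
    } catch (RuntimeException e) {
      // Re-throw as a Spring DataAccessException where possible, otherwise propagate as-is.
      DataAccessException translated = translator.translateExceptionIfPossible(e);
      throw translated != null ? translated : e;
    }
  }
}
```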

@@ -31,6 +31,8 @@ import org.springframework.data.mapping.context.MappingContextIsNewStrategyFacto
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.convert.CustomConversions;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.CamelCaseAbbreviatingFieldNamingStrategy;
import org.springframework.data.mongodb.core.mapping.Document;

@@ -47,6 +49,7 @@ import com.mongodb.Mongo;
*
* @author Mark Pollack
* @author Oliver Gierke
* @author Thomas Darimont
*/
@Configuration
public abstract class AbstractMongoConfiguration {

@@ -58,6 +61,16 @@ public abstract class AbstractMongoConfiguration {
*/
protected abstract String getDatabaseName();

/**
* Return the name of the authentication database to use. Defaults to {@literal null} and will turn into the value
* returned by {@link #getDatabaseName()} later on effectively.
*
* @return
*/
protected String getAuthenticationDatabaseName() {
return null;
}

/**
* Return the {@link Mongo} instance to connect to. Annotate with {@link Bean} in case you want to expose a
* {@link Mongo} instance to the {@link org.springframework.context.ApplicationContext}.

@@ -89,14 +102,7 @@ public abstract class AbstractMongoConfiguration {
*/
@Bean
public SimpleMongoDbFactory mongoDbFactory() throws Exception {

UserCredentials credentials = getUserCredentials();

if (credentials == null) {
return new SimpleMongoDbFactory(mongo(), getDatabaseName());
} else {
return new SimpleMongoDbFactory(mongo(), getDatabaseName(), credentials);
}
return new SimpleMongoDbFactory(mongo(), getDatabaseName(), getUserCredentials(), getAuthenticationDatabaseName());
}

/**

@@ -178,8 +184,11 @@ public abstract class AbstractMongoConfiguration {
*/
@Bean
public MappingMongoConverter mappingMongoConverter() throws Exception {
MappingMongoConverter converter = new MappingMongoConverter(mongoDbFactory(), mongoMappingContext());

DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory());
MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext());
converter.setCustomConversions(customConversions());

return converter;
}
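
The reworked `mongoDbFactory()` above now hands `getUserCredentials()` and the new `getAuthenticationDatabaseName()` hook straight to `SimpleMongoDbFactory`. A minimal sketch of a subclass using that hook, assuming `getUserCredentials()` is overridable as its use above suggests; database names and credentials are illustrative:

```java
// Sketch only: demonstrates overriding the new getAuthenticationDatabaseName() hook.
import org.springframework.context.annotation.Configuration;
import org.springframework.data.authentication.UserCredentials;
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;

import com.mongodb.Mongo;

@Configuration
class SecuredMongoConfig extends AbstractMongoConfiguration {

  @Override
  protected String getDatabaseName() {
    return "app";
  }

  @Override
  public Mongo mongo() throws Exception {
    return new Mongo("localhost", 27017);
  }

  @Override
  protected UserCredentials getUserCredentials() {
    return new UserCredentials("appUser", "secret");
  }

  @Override
  protected String getAuthenticationDatabaseName() {
    // Authenticate against a dedicated database instead of the one returned by getDatabaseName().
    return "admin";
  }
}
```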

@@ -0,0 +1,70 @@
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;

import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.springframework.context.annotation.Import;
import org.springframework.data.auditing.DateTimeProvider;
import org.springframework.data.domain.AuditorAware;

/**
* Annotation to enable auditing in MongoDB via annotation configuration.
*
* @author Thomas Darimont
* @author Oliver Gierke
*/
@Inherited
@Documented
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Import(MongoAuditingRegistrar.class)
public @interface EnableMongoAuditing {

/**
* Configures the {@link AuditorAware} bean to be used to lookup the current principal.
*
* @return
*/
String auditorAwareRef() default "";

/**
* Configures whether the creation and modification dates are set. Defaults to {@literal true}.
*
* @return
*/
boolean setDates() default true;

/**
* Configures whether the entity shall be marked as modified on creation. Defaults to {@literal true}.
*
* @return
*/
boolean modifyOnCreate() default true;

/**
* Configures a {@link DateTimeProvider} bean name that allows customizing the {@link org.joda.time.DateTime} to be
* used for setting creation and modification dates.
*
* @return
*/
String dateTimeProviderRef() default "";
}
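
A brief usage sketch for the new annotation follows; the configuration and entity classes are illustrative and not part of the commits, while `auditorAwareRef` and the `AuditorAware` contract come from the Javadoc above:

```java
// Sketch only: wiring, bean, and entity names are illustrative.
import java.util.Date;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.annotation.CreatedDate;
import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.LastModifiedDate;
import org.springframework.data.domain.AuditorAware;
import org.springframework.data.mongodb.config.EnableMongoAuditing;
import org.springframework.data.mongodb.core.mapping.Document;

@Configuration
@EnableMongoAuditing(auditorAwareRef = "auditorProvider")
class AuditingConfig {

  @Bean
  public AuditorAware<String> auditorProvider() {
    // Look up the current principal; a fixed value keeps the sketch self-contained.
    return new AuditorAware<String>() {
      public String getCurrentAuditor() {
        return "system";
      }
    };
  }
}

@Document
class AuditedEntity {

  @Id String id;
  @CreatedDate Date created;
  @LastModifiedDate Date lastModified;
}
```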

@@ -70,6 +70,7 @@ import org.w3c.dom.Element;
* @author Jon Brisbin
* @author Oliver Gierke
* @author Maciej Walkowiak
* @author Thomas Darimont
*/
public class MappingMongoConverterParser implements BeanDefinitionParser {

@@ -105,6 +106,11 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
converterBuilder.addConstructorArgReference(dbFactoryRef);
converterBuilder.addConstructorArgReference(ctxRef);

String typeMapperRef = element.getAttribute("type-mapper-ref");
if (StringUtils.hasText(typeMapperRef)) {
converterBuilder.addPropertyReference("typeMapper", typeMapperRef);
}

if (conversionsDefinition != null) {
converterBuilder.addPropertyValue("customConversions", conversionsDefinition);
}

@@ -0,0 +1,105 @@
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;

import java.lang.annotation.Annotation;

import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
import org.springframework.core.type.AnnotationMetadata;
import org.springframework.data.auditing.IsNewAwareAuditingHandler;
import org.springframework.data.auditing.config.AnnotationAuditingConfiguration;
import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport;
import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory;
import org.springframework.data.mongodb.core.mapping.event.AuditingEventListener;
import org.springframework.data.support.IsNewStrategyFactory;
import org.springframework.util.Assert;

/**
* {@link ImportBeanDefinitionRegistrar} to enable {@link EnableMongoAuditing} annotation.
*
* @author Thomas Darimont
* @author Oliver Gierke
*/
class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {

/*
* (non-Javadoc)
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation()
*/
@Override
protected Class<? extends Annotation> getAnnotation() {
return EnableMongoAuditing.class;
}

/*
* (non-Javadoc)
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerBeanDefinitions(org.springframework.core.type.AnnotationMetadata, org.springframework.beans.factory.support.BeanDefinitionRegistry)
*/
@Override
public void registerBeanDefinitions(AnnotationMetadata annotationMetadata, BeanDefinitionRegistry registry) {

Assert.notNull(annotationMetadata, "AnnotationMetadata must not be null!");
Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");

registerIsNewStrategyFactoryIfNecessary(registry);
super.registerBeanDefinitions(annotationMetadata, registry);
}

/*
* (non-Javadoc)
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditHandlerBeanDefinitionBuilder(org.springframework.data.auditing.config.AnnotationAuditingConfiguration)
*/
@Override
protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AnnotationAuditingConfiguration configuration) {

Assert.notNull(configuration, "AnnotationAuditingConfiguration must not be null!");

return configureDefaultAuditHandlerAttributes(configuration,
BeanDefinitionBuilder.rootBeanDefinition(IsNewAwareAuditingHandler.class)).addConstructorArgReference(
BeanNames.IS_NEW_STRATEGY_FACTORY);
}

/*
* (non-Javadoc)
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerAuditListener(org.springframework.beans.factory.config.BeanDefinition, org.springframework.beans.factory.support.BeanDefinitionRegistry)
*/
@Override
protected void registerAuditListenerBeanDefinition(BeanDefinition auditingHandlerDefinition,
BeanDefinitionRegistry registry) {

Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null!");
Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");

registerInfrastructureBeanWithId(BeanDefinitionBuilder.rootBeanDefinition(AuditingEventListener.class)
.addConstructorArgValue(auditingHandlerDefinition).getRawBeanDefinition(),
AuditingEventListener.class.getName(), registry);
}

/**
* @param registry, the {@link BeanDefinitionRegistry} to use to register an {@link IsNewStrategyFactory} to.
*/
private void registerIsNewStrategyFactoryIfNecessary(BeanDefinitionRegistry registry) {

if (!registry.containsBeanDefinition(BeanNames.IS_NEW_STRATEGY_FACTORY)) {
registry.registerBeanDefinition(BeanNames.IS_NEW_STRATEGY_FACTORY,
BeanDefinitionBuilder.rootBeanDefinition(MappingContextIsNewStrategyFactory.class)
.addConstructorArgReference(BeanNames.MAPPING_CONTEXT).getBeanDefinition());
}
}
}
|
@@ -1,5 +1,5 @@
/*
 * Copyright 2011-2012 by the original author(s).
 * Copyright 2011-2013 by the original author(s).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -41,6 +41,7 @@ import com.mongodb.MongoURI;
 *
 * @author Jon Brisbin
 * @author Oliver Gierke
 * @author Thomas Darimont
 */
public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {

@@ -70,6 +71,7 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
        String uri = element.getAttribute("uri");
        String mongoRef = element.getAttribute("mongo-ref");
        String dbname = element.getAttribute("dbname");

        BeanDefinition userCredentials = getUserCredentialsBeanDefinition(element, parserContext);

        // Common setup
@@ -92,12 +94,9 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
            dbFactoryBuilder.addConstructorArgValue(registerMongoBeanDefinition(element, parserContext));
        }

        dbname = StringUtils.hasText(dbname) ? dbname : "db";
        dbFactoryBuilder.addConstructorArgValue(dbname);

        if (userCredentials != null) {
            dbFactoryBuilder.addConstructorArgValue(userCredentials);
        }
        dbFactoryBuilder.addConstructorArgValue(StringUtils.hasText(dbname) ? dbname : "db");
        dbFactoryBuilder.addConstructorArgValue(userCredentials);
        dbFactoryBuilder.addConstructorArgValue(element.getAttribute("authentication-dbname"));

        BeanDefinitionBuilder writeConcernPropertyEditorBuilder = getWriteConcernPropertyEditorBuilder();

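The net effect of this parser change is that the factory bean definition now always receives the credentials plus a dedicated `authentication-dbname` constructor argument. A minimal programmatic sketch of the same wiring, assuming the matching four-argument `SimpleMongoDbFactory` constructor that this parser now targets (database names and credentials are placeholders):

```java
import com.mongodb.Mongo;

import org.springframework.data.authentication.UserCredentials;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;

public class DbFactoryConfigSample {

    public SimpleMongoDbFactory dbFactory() throws Exception {
        Mongo mongo = new Mongo("localhost");
        // "orders" is the target database, "admin" the database to authenticate against
        return new SimpleMongoDbFactory(mongo, "orders", new UserCredentials("bob", "secret"), "admin");
    }
}
```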
@@ -1,5 +1,5 @@
/*
 * Copyright 2011-2012 the original author or authors.
 * Copyright 2011-2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -33,6 +33,7 @@ import org.w3c.dom.Element;
 *
 * @author Mark Pollack
 * @author Oliver Gierke
 * @author Thomas Darimont
 */
abstract class MongoParsingUtils {

@@ -79,6 +80,8 @@ abstract class MongoParsingUtils {
        setPropertyValue(optionsDefBuilder, optionsElement, "write-timeout", "writeTimeout");
        setPropertyValue(optionsDefBuilder, optionsElement, "write-fsync", "writeFsync");
        setPropertyValue(optionsDefBuilder, optionsElement, "slave-ok", "slaveOk");
        setPropertyValue(optionsDefBuilder, optionsElement, "ssl", "ssl");
        setPropertyReference(optionsDefBuilder, optionsElement, "ssl-socket-factory-ref", "sslSocketFactory");

        mongoBuilder.addPropertyValue("mongoOptions", optionsDefBuilder.getBeanDefinition());
        return true;

@@ -1,5 +1,5 @@
/*
 * Copyright 2011 the original author or authors.
 * Copyright 2011-2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -31,6 +31,7 @@ import com.mongodb.ServerAddress;
 *
 * @author Mark Pollack
 * @author Oliver Gierke
 * @author Thomas Darimont
 */
public class ServerAddressPropertyEditor extends PropertyEditorSupport {

@@ -43,6 +44,11 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
    @Override
    public void setAsText(String replicaSetString) {

        if (!StringUtils.hasText(replicaSetString)) {
            setValue(null);
            return;
        }

        String[] replicaSetStringArray = StringUtils.commaDelimitedListToStringArray(replicaSetString);
        Set<ServerAddress> serverAddresses = new HashSet<ServerAddress>(replicaSetStringArray.length);

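For context, this editor is what turns the comma-delimited replica-set attribute into server addresses; with this change, blank input simply resolves to `null` instead of failing. A small illustrative usage, assuming the editor's usual location in the `config` package (hosts and ports are placeholders):

```java
import org.springframework.data.mongodb.config.ServerAddressPropertyEditor;

public class ReplicaSetParsingSample {

    public static void main(String[] args) {
        ServerAddressPropertyEditor editor = new ServerAddressPropertyEditor();

        editor.setAsText("   "); // no text given -> the editor's value is simply null
        System.out.println(editor.getValue());

        editor.setAsText("127.0.0.1:27017,127.0.0.1:27018");
        System.out.println(editor.getValue()); // the parsed ServerAddress entries
    }
}
```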
@@ -1,5 +1,5 @@
/*
 * Copyright 2011 the original author or authors.
 * Copyright 2011-2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -15,6 +15,8 @@
 */
package org.springframework.data.mongodb.core;

import static org.springframework.data.domain.Sort.Direction.*;

import java.util.ArrayList;
import java.util.List;

@@ -22,7 +24,6 @@ import org.springframework.dao.DataAccessException;
import org.springframework.data.mongodb.core.index.IndexDefinition;
import org.springframework.data.mongodb.core.index.IndexField;
import org.springframework.data.mongodb.core.index.IndexInfo;
import org.springframework.data.mongodb.core.query.Order;
import org.springframework.util.Assert;

import com.mongodb.DBCollection;
@@ -34,9 +35,13 @@ import com.mongodb.MongoException;
 *
 * @author Mark Pollack
 * @author Oliver Gierke
 * @author Komi Innocent
 */
public class DefaultIndexOperations implements IndexOperations {

    private static final Double ONE = Double.valueOf(1);
    private static final Double MINUS_ONE = Double.valueOf(-1);

    private final MongoOperations mongoOperations;
    private final String collectionName;

@@ -135,12 +140,17 @@ public class DefaultIndexOperations implements IndexOperations {

            Object value = keyDbObject.get(key);

            if (Integer.valueOf(1).equals(value)) {
                indexFields.add(IndexField.create(key, Order.ASCENDING));
            } else if (Integer.valueOf(-1).equals(value)) {
                indexFields.add(IndexField.create(key, Order.DESCENDING));
            } else if ("2d".equals(value)) {
            if ("2d".equals(value)) {
                indexFields.add(IndexField.geo(key));
            } else {

                Double keyValue = new Double(value.toString());

                if (ONE.equals(keyValue)) {
                    indexFields.add(IndexField.create(key, ASC));
                } else if (MINUS_ONE.equals(keyValue)) {
                    indexFields.add(IndexField.create(key, DESC));
                }
            }
        }

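The interesting part of this hunk is that index directions read back from MongoDB may arrive as an `Integer`, a `Double`, or the string `"2d"`, and `getIndexInfo()` now copes with all three. A standalone illustration of the same normalization rule — just the branching on the driver types, not the Spring Data index types themselves:

```java
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

// Illustrative only: index key documents as returned by MongoDB can carry the
// direction as 1 / -1 (Integer), 1.0 / -1.0 (Double) or "2d" for geo indexes.
public class IndexKeySample {

    public static void main(String[] args) {
        DBObject keys = new BasicDBObject("lastname", 1) // ascending, stored as Integer
                .append("age", -1.0)                     // descending, stored as Double
                .append("location", "2d");               // geospatial index

        for (String key : keys.keySet()) {
            Object value = keys.get(key);
            String direction = "2d".equals(value) ? "GEO"
                    : Double.valueOf(value.toString()) > 0 ? "ASC" : "DESC";
            System.out.println(key + " -> " + direction);
        }
    }
}
```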
@@ -1,5 +1,5 @@
/*
 * Copyright 2011-2012 the original author or authors.
 * Copyright 2011-2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -27,6 +27,7 @@ import com.mongodb.Mongo;
 * Mongo server administration exposed via JMX annotations
 *
 * @author Mark Pollack
 * @author Thomas Darimont
 */
@ManagedResource(description = "Mongo Admin Operations")
public class MongoAdmin implements MongoAdminOperations {
@@ -34,6 +35,7 @@ public class MongoAdmin implements MongoAdminOperations {
    private final Mongo mongo;
    private String username;
    private String password;
    private String authenticationDatabaseName;

    public MongoAdmin(Mongo mongo) {
        Assert.notNull(mongo);
@@ -82,7 +84,16 @@ public class MongoAdmin implements MongoAdminOperations {
        this.password = password;
    }

    /**
     * Sets the authenticationDatabaseName to use to authenticate with the Mongo database.
     *
     * @param authenticationDatabaseName The authenticationDatabaseName to use.
     */
    public void setAuthenticationDatabaseName(String authenticationDatabaseName) {
        this.authenticationDatabaseName = authenticationDatabaseName;
    }

    DB getDB(String databaseName) {
        return MongoDbUtils.getDB(mongo, databaseName, new UserCredentials(username, password));
        return MongoDbUtils.getDB(mongo, databaseName, new UserCredentials(username, password), authenticationDatabaseName);
    }
}

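A short usage sketch of the new property — the credential values are placeholders, and the existing username/password setters on `MongoAdmin` are assumed:

```java
import com.mongodb.Mongo;

import org.springframework.data.mongodb.monitor.MongoAdmin;

public class MongoAdminSample {

    public static void main(String[] args) throws Exception {
        MongoAdmin admin = new MongoAdmin(new Mongo("localhost"));
        admin.setUsername("operator");
        admin.setPassword("secret");
        // authenticate against "admin" rather than the database being administered
        admin.setAuthenticationDatabaseName("admin");
    }
}
```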
@@ -1,5 +1,5 @@
/*
 * Copyright 2010-2011 the original author or authors.
 * Copyright 2010-2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -26,14 +26,14 @@ import com.mongodb.DB;
import com.mongodb.Mongo;

/**
 * Helper class featuring helper methods for internal MongoDb classes.
 * <p/>
 * <p>
 * Mainly intended for internal use within the framework.
 * Helper class featuring helper methods for internal MongoDb classes. Mainly intended for internal use within the
 * framework.
 *
 * @author Thomas Risberg
 * @author Graeme Rocher
 * @author Oliver Gierke
 * @author Randy Watler
 * @author Thomas Darimont
 * @since 1.0
 */
public abstract class MongoDbUtils {
@@ -55,7 +55,7 @@ public abstract class MongoDbUtils {
     * @return the {@link DB} connection
     */
    public static DB getDB(Mongo mongo, String databaseName) {
        return doGetDB(mongo, databaseName, UserCredentials.NO_CREDENTIALS, true);
        return doGetDB(mongo, databaseName, UserCredentials.NO_CREDENTIALS, true, databaseName);
    }

    /**
@@ -67,15 +67,22 @@ public abstract class MongoDbUtils {
     * @return the {@link DB} connection
     */
    public static DB getDB(Mongo mongo, String databaseName, UserCredentials credentials) {
        return getDB(mongo, databaseName, credentials, databaseName);
    }

    public static DB getDB(Mongo mongo, String databaseName, UserCredentials credentials,
            String authenticationDatabaseName) {

        Assert.notNull(mongo, "No Mongo instance specified!");
        Assert.hasText(databaseName, "Database name must be given!");
        Assert.notNull(credentials, "Credentials must not be null, use UserCredentials.NO_CREDENTIALS!");
        Assert.hasText(authenticationDatabaseName, "Authentication database name must not be null or empty!");

        return doGetDB(mongo, databaseName, credentials, true);
        return doGetDB(mongo, databaseName, credentials, true, authenticationDatabaseName);
    }

    private static DB doGetDB(Mongo mongo, String databaseName, UserCredentials credentials, boolean allowCreate) {
    private static DB doGetDB(Mongo mongo, String databaseName, UserCredentials credentials, boolean allowCreate,
            String authenticationDatabaseName) {

        DbHolder dbHolder = (DbHolder) TransactionSynchronizationManager.getResource(mongo);

@@ -104,14 +111,16 @@ public abstract class MongoDbUtils {
        DB db = mongo.getDB(databaseName);
        boolean credentialsGiven = credentials.hasUsername() && credentials.hasPassword();

        synchronized (db) {
        DB authDb = databaseName.equals(authenticationDatabaseName) ? db : mongo.getDB(authenticationDatabaseName);

            if (credentialsGiven && !db.isAuthenticated()) {
        synchronized (authDb) {

            if (credentialsGiven && !authDb.isAuthenticated()) {

                String username = credentials.getUsername();
                String password = credentials.hasPassword() ? credentials.getPassword() : null;

                if (!db.authenticate(username, password == null ? null : password.toCharArray())) {
                if (!authDb.authenticate(username, password == null ? null : password.toCharArray())) {
                    throw new CannotGetMongoDbConnectionException("Failed to authenticate to database [" + databaseName + "], "
                            + credentials.toString(), databaseName, credentials);
                }
@@ -131,8 +140,11 @@ public abstract class MongoDbUtils {
            holderToUse.addDB(databaseName, db);
        }

        TransactionSynchronizationManager.registerSynchronization(new MongoSynchronization(holderToUse, mongo));
        holderToUse.setSynchronizedWithTransaction(true);
        // synchronize holder only if not yet synchronized
        if (!holderToUse.isSynchronizedWithTransaction()) {
            TransactionSynchronizationManager.registerSynchronization(new MongoSynchronization(holderToUse, mongo));
            holderToUse.setSynchronizedWithTransaction(true);
        }

        if (holderToUse != dbHolder) {
            TransactionSynchronizationManager.bindResource(mongo, holderToUse);

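The new overload makes the authentication database explicit, while the older two- and three-argument variants keep authenticating against the target database itself. A brief sketch of the four-argument call (database names and credentials are placeholders):

```java
import com.mongodb.DB;
import com.mongodb.Mongo;

import org.springframework.data.authentication.UserCredentials;
import org.springframework.data.mongodb.core.MongoDbUtils;

public class GetDbSample {

    public static void main(String[] args) throws Exception {
        Mongo mongo = new Mongo("localhost");

        // authenticate against "admin", then hand out a handle to "orders"
        DB db = MongoDbUtils.getDB(mongo, "orders", new UserCredentials("bob", "secret"), "admin");
        System.out.println(db.getName());
    }
}
```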
@@ -15,7 +15,9 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.beans.factory.DisposableBean;
|
||||
@@ -24,6 +26,7 @@ import org.springframework.beans.factory.InitializingBean;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.CannotGetMongoDbConnectionException;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoOptions;
|
||||
@@ -36,6 +39,7 @@ import com.mongodb.WriteConcern;
|
||||
* @author Thomas Risberg
|
||||
* @author Graeme Rocher
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @since 1.0
|
||||
*/
|
||||
public class MongoFactoryBean implements FactoryBean<Mongo>, InitializingBean, DisposableBean,
|
||||
@@ -57,11 +61,38 @@ public class MongoFactoryBean implements FactoryBean<Mongo>, InitializingBean, D
|
||||
}
|
||||
|
||||
public void setReplicaSetSeeds(ServerAddress[] replicaSetSeeds) {
|
||||
this.replicaSetSeeds = Arrays.asList(replicaSetSeeds);
|
||||
this.replicaSetSeeds = filterNonNullElementsAsList(replicaSetSeeds);
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated use {@link #setReplicaSetSeeds(ServerAddress[])} instead
|
||||
*
|
||||
* @param replicaPair
|
||||
*/
|
||||
@Deprecated
|
||||
public void setReplicaPair(ServerAddress[] replicaPair) {
|
||||
this.replicaPair = Arrays.asList(replicaPair);
|
||||
this.replicaPair = filterNonNullElementsAsList(replicaPair);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param elements the elements to filter <T>
|
||||
* @return a new unmodifiable {@link List#} from the given elements without nulls
|
||||
*/
|
||||
private <T> List<T> filterNonNullElementsAsList(T[] elements) {
|
||||
|
||||
if (elements == null) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
List<T> candidateElements = new ArrayList<T>();
|
||||
|
||||
for (T element : elements) {
|
||||
if (element != null) {
|
||||
candidateElements.add(element);
|
||||
}
|
||||
}
|
||||
|
||||
return Collections.unmodifiableList(candidateElements);
|
||||
}
|
||||
|
||||
public void setHost(String host) {
|
||||
@@ -127,15 +158,15 @@ public class MongoFactoryBean implements FactoryBean<Mongo>, InitializingBean, D
|
||||
mongoOptions = new MongoOptions();
|
||||
}
|
||||
|
||||
if (replicaPair != null) {
|
||||
if (!isNullOrEmpty(replicaPair)) {
|
||||
if (replicaPair.size() < 2) {
|
||||
throw new CannotGetMongoDbConnectionException("A replica pair must have two server entries");
|
||||
}
|
||||
mongo = new Mongo(replicaPair.get(0), replicaPair.get(1), mongoOptions);
|
||||
} else if (replicaSetSeeds != null) {
|
||||
} else if (!isNullOrEmpty(replicaSetSeeds)) {
|
||||
mongo = new Mongo(replicaSetSeeds, mongoOptions);
|
||||
} else {
|
||||
String mongoHost = host != null ? host : defaultOptions.getHost();
|
||||
String mongoHost = StringUtils.hasText(host) ? host : defaultOptions.getHost();
|
||||
mongo = port != null ? new Mongo(new ServerAddress(mongoHost, port), mongoOptions) : new Mongo(mongoHost,
|
||||
mongoOptions);
|
||||
}
|
||||
@@ -147,6 +178,10 @@ public class MongoFactoryBean implements FactoryBean<Mongo>, InitializingBean, D
|
||||
this.mongo = mongo;
|
||||
}
|
||||
|
||||
private boolean isNullOrEmpty(Collection<?> elements) {
|
||||
return elements == null || elements.isEmpty();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.DisposableBean#destroy()
|
||||
|
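Because `filterNonNullElementsAsList(..)` drops `null` entries and `isNullOrEmpty(..)` now guards the replica checks, an accidentally sparse seed array no longer trips up `afterPropertiesSet()`. Illustrative usage (host and port values are placeholders):

```java
import com.mongodb.Mongo;
import com.mongodb.ServerAddress;

import org.springframework.data.mongodb.core.MongoFactoryBean;

public class MongoFactoryBeanSample {

    public static void main(String[] args) throws Exception {
        MongoFactoryBean factoryBean = new MongoFactoryBean();

        // the null element is silently filtered out instead of producing a bogus seed list
        factoryBean.setReplicaSetSeeds(new ServerAddress[] { new ServerAddress("127.0.0.1", 27017), null });
        factoryBean.afterPropertiesSet();

        Mongo mongo = factoryBean.getObject();
        System.out.println(mongo);
    }
}
```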
@@ -19,6 +19,9 @@ import java.util.Collection;
import java.util.List;
import java.util.Set;

import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.geo.GeoResult;
import org.springframework.data.mongodb.core.geo.GeoResults;
@@ -45,6 +48,8 @@ import com.mongodb.WriteResult;
 * @author Thomas Risberg
 * @author Mark Pollack
 * @author Oliver Gierke
 * @author Tobias Trelle
 * @author Chuong Ngo
 */
public interface MongoOperations {

@@ -247,7 +252,7 @@ public interface MongoOperations {
     * Query for a list of objects of type T from the collection used by the entity class.
     * <p/>
     * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
     * configured otherwise, an instance of SimpleMongoConverter will be used.
     * configured otherwise, an instance of MappingMongoConverter will be used.
     * <p/>
     * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
     * to map objects since the test for class type is done in the client and not on the server.
@@ -261,7 +266,7 @@ public interface MongoOperations {
     * Query for a list of objects of type T from the specified collection.
     * <p/>
     * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
     * configured otherwise, an instance of SimpleMongoConverter will be used.
     * configured otherwise, an instance of MappingMongoConverter will be used.
     * <p/>
     * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
     * to map objects since the test for class type is done in the client and not on the server.
@@ -301,6 +306,57 @@ public interface MongoOperations {
     */
    <T> GroupByResults<T> group(Criteria criteria, String inputCollectionName, GroupBy groupBy, Class<T> entityClass);

    /**
     * Execute an aggregation operation. The raw results will be mapped to the given entity class. The name of the
     * inputCollection is derived from the inputType of the aggregation.
     *
     * @param aggregation The {@link TypedAggregation} specification holding the aggregation operations, must not be
     *          {@literal null}.
     * @param collectionName The name of the input collection to use for the aggreation.
     * @param outputType The parameterized type of the returned list, must not be {@literal null}.
     * @return The results of the aggregation operation.
     * @since 1.3
     */
    <O> AggregationResults<O> aggregate(TypedAggregation<?> aggregation, String collectionName, Class<O> outputType);

    /**
     * Execute an aggregation operation. The raw results will be mapped to the given entity class. The name of the
     * inputCollection is derived from the inputType of the aggregation.
     *
     * @param aggregation The {@link TypedAggregation} specification holding the aggregation operations, must not be
     *          {@literal null}.
     * @param outputType The parameterized type of the returned list, must not be {@literal null}.
     * @return The results of the aggregation operation.
     * @since 1.3
     */
    <O> AggregationResults<O> aggregate(TypedAggregation<?> aggregation, Class<O> outputType);

    /**
     * Execute an aggregation operation. The raw results will be mapped to the given entity class.
     *
     * @param aggregation The {@link Aggregation} specification holding the aggregation operations, must not be
     *          {@literal null}.
     * @param inputType the inputType where the aggregation operation will read from, must not be {@literal null} or
     *          empty.
     * @param outputType The parameterized type of the returned list, must not be {@literal null}.
     * @return The results of the aggregation operation.
     * @since 1.3
     */
    <O> AggregationResults<O> aggregate(Aggregation aggregation, Class<?> inputType, Class<O> outputType);

    /**
     * Execute an aggregation operation. The raw results will be mapped to the given entity class.
     *
     * @param aggregation The {@link Aggregation} specification holding the aggregation operations, must not be
     *          {@literal null}.
     * @param collectionName the collection where the aggregation operation will read from, must not be {@literal null} or
     *          empty.
     * @param outputType The parameterized type of the returned list, must not be {@literal null}.
     * @return The results of the aggregation operation.
     * @since 1.3
     */
    <O> AggregationResults<O> aggregate(Aggregation aggregation, String collectionName, Class<O> outputType);

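To make the new contract concrete, here is a rough sketch of how a caller might drive these methods with the aggregation framework's static builders. `Order` and `OrderSummary` are hypothetical types, and the builder methods (`newAggregation`, `match`, `group`) are assumed from the 1.3 aggregation support:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import java.util.List;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.mongodb.core.query.Criteria;

public class AggregationUsageSample {

    // counts paid orders per customer; the input collection is derived from Order
    public List<OrderSummary> paidOrdersPerCustomer(MongoOperations operations) {

        TypedAggregation<Order> aggregation = newAggregation(Order.class,
                match(Criteria.where("status").is("PAID")),
                group("customerId").count().as("orderCount"));

        AggregationResults<OrderSummary> results = operations.aggregate(aggregation, OrderSummary.class);
        return results.getMappedResults();
    }

    // hypothetical input and output types for the sketch; note that the grouping
    // key is exposed as "_id" in the raw result documents
    static class Order {
        String customerId, status;
    }

    static class OrderSummary {
        long orderCount;
    }
}
```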
    /**
     * Execute a map-reduce operation. The map-reduce operation will be formed with an output type of INLINE
     *
@@ -382,7 +438,7 @@ public interface MongoOperations {
     * specified type.
     * <p/>
     * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
     * configured otherwise, an instance of SimpleMongoConverter will be used.
     * configured otherwise, an instance of MappingMongoConverter will be used.
     * <p/>
     * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
     * feature rich {@link Query}.
@@ -399,7 +455,7 @@ public interface MongoOperations {
     * type.
     * <p/>
     * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
     * configured otherwise, an instance of SimpleMongoConverter will be used.
     * configured otherwise, an instance of MappingMongoConverter will be used.
     * <p/>
     * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
     * feature rich {@link Query}.
@@ -422,7 +478,7 @@ public interface MongoOperations {
     * Map the results of an ad-hoc query on the collection for the entity class to a List of the specified type.
     * <p/>
     * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
     * configured otherwise, an instance of SimpleMongoConverter will be used.
     * configured otherwise, an instance of MappingMongoConverter will be used.
     * <p/>
     * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
     * feature rich {@link Query}.
@@ -438,7 +494,7 @@ public interface MongoOperations {
     * Map the results of an ad-hoc query on the specified collection to a List of the specified type.
     * <p/>
     * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
     * configured otherwise, an instance of SimpleMongoConverter will be used.
     * configured otherwise, an instance of MappingMongoConverter will be used.
     * <p/>
     * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
     * feature rich {@link Query}.
@@ -504,7 +560,7 @@ public interface MongoOperations {
     * type. The first document that matches the query is returned and also removed from the collection in the database.
     * <p/>
     * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
     * configured otherwise, an instance of SimpleMongoConverter will be used.
     * configured otherwise, an instance of MappingMongoConverter will be used.
     * <p/>
     * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
     * feature rich {@link Query}.
@@ -557,7 +613,7 @@ public interface MongoOperations {
     * Insert the object into the specified collection.
     * <p/>
     * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
     * configured otherwise, an instance of SimpleMongoConverter will be used.
     * configured otherwise, an instance of MappingMongoConverter will be used.
     * <p/>
     * Insert is used to initially store the object into the database. To update an existing object use the save method.
     *
@@ -595,7 +651,7 @@ public interface MongoOperations {
     * object is not already present, that is an 'upsert'.
     * <p/>
     * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
     * configured otherwise, an instance of SimpleMongoConverter will be used.
     * configured otherwise, an instance of MappingMongoConverter will be used.
     * <p/>
     * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
     * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
@@ -612,7 +668,7 @@ public interface MongoOperations {
     * is an 'upsert'.
     * <p/>
     * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
     * configured otherwise, an instance of SimpleMongoConverter will be used.
     * configured otherwise, an instance of MappingMongoConverter will be used.
     * <p/>
     * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
     * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
@@ -648,6 +704,18 @@ public interface MongoOperations {
     */
    WriteResult upsert(Query query, Update update, String collectionName);

    /**
     * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
     * combining the query document and the update document.
     *
     * @param query the query document that specifies the criteria used to select a record to be upserted
     * @param update the update document that contains the updated object or $ operators to manipulate the existing object
     * @param entityClass class of the pojo to be operated on
     * @param collectionName name of the collection to update the object in
     * @return the WriteResult which lets you access the results of the previous write.
     */
    WriteResult upsert(Query query, Update update, Class<?> entityClass, String collectionName);

    /**
     * Updates the first object that is found in the collection of the entity class that matches the query document with
     * the provided update document.
@@ -672,6 +740,19 @@ public interface MongoOperations {
     */
    WriteResult updateFirst(Query query, Update update, String collectionName);

    /**
     * Updates the first object that is found in the specified collection that matches the query document criteria with
     * the provided updated document.
     *
     * @param query the query document that specifies the criteria used to select a record to be updated
     * @param update the update document that contains the updated object or $ operators to manipulate the existing
     *          object.
     * @param entityClass class of the pojo to be operated on
     * @param collectionName name of the collection to update the object in
     * @return the WriteResult which lets you access the results of the previous write.
     */
    WriteResult updateFirst(Query query, Update update, Class<?> entityClass, String collectionName);

    /**
     * Updates all objects that are found in the collection for the entity class that matches the query document criteria
     * with the provided updated document.
@@ -696,6 +777,19 @@ public interface MongoOperations {
     */
    WriteResult updateMulti(Query query, Update update, String collectionName);

    /**
     * Updates all objects that are found in the collection for the entity class that matches the query document criteria
     * with the provided updated document.
     *
     * @param query the query document that specifies the criteria used to select a record to be updated
     * @param update the update document that contains the updated object or $ operators to manipulate the existing
     *          object.
     * @param entityClass class of the pojo to be operated on
     * @param collectionName name of the collection to update the object in
     * @return the WriteResult which lets you access the results of the previous write.
     */
    WriteResult updateMulti(final Query query, final Update update, Class<?> entityClass, String collectionName);

    /**
     * Remove the given object from the collection by id.
     *

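The three additions follow one pattern: passing both the entity class and the collection name lets the template map field names in the query and update against the entity's mapping metadata even when a non-default collection is targeted. A hedged sketch of the calls (the `Person` type is hypothetical):

```java
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

public class UpdateUsageSample {

    public void renameBob(MongoOperations operations) {

        Query query = Query.query(Criteria.where("firstname").is("Bob"));
        Update update = new Update().set("lastname", "Builder");

        // mapped against Person's metadata, executed against the explicit "people" collection
        operations.updateFirst(query, update, Person.class, "people");
        operations.updateMulti(query, update, Person.class, "people");
        operations.upsert(query, update, Person.class, "people");
    }

    static class Person {
        String firstname, lastname;
    }
}
```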
@@ -1,5 +1,5 @@
/*
 * Copyright 2010-2011 the original author or authors.
 * Copyright 2010-2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -15,129 +15,91 @@
 */
package org.springframework.data.mongodb.core;

import com.mongodb.MongoOptions;
import javax.net.ssl.SSLSocketFactory;

import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.InitializingBean;

import com.mongodb.MongoOptions;

/**
 * A factory bean for construction of a MongoOptions instance
 * A factory bean for construction of a {@link MongoOptions} instance.
 *
 * @author Graeme Rocher
 * @Author Mark Pollack
 * @author Mark Pollack
 * @author Mike Saavedra
 * @author Thomas Darimont
 */
public class MongoOptionsFactoryBean implements FactoryBean<MongoOptions>, InitializingBean {

    private static final MongoOptions MONGO_OPTIONS = new MongoOptions();
    /**
     * number of connections allowed per host will block if run out
     */
    @SuppressWarnings("deprecation")//
    private final MongoOptions MONGO_OPTIONS = new MongoOptions();

    private int connectionsPerHost = MONGO_OPTIONS.connectionsPerHost;

    /**
     * multiplier for connectionsPerHost for # of threads that can block if connectionsPerHost is 10, and
     * threadsAllowedToBlockForConnectionMultiplier is 5, then 50 threads can block more than that and an exception will
     * be throw
     */
    private int threadsAllowedToBlockForConnectionMultiplier = MONGO_OPTIONS.threadsAllowedToBlockForConnectionMultiplier;

    /**
     * max wait time of a blocking thread for a connection
     */
    private int maxWaitTime = MONGO_OPTIONS.maxWaitTime;

    /**
     * connect timeout in milliseconds. 0 is default and infinite
     */
    private int connectTimeout = MONGO_OPTIONS.connectTimeout;

    /**
     * socket timeout. 0 is default and infinite
     */
    private int socketTimeout = MONGO_OPTIONS.socketTimeout;

    /**
     * This controls whether or not to have socket keep alive turned on (SO_KEEPALIVE).
     *
     * defaults to false
     */
    public boolean socketKeepAlive = MONGO_OPTIONS.socketKeepAlive;

    /**
     * this controls whether or not on a connect, the system retries automatically
     */
    private boolean socketKeepAlive = MONGO_OPTIONS.socketKeepAlive;
    private boolean autoConnectRetry = MONGO_OPTIONS.autoConnectRetry;

    private long maxAutoConnectRetryTime = MONGO_OPTIONS.maxAutoConnectRetryTime;

    /**
     * This specifies the number of servers to wait for on the write operation, and exception raising behavior.
     *
     * Defaults to 0.
     */
    private int writeNumber;

    /**
     * This controls timeout for write operations in milliseconds.
     *
     * Defaults to 0 (indefinite). Greater than zero is number of milliseconds to wait.
     */
    private int writeTimeout;

    /**
     * This controls whether or not to fsync.
     *
     * Defaults to false.
     */
    private boolean writeFsync;
    @SuppressWarnings("deprecation") private boolean slaveOk = MONGO_OPTIONS.slaveOk;
    private boolean ssl;
    private SSLSocketFactory sslSocketFactory;

    /**
     * Specifies if the driver is allowed to read from secondaries or slaves.
     * Configures the maximum number of connections allowed per host until we will block.
     *
     * Defaults to false
     */
    @SuppressWarnings("deprecation")
    private boolean slaveOk = MONGO_OPTIONS.slaveOk;

    /**
     * number of connections allowed per host will block if run out
     * @param connectionsPerHost
     */
    public void setConnectionsPerHost(int connectionsPerHost) {
        this.connectionsPerHost = connectionsPerHost;
    }

    /**
     * multiplier for connectionsPerHost for # of threads that can block if connectionsPerHost is 10, and
     * threadsAllowedToBlockForConnectionMultiplier is 5, then 50 threads can block more than that and an exception will
     * be throw
     * A multiplier for connectionsPerHost for # of threads that can block a connection. If connectionsPerHost is 10, and
     * threadsAllowedToBlockForConnectionMultiplier is 5, then 50 threads can block. If more threads try to block an
     * exception will be thrown.
     *
     * @param threadsAllowedToBlockForConnectionMultiplier
     */
    public void setThreadsAllowedToBlockForConnectionMultiplier(int threadsAllowedToBlockForConnectionMultiplier) {
        this.threadsAllowedToBlockForConnectionMultiplier = threadsAllowedToBlockForConnectionMultiplier;
    }

    /**
     * max wait time of a blocking thread for a connection
     * Max wait time of a blocking thread for a connection.
     *
     * @param maxWaitTime
     */
    public void setMaxWaitTime(int maxWaitTime) {
        this.maxWaitTime = maxWaitTime;
    }

    /**
     * connect timeout in milliseconds. 0 is default and infinite
     * Configures the connect timeout in milliseconds. Defaults to 0 (infinite time).
     *
     * @param connectTimeout
     */
    public void setConnectTimeout(int connectTimeout) {
        this.connectTimeout = connectTimeout;
    }

    /**
     * socket timeout. 0 is default and infinite
     * Configures the socket timeout. Defaults to 0 (infinite time).
     *
     * @param socketTimeout
     */
    public void setSocketTimeout(int socketTimeout) {
        this.socketTimeout = socketTimeout;
    }

    /**
     * This controls whether or not to have socket keep alive
     * Configures whether or not to have socket keep alive turned on (SO_KEEPALIVE). Defaults to {@literal false}.
     *
     * @param socketKeepAlive
     */
@@ -152,7 +114,7 @@ public class MongoOptionsFactoryBean implements FactoryBean<MongoOptions>, Initi
     * <li>-1 = don't even report network errors</li>
     * <li>0 = default, don't call getLastError by default</li>
     * <li>1 = basic, call getLastError, but don't wait for slaves</li>
     * <li>2+= wait for slaves</li>
     * <li>2 += wait for slaves</li>
     * </ul>
     *
     * @param writeNumber the number of servers to wait for on the write operation, and exception raising behavior.
@@ -162,33 +124,33 @@ public class MongoOptionsFactoryBean implements FactoryBean<MongoOptions>, Initi
    }

    /**
     * This controls timeout for write operations in milliseconds. The 'wtimeout' option to the getlasterror command.
     * Configures the timeout for write operations in milliseconds. This defaults to {@literal 0} (indefinite).
     *
     * @param writeTimeout Defaults to 0 (indefinite). Greater than zero is number of milliseconds to wait.
     * @param writeTimeout
     */
    public void setWriteTimeout(int writeTimeout) {
        this.writeTimeout = writeTimeout;
    }

    /**
     * This controls whether or not to fsync. The 'fsync' option to the getlasterror command. Defaults to false.
     * Configures whether or not to fsync. The 'fsync' option to the getlasterror command. Defaults to {@literal false}.
     *
     * @param writeFsync to fsync on write (true), otherwise false.
     * @param writeFsync to fsync on <code>write (true)<code>, otherwise {@literal false}.
     */
    public void setWriteFsync(boolean writeFsync) {
        this.writeFsync = writeFsync;
    }

    /**
     * this controls whether or not on a connect, the system retries automatically
     * Configures whether or not the system retries automatically on a failed connect. This defaults to {@literal false}.
     */
    public void setAutoConnectRetry(boolean autoConnectRetry) {
        this.autoConnectRetry = autoConnectRetry;
    }

    /**
     * The maximum amount of time in millisecons to spend retrying to open connection to the same server. Default is 0,
     * which means to use the default 15s if autoConnectRetry is on.
     * Configures the maximum amount of time in millisecons to spend retrying to open connection to the same server. This
     * defaults to {@literal 0}, which means to use the default {@literal 15s} if {@link #autoConnectRetry} is on.
     *
     * @param maxAutoConnectRetryTime the maxAutoConnectRetryTime to set
     */
@@ -197,7 +159,7 @@ public class MongoOptionsFactoryBean implements FactoryBean<MongoOptions>, Initi
    }

    /**
     * Specifies if the driver is allowed to read from secondaries or slaves. Defaults to false.
     * Specifies if the driver is allowed to read from secondaries or slaves. Defaults to {@literal false}.
     *
     * @param slaveOk true if the driver should read from secondaries or slaves.
     */
@@ -205,8 +167,39 @@ public class MongoOptionsFactoryBean implements FactoryBean<MongoOptions>, Initi
        this.slaveOk = slaveOk;
    }

    /**
     * Specifies if the driver should use an SSL connection to Mongo. This defaults to {@literal false}. By default
     * {@link SSLSocketFactory#getDefault()} will be used. See {@link #setSslSocketFactory(SSLSocketFactory)} if you want
     * to configure a custom factory.
     *
     * @param ssl true if the driver should use an SSL connection.
     * @see #setSslSocketFactory(SSLSocketFactory)
     */
    public void setSsl(boolean ssl) {
        this.ssl = ssl;
    }

    /**
     * Specifies the {@link SSLSocketFactory} to use for creating SSL connections to Mongo. Defaults to
     * {@link SSLSocketFactory#getDefault()}. Implicitly activates {@link #setSsl(boolean)} if a non-{@literal null} value
     * is given.
     *
     * @param sslSocketFactory the sslSocketFactory to use.
     * @see #setSsl(boolean)
     */
    public void setSslSocketFactory(SSLSocketFactory sslSocketFactory) {

        setSsl(sslSocketFactory != null);
        this.sslSocketFactory = sslSocketFactory;
    }

    /*
     * (non-Javadoc)
     * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
     */
    @SuppressWarnings("deprecation")
    public void afterPropertiesSet() {

        MONGO_OPTIONS.connectionsPerHost = connectionsPerHost;
        MONGO_OPTIONS.threadsAllowedToBlockForConnectionMultiplier = threadsAllowedToBlockForConnectionMultiplier;
        MONGO_OPTIONS.maxWaitTime = maxWaitTime;
@@ -219,18 +212,32 @@ public class MongoOptionsFactoryBean implements FactoryBean<MongoOptions>, Initi
        MONGO_OPTIONS.w = writeNumber;
        MONGO_OPTIONS.wtimeout = writeTimeout;
        MONGO_OPTIONS.fsync = writeFsync;
        if (ssl) {
            MONGO_OPTIONS.setSocketFactory(sslSocketFactory != null ? sslSocketFactory : SSLSocketFactory.getDefault());
        }
    }

    /*
     * (non-Javadoc)
     * @see org.springframework.beans.factory.FactoryBean#getObject()
     */
    public MongoOptions getObject() {
        return MONGO_OPTIONS;
    }

    /*
     * (non-Javadoc)
     * @see org.springframework.beans.factory.FactoryBean#getObjectType()
     */
    public Class<?> getObjectType() {
        return MongoOptions.class;
    }

    /*
     * (non-Javadoc)
     * @see org.springframework.beans.factory.FactoryBean#isSingleton()
     */
    public boolean isSingleton() {
        return true;
    }

}

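Taken together, the rewrite boils down to: the field Javadoc moved onto the setters, the keep-alive field became private, and the new `ssl`/`sslSocketFactory` settings are applied in `afterPropertiesSet()`. A short usage sketch of the SSL switch:

```java
import com.mongodb.MongoOptions;

import org.springframework.data.mongodb.core.MongoOptionsFactoryBean;

public class MongoOptionsSample {

    public static void main(String[] args) {
        MongoOptionsFactoryBean factoryBean = new MongoOptionsFactoryBean();
        factoryBean.setConnectionsPerHost(25);
        // falls back to SSLSocketFactory.getDefault() because no custom factory is set
        factoryBean.setSsl(true);
        factoryBean.afterPropertiesSet();

        MongoOptions options = factoryBean.getObject();
        System.out.println(options.connectionsPerHost);
    }
}
```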
@@ -46,6 +46,7 @@ import org.springframework.core.io.ResourceLoader;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.dao.OptimisticLockingFailureException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.authentication.UserCredentials;
|
||||
import org.springframework.data.convert.EntityReader;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
@@ -53,10 +54,19 @@ import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.BeanWrapper;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
|
||||
import org.springframework.data.mongodb.core.aggregation.Fields;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
import org.springframework.data.mongodb.core.convert.DbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoWriter;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapper;
|
||||
import org.springframework.data.mongodb.core.geo.Distance;
|
||||
import org.springframework.data.mongodb.core.geo.GeoResult;
|
||||
import org.springframework.data.mongodb.core.geo.GeoResults;
|
||||
@@ -113,6 +123,10 @@ import com.mongodb.util.JSONParseException;
|
||||
* @author Oliver Gierke
|
||||
* @author Amol Nayak
|
||||
* @author Patryk Wasik
|
||||
* @author Tobias Trelle
|
||||
* @author Sebastian Herold
|
||||
* @author Thomas Darimont
|
||||
* @author Chuong Ngo
|
||||
*/
|
||||
public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
@@ -134,8 +148,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
private final MongoConverter mongoConverter;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final MongoDbFactory mongoDbFactory;
|
||||
private final MongoExceptionTranslator exceptionTranslator = new MongoExceptionTranslator();
|
||||
private final QueryMapper mapper;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
private final QueryMapper queryMapper;
|
||||
private final UpdateMapper updateMapper;
|
||||
|
||||
private WriteConcern writeConcern;
|
||||
private WriteConcernResolver writeConcernResolver = DefaultWriteConcernResolver.INSTANCE;
|
||||
@@ -187,8 +202,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
Assert.notNull(mongoDbFactory);
|
||||
|
||||
this.mongoDbFactory = mongoDbFactory;
|
||||
this.exceptionTranslator = mongoDbFactory.getExceptionTranslator();
|
||||
this.mongoConverter = mongoConverter == null ? getDefaultMongoConverter(mongoDbFactory) : mongoConverter;
|
||||
this.mapper = new QueryMapper(this.mongoConverter);
|
||||
this.queryMapper = new QueryMapper(this.mongoConverter);
|
||||
this.updateMapper = new UpdateMapper(this.mongoConverter);
|
||||
|
||||
// We always have a mapping context in the converter, whether it's a simple one or not
|
||||
mappingContext = this.mongoConverter.getMappingContext();
|
||||
@@ -495,7 +512,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
throw new InvalidDataAccessApiUsageException("Query passed in to exist can't be null");
|
||||
}
|
||||
|
||||
DBObject mappedQuery = mapper.getMappedObject(query.getQueryObject(), getPersistentEntity(entityClass));
|
||||
DBObject mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), getPersistentEntity(entityClass));
|
||||
return execute(collectionName, new FindCallback(mappedQuery)).hasNext();
|
||||
}
|
||||
|
||||
@@ -553,8 +570,26 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
mongoConverter, entityClass), near.getMetric());
|
||||
List<GeoResult<T>> result = new ArrayList<GeoResult<T>>(results.size());
|
||||
|
||||
int index = 0;
|
||||
int elementsToSkip = near.getSkip() != null ? near.getSkip() : 0;
|
||||
|
||||
for (Object element : results) {
|
||||
result.add(callback.doWith((DBObject) element));
|
||||
|
||||
/*
|
||||
* As MongoDB currently (2.4.4) doesn't support the skipping of elements in near queries
|
||||
* we skip the elements ourselves to avoid at least the document 2 object mapping overhead.
|
||||
*
|
||||
* @see https://jira.mongodb.org/browse/SERVER-3925
|
||||
*/
|
||||
if (index >= elementsToSkip) {
|
||||
result.add(callback.doWith((DBObject) element));
|
||||
}
|
||||
index++;
|
||||
}
|
||||
|
||||
if (elementsToSkip > 0) {
|
||||
// as we skipped some elements we have to calculate the averageDistance ourselves:
|
||||
return new GeoResults<T>(result, near.getMetric());
|
||||
}
|
||||
|
||||
DBObject stats = (DBObject) commandResult.get("stats");
|
||||
@@ -604,7 +639,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
private long count(Query query, Class<?> entityClass, String collectionName) {
|
||||
|
||||
Assert.hasText(collectionName);
|
||||
final DBObject dbObject = query == null ? null : mapper.getMappedObject(query.getQueryObject(),
|
||||
final DBObject dbObject = query == null ? null : queryMapper.getMappedObject(query.getQueryObject(),
|
||||
entityClass == null ? null : mappingContext.getPersistentEntity(entityClass));
|
||||
|
||||
return execute(collectionName, new CollectionCallback<Long>() {
|
||||
@@ -669,10 +704,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
initializeVersionProperty(objectToSave);
|
||||
|
||||
BasicDBObject dbDoc = new BasicDBObject();
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(objectToSave));
|
||||
writer.write(objectToSave, dbDoc);
|
||||
|
||||
DBObject dbDoc = toDbObject(objectToSave, writer);
|
||||
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(objectToSave, dbDoc));
|
||||
Object id = insertDBObject(collectionName, dbDoc, objectToSave.getClass());
|
||||
@@ -681,6 +715,26 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
maybeEmitEvent(new AfterSaveEvent<T>(objectToSave, dbDoc));
|
||||
}
|
||||
|
||||
/**
|
||||
* @param objectToSave
|
||||
* @param writer
|
||||
* @return
|
||||
*/
|
||||
private <T> DBObject toDbObject(T objectToSave, MongoWriter<T> writer) {
|
||||
|
||||
if (!(objectToSave instanceof String)) {
|
||||
DBObject dbDoc = new BasicDBObject();
|
||||
writer.write(objectToSave, dbDoc);
|
||||
return dbDoc;
|
||||
} else {
|
||||
try {
|
||||
return (DBObject) JSON.parse((String) objectToSave);
|
||||
} catch (JSONParseException e) {
|
||||
throw new MappingException("Could not parse given String to save into a JSON document!", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
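The extraction of `toDbObject(..)` means `insert(..)` and `save(..)` now share one conversion path, including the special case where the object to persist is already a JSON `String`. Illustrative call only — collection name and payload are placeholders:

```java
import org.springframework.data.mongodb.core.MongoTemplate;

public class SaveJsonStringSample {

    public void saveRawJson(MongoTemplate template) {
        // a raw JSON String bypasses the converter and is parsed with JSON.parse(..)
        template.save("{ \"_id\" : \"order-4711\", \"state\" : \"CREATED\" }", "orders");
    }
}
```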
||||
private void initializeVersionProperty(Object entity) {
|
||||
|
||||
MongoPersistentEntity<?> mongoPersistentEntity = getPersistentEntity(entity.getClass());
|
||||
@@ -820,19 +874,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
assertUpdateableIdIfNotSet(objectToSave);
|
||||
|
||||
DBObject dbDoc = new BasicDBObject();
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(objectToSave));
|
||||
|
||||
if (!(objectToSave instanceof String)) {
|
||||
writer.write(objectToSave, dbDoc);
|
||||
} else {
|
||||
try {
|
||||
dbDoc = (DBObject) JSON.parse((String) objectToSave);
|
||||
} catch (JSONParseException e) {
|
||||
throw new MappingException("Could not parse given String to save into a JSON document!", e);
|
||||
}
|
||||
}
|
||||
DBObject dbDoc = toDbObject(objectToSave, writer);
|
||||
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(objectToSave, dbDoc));
|
||||
Object id = saveDBObject(collectionName, dbDoc, objectToSave.getClass());
|
||||
@@ -916,6 +960,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
return doUpdate(collectionName, query, update, null, true, false);
|
||||
}
|
||||
|
||||
public WriteResult upsert(Query query, Update update, Class<?> entityClass, String collectionName) {
|
||||
return doUpdate(collectionName, query, update, entityClass, true, false);
|
||||
}
|
||||
|
||||
public WriteResult updateFirst(Query query, Update update, Class<?> entityClass) {
|
||||
return doUpdate(determineCollectionName(entityClass), query, update, entityClass, false, false);
|
||||
}
|
||||
@@ -924,6 +972,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
return doUpdate(collectionName, query, update, null, false, false);
|
||||
}
|
||||
|
||||
public WriteResult updateFirst(Query query, Update update, Class<?> entityClass, String collectionName) {
|
||||
return doUpdate(collectionName, query, update, entityClass, false, false);
|
||||
}
|
||||
|
||||
public WriteResult updateMulti(Query query, Update update, Class<?> entityClass) {
|
||||
return doUpdate(determineCollectionName(entityClass), query, update, entityClass, false, true);
|
||||
}
|
||||
@@ -932,6 +984,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
return doUpdate(collectionName, query, update, null, false, true);
|
||||
}
|
||||
|
||||
public WriteResult updateMulti(final Query query, final Update update, Class<?> entityClass, String collectionName) {
|
||||
return doUpdate(collectionName, query, update, entityClass, false, true);
|
||||
}
|
||||
|
||||
protected WriteResult doUpdate(final String collectionName, final Query query, final Update update,
|
||||
final Class<?> entityClass, final boolean upsert, final boolean multi) {
|
||||
|
||||
@@ -940,10 +996,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
MongoPersistentEntity<?> entity = entityClass == null ? null : getPersistentEntity(entityClass);
|
||||
|
||||
DBObject queryObj = query == null ? new BasicDBObject()
|
||||
: mapper.getMappedObject(query.getQueryObject(), entity);
|
||||
DBObject updateObj = update == null ? new BasicDBObject() : mapper.getMappedObject(update.getUpdateObject(),
|
||||
DBObject queryObj = query == null ? new BasicDBObject() : queryMapper.getMappedObject(query.getQueryObject(),
|
||||
entity);
|
||||
DBObject updateObj = update == null ? new BasicDBObject() : updateMapper.getMappedObject(
|
||||
update.getUpdateObject(), entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Calling update using query: " + queryObj + " and update: " + updateObj + " in collection: "
|
||||
@@ -1061,7 +1117,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
maybeEmitEvent(new BeforeDeleteEvent<T>(queryObject, entityClass));
|
||||
|
||||
DBObject dboq = mapper.getMappedObject(queryObject, entity);
|
||||
DBObject dboq = queryMapper.getMappedObject(queryObject, entity);
|
||||
|
||||
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.REMOVE, collectionName,
|
||||
entityClass, null, queryObject);
|
||||
@@ -1156,7 +1212,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
if (criteria == null) {
|
||||
dbo.put("cond", null);
|
||||
} else {
|
||||
dbo.put("cond", mapper.getMappedObject(criteria.getCriteriaObject(), null));
|
||||
dbo.put("cond", queryMapper.getMappedObject(criteria.getCriteriaObject(), null));
|
||||
}
|
||||
// If initial document was a JavaScript string, potentially loaded by Spring's Resource abstraction, load it and
|
||||
// convert to DBObject
|
||||
@@ -1205,6 +1261,64 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public <O> AggregationResults<O> aggregate(TypedAggregation<?> aggregation, Class<O> outputType) {
|
||||
return aggregate(aggregation, determineCollectionName(aggregation.getInputType()), outputType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <O> AggregationResults<O> aggregate(TypedAggregation<?> aggregation, String inputCollectionName,
|
||||
Class<O> outputType) {
|
||||
|
||||
Assert.notNull(aggregation, "Aggregation pipeline must not be null!");
|
||||
|
||||
AggregationOperationContext context = new TypeBasedAggregationOperationContext(aggregation.getInputType(),
|
||||
mappingContext, queryMapper);
|
||||
return aggregate(aggregation, inputCollectionName, outputType, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <O> AggregationResults<O> aggregate(Aggregation aggregation, Class<?> inputType, Class<O> outputType) {
|
||||
|
||||
return aggregate(aggregation, determineCollectionName(inputType), outputType,
|
||||
new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper));
|
||||
}
|
||||
|
||||
@Override
|
||||
public <O> AggregationResults<O> aggregate(Aggregation aggregation, String collectionName, Class<O> outputType) {
|
||||
return aggregate(aggregation, collectionName, outputType, null);
|
||||
}
|
||||
|
||||
protected <O> AggregationResults<O> aggregate(Aggregation aggregation, String collectionName, Class<O> outputType,
|
||||
AggregationOperationContext context) {
|
||||
|
||||
Assert.hasText(collectionName, "Collection name must not be null or empty!");
|
||||
Assert.notNull(aggregation, "Aggregation pipeline must not be null!");
|
||||
Assert.notNull(outputType, "Output type must not be null!");
|
||||
|
||||
AggregationOperationContext rootContext = context == null ? Aggregation.DEFAULT_CONTEXT : context;
|
||||
DBObject command = aggregation.toDbObject(collectionName, rootContext);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Executing aggregation: {}", serializeToJsonSafely(command));
|
||||
}
|
||||
|
||||
CommandResult commandResult = executeCommand(command);
|
||||
handleCommandError(commandResult, command);
|
||||
|
||||
// map results
|
||||
@SuppressWarnings("unchecked")
|
||||
Iterable<DBObject> resultSet = (Iterable<DBObject>) commandResult.get("result");
|
||||
List<O> mappedResults = new ArrayList<O>();
|
||||
DbObjectCallback<O> callback = new UnwrapAndReadDbObjectCallback<O>(mongoConverter, outputType);
|
||||
|
||||
for (DBObject dbObject : resultSet) {
|
||||
mappedResults.add(callback.doWith(dbObject));
|
||||
}
|
||||
|
||||
return new AggregationResults<O>(mappedResults, commandResult);
|
||||
}
|
||||
|
||||
protected String replaceWithResourceIfNecessary(String function) {
|
||||
|
||||
String func = function;
|
||||
@@ -1313,57 +1427,28 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter
|
||||
* <p/>
|
||||
* The query document is specified as a standard DBObject and so is the fields specification.
|
||||
* Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter.
|
||||
* The query document is specified as a standard {@link DBObject} and so is the fields specification.
|
||||
*
|
||||
* @param collectionName name of the collection to retrieve the objects from
|
||||
* @param query the query document that specifies the criteria used to find a record
|
||||
* @param fields the document that specifies the fields to be returned
|
||||
* @param collectionName name of the collection to retrieve the objects from.
|
||||
* @param query the query document that specifies the criteria used to find a record.
|
||||
* @param fields the document that specifies the fields to be returned.
|
||||
* @param entityClass the parameterized type of the returned list.
|
||||
* @return the List of converted objects.
|
||||
* @return the {@link List} of converted objects.
|
||||
*/
|
||||
protected <T> T doFindOne(String collectionName, DBObject query, DBObject fields, Class<T> entityClass) {
|
||||
EntityReader<? super T, DBObject> readerToUse = this.mongoConverter;
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
DBObject mappedQuery = mapper.getMappedObject(query, entity);
|
||||
|
||||
return executeFindOneInternal(new FindOneCallback(mappedQuery, fields), new ReadDbObjectCallback<T>(readerToUse,
|
||||
entityClass), collectionName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the default MongoDB collection to a List of the specified type. The object is
|
||||
* converted from the MongoDB native representation using an instance of {@link MongoConverter}. Unless configured
|
||||
* otherwise, an instance of SimpleMongoConverter will be used. The query document is specified as a standard DBObject
|
||||
* and so is the fields specification. Can be overridden by subclasses.
|
||||
*
|
||||
* @param collectionName name of the collection to retrieve the objects from
|
||||
* @param query the query document that specifies the criteria used to find a record
|
||||
* @param fields the document that specifies the fields to be returned
|
||||
* @param entityClass the parameterized type of the returned list.
|
||||
* @param preparer allows for customization of the DBCursor used when iterating over the result set, (apply limits,
|
||||
* skips and so on).
|
||||
* @return the List of converted objects.
|
||||
*/
|
||||
protected <T> List<T> doFind(String collectionName, DBObject query, DBObject fields, Class<T> entityClass,
|
||||
CursorPreparer preparer) {
|
||||
return doFind(collectionName, query, fields, entityClass, preparer, new ReadDbObjectCallback<T>(mongoConverter,
|
||||
entityClass));
|
||||
}
|
||||
|
||||
protected <S, T> List<T> doFind(String collectionName, DBObject query, DBObject fields, Class<S> entityClass,
|
||||
CursorPreparer preparer, DbObjectCallback<T> objectCallback) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
DBObject mappedQuery = queryMapper.getMappedObject(query, entity);
|
||||
DBObject mappedFields = fields == null ? null : queryMapper.getMappedObject(fields, entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("find using query: %s fields: %s for class: %s in collection: %s",
|
||||
serializeToJsonSafely(query), fields, entityClass, collectionName));
|
||||
LOGGER.debug(String.format("findOne using query: %s fields: %s for class: %s in collection: %s",
|
||||
serializeToJsonSafely(query), mappedFields, entityClass, collectionName));
|
||||
}
|
||||
|
||||
return executeFindMultiInternal(new FindCallback(mapper.getMappedObject(query, entity), fields), preparer,
|
||||
objectCallback, collectionName);
|
||||
return executeFindOneInternal(new FindOneCallback(mappedQuery, mappedFields), new ReadDbObjectCallback<T>(
|
||||
this.mongoConverter, entityClass), collectionName);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1377,14 +1462,43 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
* @return the List of converted objects.
|
||||
*/
|
||||
protected <T> List<T> doFind(String collectionName, DBObject query, DBObject fields, Class<T> entityClass) {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("find using query: " + query + " fields: " + fields + " for class: " + entityClass
|
||||
+ " in collection: " + collectionName);
|
||||
}
|
||||
EntityReader<? super T, DBObject> readerToUse = this.mongoConverter;
|
||||
return doFind(collectionName, query, fields, entityClass, null, new ReadDbObjectCallback<T>(this.mongoConverter,
|
||||
entityClass));
|
||||
}
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the default MongoDB collection to a List of the specified type. The object is
|
||||
* converted from the MongoDB native representation using an instance of {@link MongoConverter}. The query document is
|
||||
* specified as a standard DBObject and so is the fields specification.
|
||||
*
|
||||
* @param collectionName name of the collection to retrieve the objects from.
|
||||
* @param query the query document that specifies the criteria used to find a record.
|
||||
* @param fields the document that specifies the fields to be returned.
|
||||
* @param entityClass the parameterized type of the returned list.
|
||||
* @param preparer allows for customization of the {@link DBCursor} used when iterating over the result set, (apply
|
||||
* limits, skips and so on).
|
||||
* @return the {@link List} of converted objects.
|
||||
*/
|
||||
protected <T> List<T> doFind(String collectionName, DBObject query, DBObject fields, Class<T> entityClass,
|
||||
CursorPreparer preparer) {
|
||||
return doFind(collectionName, query, fields, entityClass, preparer, new ReadDbObjectCallback<T>(mongoConverter,
|
||||
entityClass));
|
||||
}
|
||||
|
||||
protected <S, T> List<T> doFind(String collectionName, DBObject query, DBObject fields, Class<S> entityClass,
|
||||
CursorPreparer preparer, DbObjectCallback<T> objectCallback) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
return executeFindMultiInternal(new FindCallback(mapper.getMappedObject(query, entity), fields), null,
|
||||
new ReadDbObjectCallback<T>(readerToUse, entityClass), collectionName);
|
||||
DBObject mappedFields = fields == null ? null : queryMapper.getMappedObject(fields, entity);
|
||||
DBObject mappedQuery = queryMapper.getMappedObject(query, entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("find using query: %s fields: %s for class: %s in collection: %s",
|
||||
serializeToJsonSafely(query), mappedFields, entityClass, collectionName));
|
||||
}
|
||||
|
||||
return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer, objectCallback,
|
||||
collectionName);
|
||||
}
|
||||
|
||||
protected DBObject convertToDbObject(CollectionOptions collectionOptions) {
|
||||
@@ -1422,7 +1536,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
+ entityClass + " in collection: " + collectionName);
|
||||
}
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
return executeFindOneInternal(new FindAndRemoveCallback(mapper.getMappedObject(query, entity), fields, sort),
|
||||
return executeFindOneInternal(new FindAndRemoveCallback(queryMapper.getMappedObject(query, entity), fields, sort),
|
||||
new ReadDbObjectCallback<T>(readerToUse, entityClass), collectionName);
|
||||
}
|
||||
|
||||
@@ -1437,19 +1551,15 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
|
||||
DBObject updateObj = update.getUpdateObject();
|
||||
for (String key : updateObj.keySet()) {
|
||||
updateObj.put(key, mongoConverter.convertToMongoType(updateObj.get(key)));
|
||||
}
|
||||
|
||||
DBObject mappedQuery = mapper.getMappedObject(query, entity);
|
||||
DBObject mappedUpdate = queryMapper.getMappedObject(update.getUpdateObject(), entity);
|
||||
DBObject mappedQuery = queryMapper.getMappedObject(query, entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("findAndModify using query: " + mappedQuery + " fields: " + fields + " sort: " + sort
|
||||
+ " for class: " + entityClass + " and update: " + updateObj + " in collection: " + collectionName);
|
||||
+ " for class: " + entityClass + " and update: " + mappedUpdate + " in collection: " + collectionName);
|
||||
}
|
||||
|
||||
return executeFindOneInternal(new FindAndModifyCallback(mappedQuery, fields, sort, updateObj, options),
|
||||
return executeFindOneInternal(new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate, options),
|
||||
new ReadDbObjectCallback<T>(readerToUse, entityClass), collectionName);
|
||||
}
|
||||
|
||||
@@ -1717,7 +1827,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
private static final MongoConverter getDefaultMongoConverter(MongoDbFactory factory) {
|
||||
MappingMongoConverter converter = new MappingMongoConverter(factory, new MongoMappingContext());
|
||||
|
||||
DbRefResolver dbRefResolver = new DefaultDbRefResolver(factory);
|
||||
MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, new MongoMappingContext());
|
||||
converter.afterPropertiesSet();
|
||||
return converter;
|
||||
}
|
||||
@@ -1874,6 +1986,35 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
}
|
||||
|
||||
class UnwrapAndReadDbObjectCallback<T> extends ReadDbObjectCallback<T> {
|
||||
|
||||
public UnwrapAndReadDbObjectCallback(EntityReader<? super T, DBObject> reader, Class<T> type) {
|
||||
super(reader, type);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T doWith(DBObject object) {
|
||||
|
||||
Object idField = object.get(Fields.UNDERSCORE_ID);
|
||||
|
||||
if (!(idField instanceof DBObject)) {
|
||||
return super.doWith(object);
|
||||
}
|
||||
|
||||
DBObject toMap = new BasicDBObject();
|
||||
DBObject nested = (DBObject) idField;
|
||||
toMap.putAll(nested);
|
||||
|
||||
for (String key : object.keySet()) {
|
||||
if (!Fields.UNDERSCORE_ID.equals(key)) {
|
||||
toMap.put(key, object.get(key));
|
||||
}
|
||||
}
|
||||
|
||||
return super.doWith(toMap);
|
||||
}
}
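The UnwrapAndReadDbObjectCallback above flattens group results before they are handed to the converter: whenever the _id key holds a nested document (as produced by a $group on several fields), its entries are hoisted to the top level. A standalone sketch of that transformation, for illustration only:

```java
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class UnwrapSketch {

	public static void main(String[] args) {

		// Shape produced by a $group on two fields:
		// { "_id" : { "state" : "CA" , "city" : "LA" } , "total" : 42 }
		DBObject grouped = new BasicDBObject("_id", new BasicDBObject("state", "CA").append("city", "LA"))
				.append("total", 42);

		// What effectively reaches the converter:
		// { "state" : "CA" , "city" : "LA" , "total" : 42 }
		System.out.println(unwrap(grouped));
	}

	// Mirrors the unwrapping logic of UnwrapAndReadDbObjectCallback.doWith(..).
	static DBObject unwrap(DBObject source) {

		Object idField = source.get("_id");

		if (!(idField instanceof DBObject)) {
			return source;
		}

		DBObject result = new BasicDBObject();
		result.putAll((DBObject) idField);

		for (String key : source.keySet()) {
			if (!"_id".equals(key)) {
				result.put(key, source.get(key));
			}
		}

		return result;
	}
}
```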
private enum DefaultWriteConcernResolver implements WriteConcernResolver {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -19,9 +19,11 @@ import java.net.UnknownHostException;
|
||||
|
||||
import org.springframework.beans.factory.DisposableBean;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.authentication.UserCredentials;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.Mongo;
|
||||
@@ -34,6 +36,7 @@ import com.mongodb.WriteConcern;
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
|
||||
|
||||
@@ -41,6 +44,9 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
|
||||
private final String databaseName;
|
||||
private final boolean mongoInstanceCreated;
|
||||
private final UserCredentials credentials;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
private final String authenticationDatabaseName;
|
||||
|
||||
private WriteConcern writeConcern;
|
||||
|
||||
/**
|
||||
@@ -50,7 +56,7 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
|
||||
* @param databaseName database name, not be {@literal null} or empty.
|
||||
*/
|
||||
public SimpleMongoDbFactory(Mongo mongo, String databaseName) {
|
||||
this(mongo, databaseName, UserCredentials.NO_CREDENTIALS, false);
|
||||
this(mongo, databaseName, null);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -61,7 +67,20 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
|
||||
* @param credentials username and password.
|
||||
*/
|
||||
public SimpleMongoDbFactory(Mongo mongo, String databaseName, UserCredentials credentials) {
|
||||
this(mongo, databaseName, credentials, false);
|
||||
this(mongo, databaseName, credentials, false, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an instance of SimpleMongoDbFactory given the Mongo instance, database name, and username/password
|
||||
*
|
||||
* @param mongo Mongo instance, must not be {@literal null}.
|
||||
* @param databaseName Database name, must not be {@literal null} or empty.
|
||||
* @param credentials username and password.
|
||||
* @param authenticationDatabaseName the database name to use for authentication
|
||||
*/
|
||||
public SimpleMongoDbFactory(Mongo mongo, String databaseName, UserCredentials credentials,
|
||||
String authenticationDatabaseName) {
|
||||
this(mongo, databaseName, credentials, false, authenticationDatabaseName);
}
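The new constructor, together with the authentication-database handling further down in this diff, lets credentials be validated against a database other than the one the application works with (commonly admin). A minimal sketch, assuming a local MongoDB instance and made-up credentials:

```java
import org.springframework.data.authentication.UserCredentials;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;

import com.mongodb.Mongo;

public class AuthenticationDatabaseExample {

	public static void main(String[] args) throws Exception {

		Mongo mongo = new Mongo("localhost");

		// Authenticate "appUser" against the "admin" database but work with the "orders" database.
		UserCredentials credentials = new UserCredentials("appUser", "secret");
		MongoDbFactory factory = new SimpleMongoDbFactory(mongo, "orders", credentials, "admin");

		MongoTemplate template = new MongoTemplate(factory);
		System.out.println(template.getCollectionNames());
	}
}
```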
/**
|
||||
@@ -72,12 +91,14 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
|
||||
* @throws UnknownHostException
|
||||
* @see MongoURI
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public SimpleMongoDbFactory(MongoURI uri) throws MongoException, UnknownHostException {
|
||||
this(new Mongo(uri), uri.getDatabase(), new UserCredentials(uri.getUsername(), parseChars(uri.getPassword())), true);
|
||||
this(new Mongo(uri), uri.getDatabase(), new UserCredentials(uri.getUsername(), parseChars(uri.getPassword())),
|
||||
true, uri.getDatabase());
|
||||
}
|
||||
|
||||
private SimpleMongoDbFactory(Mongo mongo, String databaseName, UserCredentials credentials,
|
||||
boolean mongoInstanceCreated) {
|
||||
boolean mongoInstanceCreated, String authenticationDatabaseName) {
|
||||
|
||||
Assert.notNull(mongo, "Mongo must not be null");
|
||||
Assert.hasText(databaseName, "Database name must not be empty");
|
||||
@@ -88,6 +109,12 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
|
||||
this.databaseName = databaseName;
|
||||
this.mongoInstanceCreated = mongoInstanceCreated;
|
||||
this.credentials = credentials == null ? UserCredentials.NO_CREDENTIALS : credentials;
|
||||
this.exceptionTranslator = new MongoExceptionTranslator();
|
||||
this.authenticationDatabaseName = StringUtils.hasText(authenticationDatabaseName) ? authenticationDatabaseName
|
||||
: databaseName;
|
||||
|
||||
Assert.isTrue(this.authenticationDatabaseName.matches("[\\w-]+"),
|
||||
"Authentication database name must only contain letters, numbers, underscores and dashes!");
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -115,7 +142,7 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
|
||||
|
||||
Assert.hasText(dbName, "Database name must not be empty.");
|
||||
|
||||
DB db = MongoDbUtils.getDB(mongo, dbName, credentials);
|
||||
DB db = MongoDbUtils.getDB(mongo, dbName, credentials, authenticationDatabaseName);
|
||||
|
||||
if (writeConcern != null) {
|
||||
db.setWriteConcern(writeConcern);
|
||||
@@ -138,4 +165,13 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
|
||||
private static String parseChars(char[] chars) {
|
||||
return chars == null ? null : String.valueOf(chars);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
|
||||
*/
|
||||
@Override
|
||||
public PersistenceExceptionTranslator getExceptionTranslator() {
|
||||
return this.exceptionTranslator;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,305 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import static org.springframework.data.mongodb.core.aggregation.Fields.*;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.data.mongodb.core.aggregation.Fields.AggregationField;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.SerializationUtils;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* An {@code Aggregation} is a representation of a list of aggregation steps to be performed by the MongoDB Aggregation
|
||||
* Framework.
|
||||
*
|
||||
* @author Tobias Trelle
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public class Aggregation {
|
||||
|
||||
public static final AggregationOperationContext DEFAULT_CONTEXT = new NoOpAggregationOperationContext();
|
||||
|
||||
private final List<AggregationOperation> operations;
|
||||
|
||||
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
* @param operations must not be {@literal null} or empty.
|
||||
*/
|
||||
public static Aggregation newAggregation(List<? extends AggregationOperation> operations) {
|
||||
return newAggregation(operations.toArray(new AggregationOperation[operations.size()]));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
* @param operations must not be {@literal null} or empty.
|
||||
*/
|
||||
public static Aggregation newAggregation(AggregationOperation... operations) {
|
||||
return new Aggregation(operations);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link TypedAggregation} for the given type and {@link AggregationOperation}s.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @param operations must not be {@literal null} or empty.
|
||||
*/
|
||||
public static <T> TypedAggregation<T> newAggregation(Class<T> type, List<? extends AggregationOperation> operations) {
|
||||
return newAggregation(type, operations.toArray(new AggregationOperation[operations.size()]));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link TypedAggregation} for the given type and {@link AggregationOperation}s.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @param operations must not be {@literal null} or empty.
|
||||
*/
|
||||
public static <T> TypedAggregation<T> newAggregation(Class<T> type, AggregationOperation... operations) {
|
||||
return new TypedAggregation<T>(type, operations);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
* @param aggregationOperations must not be {@literal null} or empty.
|
||||
*/
|
||||
protected Aggregation(AggregationOperation... aggregationOperations) {
|
||||
|
||||
Assert.notNull(aggregationOperations, "AggregationOperations must not be null!");
|
||||
Assert.isTrue(aggregationOperations.length > 0, "At least one AggregationOperation has to be provided");
|
||||
|
||||
this.operations = Arrays.asList(aggregationOperations);
|
||||
}
|
||||
|
||||
/**
|
||||
* A pointer to the previous {@link AggregationOperation}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public static String previousOperation() {
|
||||
return "_id";
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperation} including the given fields.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static ProjectionOperation project(String... fields) {
|
||||
return project(fields(fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperation} including the given {@link Fields}.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static ProjectionOperation project(Fields fields) {
|
||||
return new ProjectionOperation(fields);
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method to create a new {@link UnwindOperation} for the field with the given name.
|
||||
*
|
||||
* @param field must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
public static UnwindOperation unwind(String field) {
|
||||
return new UnwindOperation(field(field));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GroupOperation} for the given fields.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static GroupOperation group(String... fields) {
|
||||
return group(fields(fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GroupOperation} for the given {@link Fields}.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static GroupOperation group(Fields fields) {
|
||||
return new GroupOperation(fields);
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method to create a new {@link SortOperation} for the given {@link Sort}.
|
||||
*
|
||||
* @param sort must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static SortOperation sort(Sort sort) {
|
||||
return new SortOperation(sort);
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method to create a new {@link SortOperation} for the given sort {@link Direction} and {@code fields}.
|
||||
*
|
||||
* @param direction must not be {@literal null}.
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static SortOperation sort(Direction direction, String... fields) {
|
||||
return new SortOperation(new Sort(direction, fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link SkipOperation} skipping the given number of elements.
|
||||
*
|
||||
* @param elementsToSkip must not be less than zero.
|
||||
* @return
|
||||
*/
|
||||
public static SkipOperation skip(int elementsToSkip) {
|
||||
return new SkipOperation(elementsToSkip);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link LimitOperation} limiting the result to the given number of elements.
|
||||
*
|
||||
* @param maxElements must not be less than zero.
|
||||
* @return
|
||||
*/
|
||||
public static LimitOperation limit(long maxElements) {
|
||||
return new LimitOperation(maxElements);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link MatchOperation} using the given {@link Criteria}.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static MatchOperation match(Criteria criteria) {
|
||||
return new MatchOperation(criteria);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Fields} instance for the given field names.
|
||||
*
|
||||
* @see Fields#fields(String...)
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static Fields fields(String... fields) {
|
||||
return Fields.fields(fields);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Fields} instance from the given field name and target reference.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
* @param target must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
public static Fields bind(String name, String target) {
|
||||
return Fields.from(field(name, target));
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts this {@link Aggregation} specification to a {@link DBObject}.
|
||||
*
|
||||
* @param inputCollectionName the name of the input collection
* @param rootContext the {@link AggregationOperationContext} used to resolve field references for the individual operations
* @return the {@code DBObject} representing this aggregation
|
||||
*/
|
||||
public DBObject toDbObject(String inputCollectionName, AggregationOperationContext rootContext) {
|
||||
|
||||
AggregationOperationContext context = rootContext;
|
||||
List<DBObject> operationDocuments = new ArrayList<DBObject>(operations.size());
|
||||
|
||||
for (AggregationOperation operation : operations) {
|
||||
|
||||
operationDocuments.add(operation.toDBObject(context));
|
||||
|
||||
if (operation instanceof FieldsExposingAggregationOperation) {
|
||||
FieldsExposingAggregationOperation exposedFieldsOperation = (FieldsExposingAggregationOperation) operation;
|
||||
context = new ExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields());
|
||||
}
|
||||
}
|
||||
|
||||
DBObject command = new BasicDBObject("aggregate", inputCollectionName);
|
||||
command.put("pipeline", operationDocuments);
|
||||
|
||||
return command;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return SerializationUtils
|
||||
.serializeToJsonSafely(toDbObject("__collection__", new NoOpAggregationOperationContext()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple {@link AggregationOperationContext} that just returns {@link FieldReference}s as is.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class NoOpAggregationOperationContext implements AggregationOperationContext {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(com.mongodb.DBObject)
|
||||
*/
|
||||
@Override
|
||||
public DBObject getMappedObject(DBObject dbObject) {
|
||||
return dbObject;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.ExposedFields.AvailableField)
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(Field field) {
|
||||
return new FieldReference(new ExposedField(field, true));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(String name) {
|
||||
return new FieldReference(new ExposedField(new AggregationField(name), true));
|
||||
}
|
||||
}
}
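Because an Aggregation only describes the pipeline, the command it produces can be inspected without touching a database. A small sketch using the static factory methods above with the default (no-op) context; collection and field names are made up:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.query.Criteria;

import com.mongodb.DBObject;

public class PipelineInspection {

	public static void main(String[] args) {

		Aggregation aggregation = newAggregation( //
				match(Criteria.where("year").gte(2012)), //
				sort(Direction.DESC, "year"), //
				unwind("tags"), //
				group("tags"), //
				limit(5));

		// Renders { "aggregate" : "articles" , "pipeline" : [ { "$match" : ... }, ... ] }
		DBObject command = aggregation.toDbObject("articles", Aggregation.DEFAULT_CONTEXT);
		System.out.println(command);
	}
}
```

toString() renders the same structure against a placeholder collection name, which is handy for logging.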
@@ -0,0 +1,82 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationExpressionTransformer.AggregationExpressionTransformationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.data.mongodb.core.spel.ExpressionNode;
|
||||
import org.springframework.data.mongodb.core.spel.ExpressionTransformationContextSupport;
|
||||
import org.springframework.data.mongodb.core.spel.ExpressionTransformer;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Interface to type an {@link ExpressionTransformer} to the contained
|
||||
* {@link AggregationExpressionTransformationContext}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
interface AggregationExpressionTransformer extends
|
||||
ExpressionTransformer<AggregationExpressionTransformationContext<ExpressionNode>> {
|
||||
|
||||
/**
|
||||
* A special {@link ExpressionTransformationContextSupport} to be aware of the {@link AggregationOperationContext}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public static class AggregationExpressionTransformationContext<T extends ExpressionNode> extends
|
||||
ExpressionTransformationContextSupport<T> {
|
||||
|
||||
private final AggregationOperationContext aggregationContext;
|
||||
|
||||
/**
|
||||
* Creates an {@link AggregationExpressionTransformationContext}.
|
||||
*
|
||||
* @param currentNode must not be {@literal null}.
|
||||
* @param parentNode
|
||||
* @param previousOperationObject
|
||||
* @param aggregationContext must not be {@literal null}.
|
||||
*/
|
||||
public AggregationExpressionTransformationContext(T currentNode, ExpressionNode parentNode,
|
||||
DBObject previousOperationObject, AggregationOperationContext context) {
|
||||
|
||||
super(currentNode, parentNode, previousOperationObject);
|
||||
|
||||
Assert.notNull(context, "AggregationOperationContext must not be null!");
|
||||
this.aggregationContext = context;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the underlying {@link AggregationOperationContext}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public AggregationOperationContext getAggregationContext() {
|
||||
return aggregationContext;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link FieldReference} for the current {@link ExpressionNode}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public FieldReference getFieldReference() {
|
||||
return aggregationContext.getReference(getCurrentNode().getName());
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,37 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Represents one single operation in an aggregation pipeline.
|
||||
*
|
||||
* @author Sebastian Herold
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public interface AggregationOperation {
|
||||
|
||||
/**
|
||||
* Turns the {@link AggregationOperation} into a {@link DBObject} by using the given
|
||||
* {@link AggregationOperationContext}.
|
||||
*
|
||||
* @return the DBObject
|
||||
*/
|
||||
DBObject toDBObject(AggregationOperationContext context);
}
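Since a pipeline is just a list of AggregationOperations, stages that have no dedicated helper yet can be contributed by implementing this interface directly. A minimal sketch that emits a literal $limit stage (the bundled LimitOperation already covers this; it is used here only because its shape is easy to verify):

```java
import org.springframework.data.mongodb.core.aggregation.AggregationOperation;
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class LiteralLimitOperation implements AggregationOperation {

	private final int maxElements;

	public LiteralLimitOperation(int maxElements) {
		this.maxElements = maxElements;
	}

	@Override
	public DBObject toDBObject(AggregationOperationContext context) {
		// Render the raw pipeline stage; the context would only be needed to resolve field references.
		return new BasicDBObject("$limit", maxElements);
	}
}
```

An instance of such an operation can be handed to Aggregation.newAggregation(..) next to the provided ones.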
@@ -0,0 +1,55 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* The context for an {@link AggregationOperation}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public interface AggregationOperationContext {
|
||||
|
||||
/**
|
||||
* Returns the mapped {@link DBObject}, potentially converting the source considering mapping metadata etc.
|
||||
*
|
||||
* @param dbObject will never be {@literal null}.
|
||||
* @return must not be {@literal null}.
|
||||
*/
|
||||
DBObject getMappedObject(DBObject dbObject);
|
||||
|
||||
/**
|
||||
* Returns a {@link FieldReference} for the given field or {@literal null} if the context does not expose the given
|
||||
* field.
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
FieldReference getReference(Field field);
|
||||
|
||||
/**
|
||||
* Returns the {@link FieldReference} for the field with the given name or {@literal null} if the context does not
|
||||
* expose a field with the given name.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
FieldReference getReference(String name);
|
||||
}
|
||||
@@ -0,0 +1,98 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Collects the results of executing an aggregation operation.
|
||||
*
|
||||
* @author Tobias Trelle
|
||||
* @author Oliver Gierke
|
||||
* @param <T> the type to which the results are mapped.
|
||||
* @since 1.3
|
||||
*/
|
||||
public class AggregationResults<T> implements Iterable<T> {
|
||||
|
||||
private final List<T> mappedResults;
|
||||
private final DBObject rawResults;
|
||||
private final String serverUsed;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AggregationResults} instance from the given mapped and raw results.
|
||||
*
|
||||
* @param mappedResults must not be {@literal null}.
|
||||
* @param rawResults must not be {@literal null}.
|
||||
*/
|
||||
public AggregationResults(List<T> mappedResults, DBObject rawResults) {
|
||||
|
||||
Assert.notNull(mappedResults);
|
||||
Assert.notNull(rawResults);
|
||||
|
||||
this.mappedResults = Collections.unmodifiableList(mappedResults);
|
||||
this.rawResults = rawResults;
|
||||
this.serverUsed = parseServerUsed();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the aggregation results.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public List<T> getMappedResults() {
|
||||
return mappedResults;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the unique mapped result or {@literal null} if there is none. Expects at most one result.
|
||||
*
|
||||
* @return
|
||||
* @throws IllegalArgumentException in case more than one result is available.
|
||||
*/
|
||||
public T getUniqueMappedResult() {
|
||||
Assert.isTrue(mappedResults.size() < 2, "Expected unique result or null, but got more than one!");
|
||||
return mappedResults.size() == 1 ? mappedResults.get(0) : null;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Iterable#iterator()
|
||||
*/
|
||||
public Iterator<T> iterator() {
|
||||
return mappedResults.iterator();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the server that has been used to perform the aggregation.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public String getServerUsed() {
|
||||
return serverUsed;
|
||||
}
|
||||
|
||||
private String parseServerUsed() {
|
||||
|
||||
Object object = rawResults.get("serverUsed");
|
||||
return object instanceof String ? (String) object : null;
|
||||
}
}
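Consuming the results returned from MongoTemplate.aggregate(..) can rely on the class being Iterable as well as on the accessors above; TagCount is a hypothetical result type:

```java
import org.springframework.data.mongodb.core.aggregation.AggregationResults;

public class ResultConsumptionSketch {

	public void print(AggregationResults<TagCount> results) {

		// Iterate the mapped documents directly ...
		for (TagCount tagCount : results) {
			System.out.println(tagCount.tag + ": " + tagCount.n);
		}

		// ... or ask for the full list or the server that executed the aggregation.
		System.out.println(results.getMappedResults().size());
		System.out.println(results.getServerUsed());
	}

	// Hypothetical result type, for illustration only.
	static class TagCount {
		String tag;
		int n;
	}
}
```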
@@ -0,0 +1,402 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.CompositeIterator;
|
||||
|
||||
/**
|
||||
* Value object to capture the fields exposed by an {@link AggregationOperation}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @since 1.3
|
||||
*/
|
||||
public class ExposedFields implements Iterable<ExposedField> {
|
||||
|
||||
private static final List<ExposedField> NO_FIELDS = Collections.emptyList();
|
||||
private static final ExposedFields EMPTY = new ExposedFields(NO_FIELDS, NO_FIELDS);
|
||||
|
||||
private final List<ExposedField> originalFields;
|
||||
private final List<ExposedField> syntheticFields;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFields} instance from the given {@link ExposedField}s.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static ExposedFields from(ExposedField... fields) {
|
||||
return from(Arrays.asList(fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFields} instance from the given {@link ExposedField}s.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private static ExposedFields from(List<ExposedField> fields) {
|
||||
|
||||
ExposedFields result = EMPTY;
|
||||
|
||||
for (ExposedField field : fields) {
|
||||
result = result.and(field);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates synthetic {@link ExposedFields} from the given {@link Fields}.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static ExposedFields synthetic(Fields fields) {
|
||||
return createFields(fields, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates non-synthetic {@link ExposedFields} from the given {@link Fields}.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static ExposedFields nonSynthetic(Fields fields) {
|
||||
return createFields(fields, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFields} instance for the given fields, either synthetic or non-synthetic.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @param synthetic
|
||||
* @return
|
||||
*/
|
||||
private static ExposedFields createFields(Fields fields, boolean synthetic) {
|
||||
|
||||
Assert.notNull(fields, "Fields must not be null!");
|
||||
List<ExposedField> result = new ArrayList<ExposedField>();
|
||||
|
||||
for (Field field : fields) {
|
||||
result.add(new ExposedField(field, synthetic));
|
||||
}
|
||||
|
||||
return ExposedFields.from(result);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFields} with the given originals and synthetics.
|
||||
*
|
||||
* @param originals must not be {@literal null}.
|
||||
* @param synthetic must not be {@literal null}.
|
||||
*/
|
||||
private ExposedFields(List<ExposedField> originals, List<ExposedField> synthetic) {
|
||||
|
||||
this.originalFields = originals;
|
||||
this.syntheticFields = synthetic;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFields} adding the given {@link ExposedField}.
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public ExposedFields and(ExposedField field) {
|
||||
|
||||
Assert.notNull(field, "Exposed field must not be null!");
|
||||
|
||||
ArrayList<ExposedField> result = new ArrayList<ExposedField>();
|
||||
result.addAll(field.synthetic ? syntheticFields : originalFields);
|
||||
result.add(field);
|
||||
|
||||
return new ExposedFields(field.synthetic ? originalFields : result, field.synthetic ? result : syntheticFields);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the field with the given name or {@literal null} if no field with the given name is available.
|
||||
*
|
||||
* @param name
|
||||
* @return
|
||||
*/
|
||||
public ExposedField getField(String name) {
|
||||
|
||||
for (ExposedField field : this) {
|
||||
if (field.canBeReferredToBy(name)) {
|
||||
return field;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link ExposedFields} exposes no non-synthetic fields at all.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
boolean exposesNoNonSyntheticFields() {
|
||||
return originalFields.isEmpty();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link ExposedFields} exposes a single non-synthetic field only.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
boolean exposesSingleNonSyntheticFieldOnly() {
|
||||
return originalFields.size() == 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link ExposedFields} exposes no fields at all.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
boolean exposesNoFields() {
|
||||
return exposedFieldsCount() == 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link ExposedFields} exposes a single field only.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
boolean exposesSingleFieldOnly() {
|
||||
return exposedFieldsCount() == 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return
|
||||
*/
|
||||
private int exposedFieldsCount() {
|
||||
return originalFields.size() + syntheticFields.size();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Iterable#iterator()
|
||||
*/
|
||||
@Override
|
||||
public Iterator<ExposedField> iterator() {
|
||||
|
||||
CompositeIterator<ExposedField> iterator = new CompositeIterator<ExposedField>();
|
||||
iterator.add(syntheticFields.iterator());
|
||||
iterator.add(originalFields.iterator());
|
||||
|
||||
return iterator;
|
||||
}
|
||||
|
||||
/**
|
||||
* A single exposed field.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
static class ExposedField implements Field {
|
||||
|
||||
private final boolean synthetic;
|
||||
private final Field field;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedField} with the given key.
|
||||
*
|
||||
* @param key must not be {@literal null} or empty.
|
||||
* @param synthetic whether the exposed field is synthetic.
|
||||
*/
|
||||
public ExposedField(String key, boolean synthetic) {
|
||||
this(Fields.field(key), synthetic);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedField} for the given {@link Field}.
|
||||
*
|
||||
* @param delegate must not be {@literal null}.
|
||||
* @param synthetic whether the exposed field is synthetic.
|
||||
*/
|
||||
public ExposedField(Field delegate, boolean synthetic) {
|
||||
|
||||
this.field = delegate;
|
||||
this.synthetic = synthetic;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.Field#getKey()
|
||||
*/
|
||||
@Override
|
||||
public String getName() {
|
||||
return field.getName();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.Field#getTarget()
|
||||
*/
|
||||
@Override
|
||||
public String getTarget() {
|
||||
return field.getTarget();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.Field#isAliased()
|
||||
*/
|
||||
@Override
|
||||
public boolean isAliased() {
|
||||
return field.isAliased();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the field can be referred to using the given name.
|
||||
*
|
||||
* @param input
|
||||
* @return
|
||||
*/
|
||||
public boolean canBeReferredToBy(String input) {
|
||||
return getTarget().equals(input);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("AggregationField: %s, synthetic: %s", field, synthetic);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!(obj instanceof ExposedField)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
ExposedField that = (ExposedField) obj;
|
||||
|
||||
return this.field.equals(that.field) && this.synthetic == that.synthetic;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
|
||||
int result = 17;
|
||||
|
||||
result += 31 * field.hashCode();
|
||||
result += 31 * (synthetic ? 0 : 1);
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A reference to an {@link ExposedField}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
static class FieldReference {
|
||||
|
||||
private final ExposedField field;
|
||||
|
||||
/**
|
||||
* Creates a new {@link FieldReference} for the given {@link ExposedField}.
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
*/
|
||||
public FieldReference(ExposedField field) {
|
||||
|
||||
Assert.notNull(field, "ExposedField must not be null!");
|
||||
this.field = field;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return
|
||||
*/
|
||||
public boolean isSynthetic() {
|
||||
return field.synthetic;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the raw, unqualified reference, i.e. the field reference without a {@literal $} prefix.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public String getRaw() {
|
||||
|
||||
String target = field.getTarget();
|
||||
return field.synthetic ? target : String.format("%s.%s", Fields.UNDERSCORE_ID, target);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("$%s", getRaw());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!(obj instanceof FieldReference)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
FieldReference that = (FieldReference) obj;
|
||||
|
||||
return this.field.equals(that.field);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return field.hashCode();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,80 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* {@link AggregationOperationContext} that combines the available field references from a given
|
||||
* {@code AggregationOperationContext} and an {@link FieldsExposingAggregationOperation}.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @since 1.4
|
||||
*/
|
||||
class ExposedFieldsAggregationOperationContext implements AggregationOperationContext {
|
||||
|
||||
private final ExposedFields exposedFields;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFieldsAggregationOperationContext} from the given {@link ExposedFields}.
|
||||
*
|
||||
* @param exposedFields must not be {@literal null}.
|
||||
*/
|
||||
public ExposedFieldsAggregationOperationContext(ExposedFields exposedFields) {
|
||||
|
||||
Assert.notNull(exposedFields, "ExposedFields must not be null!");
|
||||
this.exposedFields = exposedFields;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(com.mongodb.DBObject)
|
||||
*/
|
||||
@Override
|
||||
public DBObject getMappedObject(DBObject dbObject) {
|
||||
return dbObject;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.ExposedFields.AvailableField)
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(Field field) {
|
||||
return getReference(field.getTarget());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(String name) {
|
||||
|
||||
ExposedField field = exposedFields.getField(name);
|
||||
|
||||
if (field != null) {
|
||||
return new FieldReference(field);
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Invalid reference '%s'!", name));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,46 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
/**
|
||||
* Abstraction for a field.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public interface Field {
|
||||
|
||||
/**
|
||||
* Returns the name of the field.
|
||||
*
|
||||
* @return must not be {@literal null}.
|
||||
*/
|
||||
String getName();
|
||||
|
||||
/**
|
||||
* Returns the target of the field. If no explicit target is available, {@link #getName()} should be returned.
|
||||
*
|
||||
* @return must not be {@literal null}.
|
||||
*/
|
||||
String getTarget();
|
||||
|
||||
/**
|
||||
* Returns whether the Field is aliased, which means it has a name set different from the target.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
boolean isAliased();
}
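Field instances are usually obtained through the Fields value type added below rather than implemented by hand. A quick sketch of the naming and aliasing semantics, based on the Fields API in this changeset (the concrete field names are made up):

```java
import static org.springframework.data.mongodb.core.aggregation.Fields.*;

import org.springframework.data.mongodb.core.aggregation.Field;
import org.springframework.data.mongodb.core.aggregation.Fields;

public class FieldsSketch {

	public static void main(String[] args) {

		// Plain fields: name and target are the same.
		Fields plain = fields("firstname", "lastname");

		// Aliased field: exposed as "name" but backed by the "lastname" property.
		Field aliased = field("name", "lastname");
		System.out.println(aliased.getName() + " -> " + aliased.getTarget() + ", aliased: " + aliased.isAliased());

		// and(..) returns a new Fields instance that also contains the additional field.
		Fields combined = plain.and(aliased);

		for (Field field : combined) {
			System.out.println(field.getName());
		}
	}
}
```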
@@ -0,0 +1,293 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Value object to capture a list of {@link Field} instances.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public class Fields implements Iterable<Field> {
|
||||
|
||||
private static final String AMBIGUOUS_EXCEPTION = "Found two fields both using '%s' as name: %s and %s! Please "
|
||||
+ "customize your field definitions to get to unique field names!";
|
||||
|
||||
public static String UNDERSCORE_ID = "_id";
|
||||
public static String UNDERSCORE_ID_REF = "$_id";
|
||||
|
||||
private final List<Field> fields;
|
||||
|
||||
/**
|
||||
* Creates a new {@link Fields} instance from the given {@link Fields}.
|
||||
*
|
||||
* @param fields must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
public static Fields from(Field... fields) {
|
||||
|
||||
Assert.notNull(fields, "Fields must not be null!");
|
||||
return new Fields(Arrays.asList(fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Fields} instance for {@link Field}s with the given names.
|
||||
*
|
||||
* @param names must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static Fields fields(String... names) {
|
||||
|
||||
Assert.notNull(names, "Field names must not be null!");
|
||||
|
||||
List<Field> fields = new ArrayList<Field>();
|
||||
|
||||
for (String name : names) {
|
||||
fields.add(field(name));
|
||||
}
|
||||
|
||||
return new Fields(fields);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link Field} with the given name.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
public static Field field(String name) {
|
||||
return new AggregationField(name);
|
||||
}
|
||||
|
||||
public static Field field(String name, String target) {
|
||||
Assert.hasText(target, "Target must not be null or empty!");
|
||||
return new AggregationField(name, target);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Fields} instance using the given {@link Field}s.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
*/
|
||||
private Fields(List<Field> fields) {
|
||||
|
||||
Assert.notNull(fields, "Fields must not be null!");
|
||||
|
||||
this.fields = verify(fields);
|
||||
}
|
||||
|
||||
private static final List<Field> verify(List<Field> fields) {
|
||||
|
||||
Map<String, Field> reference = new HashMap<String, Field>();
|
||||
|
||||
for (Field field : fields) {
|
||||
|
||||
String name = field.getName();
|
||||
Field found = reference.get(name);
|
||||
|
||||
if (found != null) {
|
||||
throw new IllegalArgumentException(String.format(AMBIGUOUS_EXCEPTION, name, found, field));
|
||||
}
|
||||
|
||||
reference.put(name, field);
|
||||
}
|
||||
|
||||
return fields;
|
||||
}
|
||||
|
||||
private Fields(Fields existing, Field tail) {
|
||||
|
||||
this.fields = new ArrayList<Field>(existing.fields.size() + 1);
|
||||
this.fields.addAll(existing.fields);
|
||||
this.fields.add(tail);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Fields} instance with a new {@link Field} of the given name added.
|
||||
*
|
||||
* @param name must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public Fields and(String name) {
|
||||
return and(new AggregationField(name));
|
||||
}
|
||||
|
||||
public Fields and(String name, String target) {
|
||||
return and(new AggregationField(name, target));
|
||||
}
|
||||
|
||||
public Fields and(Field field) {
|
||||
return new Fields(this, field);
|
||||
}
|
||||
|
||||
public Fields and(Fields fields) {
|
||||
|
||||
Fields result = this;
|
||||
|
||||
for (Field field : fields) {
|
||||
result = result.and(field);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public Field getField(String name) {
|
||||
|
||||
for (Field field : fields) {
|
||||
if (field.getName().equals(name)) {
|
||||
return field;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Iterable#iterator()
|
||||
*/
|
||||
@Override
|
||||
public Iterator<Field> iterator() {
|
||||
return fields.iterator();
|
||||
}
|
||||
|
||||
/**
|
||||
* Value object to encapsulate a field in an aggregation operation.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
static class AggregationField implements Field {
|
||||
|
||||
private final String name;
|
||||
private final String target;
|
||||
|
||||
/**
|
||||
* Creates an aggregation field with the given name. As no target is set explicitly, the name will be used as target
* as well.
|
||||
*
|
||||
* @param key
|
||||
*/
|
||||
public AggregationField(String key) {
|
||||
this(key, null);
|
||||
}
|
||||
|
||||
public AggregationField(String name, String target) {
|
||||
|
||||
String nameToSet = cleanUp(name);
|
||||
String targetToSet = cleanUp(target);
|
||||
|
||||
Assert.hasText(nameToSet, "AggregationField name must not be null or empty!");
|
||||
|
||||
if (target == null && name.contains(".")) {
|
||||
this.name = nameToSet.substring(nameToSet.indexOf(".") + 1);
|
||||
this.target = nameToSet;
|
||||
} else {
|
||||
this.name = nameToSet;
|
||||
this.target = targetToSet;
|
||||
}
|
||||
}
|
||||
|
||||
private static final String cleanUp(String source) {
|
||||
|
||||
if (source == null) {
|
||||
return source;
|
||||
}
|
||||
|
||||
int dollarIndex = source.lastIndexOf('$');
|
||||
return dollarIndex == -1 ? source : source.substring(dollarIndex + 1);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.Field#getKey()
|
||||
*/
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.Field#getAlias()
|
||||
*/
|
||||
public String getTarget() {
|
||||
return StringUtils.hasText(this.target) ? this.target : this.name;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.Field#isAliased()
|
||||
*/
|
||||
@Override
|
||||
public boolean isAliased() {
|
||||
return !getName().equals(getTarget());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("AggregationField - name: %s, target: %s", name, target);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!(obj instanceof AggregationField)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
AggregationField that = (AggregationField) obj;
|
||||
|
||||
return this.name.equals(that.name) && ObjectUtils.nullSafeEquals(this.target, that.target);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
|
||||
int result = 17;
|
||||
|
||||
result += 31 * name.hashCode();
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(target);
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
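The `Fields` value object above is meant to be built through its static factory methods. A minimal, illustrative usage sketch (field names are invented; the static imports shown are assumptions of typical usage, not part of this commit):

```java
import static org.springframework.data.mongodb.core.aggregation.Fields.*;

import org.springframework.data.mongodb.core.aggregation.Field;
import org.springframework.data.mongodb.core.aggregation.Fields;

class FieldsUsageSketch {

	public static void main(String[] args) {

		// plain names: name and target are identical, so the fields are not aliased
		Fields simple = fields("firstName", "age");

		// aliased field: exposed as "zip" but backed by the "address.zipCode" property
		Fields withAlias = simple.and(field("zip", "address.zipCode"));

		for (Field field : withAlias) {
			System.out.printf("%s -> %s (aliased: %s)%n", field.getName(), field.getTarget(), field.isAliased());
		}
	}
}
```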
@@ -0,0 +1,32 @@
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.aggregation;

/**
 * {@link AggregationOperation} that exposes new {@link ExposedFields} that can be used for later aggregation pipeline
 * {@code AggregationOperation}s.
 *
 * @author Thomas Darimont
 */
public interface FieldsExposingAggregationOperation extends AggregationOperation {

	/**
	 * Returns the fields exposed by the {@link AggregationOperation}.
	 *
	 * @return will never be {@literal null}.
	 */
	ExposedFields getFields();
}
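An operation implements this interface when later pipeline stages need to resolve the fields it produces. The following sketch is purely hypothetical (the class, the referenced field names and the visibility of `ExposedFields.synthetic(..)` within the package are assumptions) and only illustrates the contract:

```java
package org.springframework.data.mongodb.core.aggregation;

import java.util.Arrays;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

/**
 * Hypothetical stage exposing a single synthetic field "total" so that subsequent
 * operations (e.g. a $sort) can reference it by name.
 */
class TotalExposingOperation implements FieldsExposingAggregationOperation {

	@Override
	public ExposedFields getFields() {
		return ExposedFields.synthetic(Fields.fields("total"));
	}

	@Override
	public DBObject toDBObject(AggregationOperationContext context) {

		// a trivial $project that sums two referenced fields into "total"
		DBObject sum = new BasicDBObject("$add",
				Arrays.asList(context.getReference("net").toString(), context.getReference("tax").toString()));
		return new BasicDBObject("$project", new BasicDBObject("total", sum));
	}
}
```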
@@ -0,0 +1,46 @@
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.aggregation;

import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.util.Assert;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

/**
 * @author Thomas Darimont
 * @since 1.3
 */
public class GeoNearOperation implements AggregationOperation {

	private final NearQuery nearQuery;

	public GeoNearOperation(NearQuery nearQuery) {

		Assert.notNull(nearQuery);
		this.nearQuery = nearQuery;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
	 */
	@Override
	public DBObject toDBObject(AggregationOperationContext context) {
		return new BasicDBObject("$geoNear", context.getMappedObject(nearQuery.toDBObject()));
	}
}
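A hedged usage sketch for the new `$geoNear` stage, wiring a `NearQuery` into a pipeline. Collection name and coordinates are invented, and `MongoTemplate#aggregate(Aggregation, String, Class)` is assumed to be available as in the rest of this release line:

```java
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.aggregation.GeoNearOperation;
import org.springframework.data.mongodb.core.query.NearQuery;

import com.mongodb.DBObject;

class GeoNearSketch {

	static AggregationResults<DBObject> nearbyVenues(MongoTemplate template) {

		// documents close to the given point, within 0.05 in the units of the geo index
		NearQuery nearQuery = NearQuery.near(-73.99171, 40.738868).maxDistance(0.05);

		// the operation is instantiated directly here, exactly as introduced above
		Aggregation aggregation = Aggregation.newAggregation(new GeoNearOperation(nearQuery));

		return template.aggregate(aggregation, "venues", DBObject.class);
	}
}
```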
@@ -0,0 +1,375 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Encapsulates the aggregation framework {@code $group}-operation.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/aggregation/group/#stage._S_group
|
||||
* @author Sebastian Herold
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public class GroupOperation implements FieldsExposingAggregationOperation {
|
||||
|
||||
/**
|
||||
* Holds the non-synthetic fields which are the fields of the group-id structure.
|
||||
*/
|
||||
private final ExposedFields idFields;
|
||||
|
||||
private final List<Operation> operations;
|
||||
|
||||
/**
|
||||
* Creates a new {@link GroupOperation} including the given {@link Fields}.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
*/
|
||||
public GroupOperation(Fields fields) {
|
||||
|
||||
this.idFields = ExposedFields.nonSynthetic(fields);
|
||||
this.operations = new ArrayList<Operation>();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GroupOperation} from the given {@link GroupOperation}.
|
||||
*
|
||||
* @param groupOperation must not be {@literal null}.
|
||||
*/
|
||||
protected GroupOperation(GroupOperation groupOperation) {
|
||||
this(groupOperation, Collections.<Operation> emptyList());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GroupOperation} from the given {@link GroupOperation} and the given {@link Operation}s.
|
||||
*
|
||||
* @param groupOperation
|
||||
* @param nextOperations
|
||||
*/
|
||||
private GroupOperation(GroupOperation groupOperation, List<Operation> nextOperations) {
|
||||
|
||||
Assert.notNull(groupOperation, "GroupOperation must not be null!");
|
||||
Assert.notNull(nextOperations, "NextOperations must not be null!");
|
||||
|
||||
this.idFields = groupOperation.idFields;
|
||||
this.operations = new ArrayList<Operation>(nextOperations.size() + 1);
|
||||
this.operations.addAll(groupOperation.operations);
|
||||
this.operations.addAll(nextOperations);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GroupOperation} from the current one adding the given {@link Operation}.
|
||||
*
|
||||
* @param operation must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
protected GroupOperation and(Operation operation) {
|
||||
return new GroupOperation(this, Arrays.asList(operation));
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder for {@link GroupOperation}s on a field.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class GroupOperationBuilder {
|
||||
|
||||
private final GroupOperation groupOperation;
|
||||
private final Operation operation;
|
||||
|
||||
/**
|
||||
* Creates a new {@link GroupOperationBuilder} from the given {@link GroupOperation} and {@link Operation}.
|
||||
*
|
||||
* @param groupOperation
|
||||
* @param operation
|
||||
*/
|
||||
private GroupOperationBuilder(GroupOperation groupOperation, Operation operation) {
|
||||
|
||||
Assert.notNull(groupOperation, "GroupOperation must not be null!");
|
||||
Assert.notNull(operation, "Operation must not be null!");
|
||||
|
||||
this.groupOperation = groupOperation;
|
||||
this.operation = operation;
|
||||
}
|
||||
|
||||
/**
|
||||
* Allows to specify an alias for the new operation.
|
||||
*
|
||||
* @param alias
|
||||
* @return
|
||||
*/
|
||||
public GroupOperation as(String alias) {
|
||||
return this.groupOperation.and(operation.withAlias(alias));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@link GroupOperationBuilder} for a {@code $sum}-expression.
|
||||
* <p>
|
||||
* Count expressions are emulated via {@code $sum: 1}.
|
||||
* <p>
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public GroupOperationBuilder count() {
|
||||
return newBuilder(GroupOps.SUM, null, 1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@link GroupOperationBuilder} for a {@code $sum}-expression for the given field-reference.
|
||||
*
|
||||
* @param reference
|
||||
* @return
|
||||
*/
|
||||
public GroupOperationBuilder sum(String reference) {
|
||||
return sum(reference, null);
|
||||
}
|
||||
|
||||
private GroupOperationBuilder sum(String reference, Object value) {
|
||||
return newBuilder(GroupOps.SUM, reference, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@link GroupOperationBuilder} for an {@code $addToSet}-expression for the given field-reference.
|
||||
*
|
||||
* @param reference
|
||||
* @return
|
||||
*/
|
||||
public GroupOperationBuilder addToSet(String reference) {
|
||||
return addToSet(reference, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@link GroupOperationBuilder} for an {@code $addToSet}-expression for the given value.
|
||||
*
|
||||
* @param value
|
||||
* @return
|
||||
*/
|
||||
public GroupOperationBuilder addToSet(Object value) {
|
||||
return addToSet(null, value);
|
||||
}
|
||||
|
||||
private GroupOperationBuilder addToSet(String reference, Object value) {
|
||||
return newBuilder(GroupOps.ADD_TO_SET, reference, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@link GroupOperationBuilder} for an {@code $last}-expression for the given field-reference.
|
||||
*
|
||||
* @param reference
|
||||
* @return
|
||||
*/
|
||||
public GroupOperationBuilder last(String reference) {
|
||||
return newBuilder(GroupOps.LAST, reference, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@link GroupOperationBuilder} for a {@code $first}-expression for the given field-reference.
|
||||
*
|
||||
* @param reference
|
||||
* @return
|
||||
*/
|
||||
public GroupOperationBuilder first(String reference) {
|
||||
return newBuilder(GroupOps.FIRST, reference, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@link GroupOperationBuilder} for an {@code $avg}-expression for the given field-reference.
|
||||
*
|
||||
* @param reference
|
||||
* @return
|
||||
*/
|
||||
public GroupOperationBuilder avg(String reference) {
|
||||
return newBuilder(GroupOps.AVG, reference, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@link GroupOperationBuilder} for an {@code $push}-expression for the given field-reference.
|
||||
*
|
||||
* @param reference
|
||||
* @return
|
||||
*/
|
||||
public GroupOperationBuilder push(String reference) {
|
||||
return push(reference, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@link GroupOperationBuilder} for an {@code $push}-expression for the given value.
|
||||
*
|
||||
* @param value
|
||||
* @return
|
||||
*/
|
||||
public GroupOperationBuilder push(Object value) {
|
||||
return push(null, value);
|
||||
}
|
||||
|
||||
private GroupOperationBuilder push(String reference, Object value) {
|
||||
return newBuilder(GroupOps.PUSH, reference, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@link GroupOperationBuilder} for a {@code $min}-expression for the given field-reference.
|
||||
*
|
||||
* @param reference
|
||||
* @return
|
||||
*/
|
||||
public GroupOperationBuilder min(String reference) {
|
||||
return newBuilder(GroupOps.MIN, reference, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@link GroupOperationBuilder} for a {@code $max}-expression for the given field-reference.
|
||||
*
|
||||
* @param reference
|
||||
* @return
|
||||
*/
|
||||
public GroupOperationBuilder max(String reference) {
|
||||
return newBuilder(GroupOps.MAX, reference, null);
|
||||
}
|
||||
|
||||
private GroupOperationBuilder newBuilder(Keyword keyword, String reference, Object value) {
|
||||
return new GroupOperationBuilder(this, new Operation(keyword, null, reference, value));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getFields()
|
||||
*/
|
||||
@Override
|
||||
public ExposedFields getFields() {
|
||||
|
||||
ExposedFields fields = this.idFields.and(new ExposedField(Fields.UNDERSCORE_ID, true));
|
||||
|
||||
for (Operation operation : operations) {
|
||||
fields = fields.and(operation.asField());
|
||||
}
|
||||
|
||||
return fields;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public com.mongodb.DBObject toDBObject(AggregationOperationContext context) {
|
||||
|
||||
BasicDBObject operationObject = new BasicDBObject();
|
||||
|
||||
if (idFields.exposesNoNonSyntheticFields()) {
|
||||
|
||||
operationObject.put(Fields.UNDERSCORE_ID, null);
|
||||
|
||||
} else if (idFields.exposesSingleNonSyntheticFieldOnly()) {
|
||||
|
||||
FieldReference reference = context.getReference(idFields.iterator().next());
|
||||
operationObject.put(Fields.UNDERSCORE_ID, reference.toString());
|
||||
|
||||
} else {
|
||||
|
||||
BasicDBObject inner = new BasicDBObject();
|
||||
|
||||
for (ExposedField field : idFields) {
|
||||
FieldReference reference = context.getReference(field);
|
||||
inner.put(field.getName(), reference.toString());
|
||||
}
|
||||
|
||||
operationObject.put(Fields.UNDERSCORE_ID, inner);
|
||||
}
|
||||
|
||||
for (Operation operation : operations) {
|
||||
operationObject.putAll(operation.toDBObject(context));
|
||||
}
|
||||
|
||||
return new BasicDBObject("$group", operationObject);
|
||||
}
|
||||
|
||||
interface Keyword {
|
||||
|
||||
String toString();
|
||||
}
|
||||
|
||||
private static enum GroupOps implements Keyword {
|
||||
|
||||
SUM, LAST, FIRST, PUSH, AVG, MIN, MAX, ADD_TO_SET, COUNT;
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
|
||||
String[] parts = name().split("_");
|
||||
|
||||
StringBuilder builder = new StringBuilder();
|
||||
|
||||
for (String part : parts) {
|
||||
String lowerCase = part.toLowerCase(Locale.US);
|
||||
builder.append(builder.length() == 0 ? lowerCase : StringUtils.capitalize(lowerCase));
|
||||
}
|
||||
|
||||
return "$" + builder.toString();
|
||||
}
|
||||
}
|
||||
|
||||
static class Operation implements AggregationOperation {
|
||||
|
||||
private final Keyword op;
|
||||
private final String key;
|
||||
private final String reference;
|
||||
private final Object value;
|
||||
|
||||
public Operation(Keyword op, String key, String reference, Object value) {
|
||||
|
||||
this.op = op;
|
||||
this.key = key;
|
||||
this.reference = reference;
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
public Operation withAlias(String key) {
|
||||
return new Operation(op, key, reference, value);
|
||||
}
|
||||
|
||||
public ExposedField asField() {
|
||||
return new ExposedField(key, true);
|
||||
}
|
||||
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
return new BasicDBObject(key, new BasicDBObject(op.toString(), getValue(context)));
|
||||
}
|
||||
|
||||
public Object getValue(AggregationOperationContext context) {
|
||||
return reference == null ? value : context.getReference(reference).toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Operation [op=" + op + ", key=" + key + ", reference=" + reference + ", value=" + value + "]";
|
||||
}
|
||||
}
|
||||
}
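A sketch of the `$group` builder introduced above, grouping by one field and deriving several aggregates. It assumes the static factory methods on `Aggregation` (`group(..)`, `newAggregation(..)`); field and collection names are invented:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;

import com.mongodb.DBObject;

class GroupSketch {

	static AggregationResults<DBObject> salesPerCustomer(MongoTemplate template) {

		// group by "customerId"; count() is emulated via $sum: 1 as documented above
		Aggregation aggregation = newAggregation( //
				group("customerId") //
						.count().as("orders") //
						.sum("amount").as("revenue") //
						.avg("amount").as("averageOrderValue"));

		return template.aggregate(aggregation, "orders", DBObject.class);
	}
}
```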
@@ -0,0 +1,52 @@
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.aggregation;

import org.springframework.util.Assert;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

/**
 * Encapsulates the {@code $limit}-operation
 *
 * @see http://docs.mongodb.org/manual/reference/aggregation/limit/
 * @author Thomas Darimont
 * @author Oliver Gierke
 * @since 1.3
 */
class LimitOperation implements AggregationOperation {

	private final long maxElements;

	/**
	 * @param maxElements Number of documents to consider.
	 */
	public LimitOperation(long maxElements) {

		Assert.isTrue(maxElements >= 0, "Maximum number of elements must be greater or equal to zero!");
		this.maxElements = maxElements;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
	 */
	@Override
	public DBObject toDBObject(AggregationOperationContext context) {
		return new BasicDBObject("$limit", maxElements);
	}
}
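For illustration, a "top N" pipeline that uses the `$limit` stage together with `$sort`. The `sort(..)` and `limit(..)` factory methods on `Aggregation` are assumed; the field name is invented:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.aggregation.Aggregation;

class LimitSketch {

	// top five documents by "score": $sort followed by $limit
	static Aggregation topFiveByScore() {
		return newAggregation( //
				sort(Direction.DESC, "score"), //
				limit(5));
	}
}
```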
@@ -0,0 +1,56 @@
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.aggregation;

import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.util.Assert;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

/**
 * Encapsulates the {@code $match}-operation
 *
 * @see http://docs.mongodb.org/manual/reference/aggregation/match/
 * @author Sebastian Herold
 * @author Thomas Darimont
 * @author Oliver Gierke
 * @since 1.3
 */
public class MatchOperation implements AggregationOperation {

	private final Criteria criteria;

	/**
	 * Creates a new {@link MatchOperation} for the given {@link Criteria}.
	 *
	 * @param criteria must not be {@literal null}.
	 */
	public MatchOperation(Criteria criteria) {

		Assert.notNull(criteria, "Criteria must not be null!");
		this.criteria = criteria;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
	 */
	@Override
	public DBObject toDBObject(AggregationOperationContext context) {
		return new BasicDBObject("$match", context.getMappedObject(criteria.getCriteriaObject()));
	}
}
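A `$match` stage is built from a regular `Criteria`. A brief sketch (field names invented; the usual `Aggregation.match(..)` and `Criteria.where(..)` factory methods are assumed):

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
import static org.springframework.data.mongodb.core.query.Criteria.*;

import org.springframework.data.mongodb.core.aggregation.Aggregation;

class MatchSketch {

	// keep only documents whose "status" is ACTIVE and whose "age" is at least 18
	static Aggregation activeAdults() {
		return newAggregation( //
				match(where("status").is("ACTIVE").and("age").gte(18)));
	}
}
```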
@@ -0,0 +1,774 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder.FieldProjection;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Encapsulates the aggregation framework {@code $project}-operation, defining the fields to be used in an
* {@link Aggregation}. A projection is similar to a {@link Field} inclusion/exclusion but more powerful: it can
* generate new fields, change the values of given fields, etc.
|
||||
* <p>
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/aggregation/project/
|
||||
* @author Tobias Trelle
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public class ProjectionOperation implements FieldsExposingAggregationOperation {
|
||||
|
||||
private static final List<Projection> NONE = Collections.emptyList();
|
||||
private static final String EXCLUSION_ERROR = "Exclusion of field %s not allowed. Projections by the mongodb "
|
||||
+ "aggregation framework only support the exclusion of the %s field!";
|
||||
|
||||
private final List<Projection> projections;
|
||||
|
||||
/**
|
||||
* Creates a new empty {@link ProjectionOperation}.
|
||||
*/
|
||||
public ProjectionOperation() {
|
||||
this(NONE, NONE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperation} including the given {@link Fields}.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
*/
|
||||
public ProjectionOperation(Fields fields) {
|
||||
this(NONE, ProjectionOperationBuilder.FieldProjection.from(fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy constructor to allow building up {@link ProjectionOperation} instances from already existing
|
||||
* {@link Projection}s.
|
||||
*
|
||||
* @param current must not be {@literal null}.
|
||||
* @param projections must not be {@literal null}.
|
||||
*/
|
||||
private ProjectionOperation(List<? extends Projection> current, List<? extends Projection> projections) {
|
||||
|
||||
Assert.notNull(current, "Current projections must not be null!");
|
||||
Assert.notNull(projections, "Projections must not be null!");
|
||||
|
||||
this.projections = new ArrayList<ProjectionOperation.Projection>(current.size() + projections.size());
|
||||
this.projections.addAll(current);
|
||||
this.projections.addAll(projections);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperation} with the current {@link Projection}s and the given one.
|
||||
*
|
||||
* @param projection must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private ProjectionOperation and(Projection projection) {
|
||||
return new ProjectionOperation(this.projections, Arrays.asList(projection));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperation} with the current {@link Projection}s replacing the last current one with
|
||||
* the given one.
|
||||
*
|
||||
* @param projection must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private ProjectionOperation andReplaceLastOneWith(Projection projection) {
|
||||
|
||||
List<Projection> projections = this.projections.isEmpty() ? Collections.<Projection> emptyList() : this.projections
|
||||
.subList(0, this.projections.size() - 1);
|
||||
return new ProjectionOperation(projections, Arrays.asList(projection));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperationBuilder} to define a projection for the field with the given name.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder and(String name) {
|
||||
return new ProjectionOperationBuilder(name, this, null);
|
||||
}
|
||||
|
||||
public ExpressionProjectionOperationBuilder andExpression(String expression, Object... params) {
|
||||
return new ExpressionProjectionOperationBuilder(expression, this, params);
|
||||
}
|
||||
|
||||
/**
|
||||
* Excludes the given fields from the projection.
|
||||
*
|
||||
* @param fieldNames must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperation andExclude(String... fieldNames) {
|
||||
|
||||
for (String fieldName : fieldNames) {
|
||||
Assert.isTrue(Fields.UNDERSCORE_ID.equals(fieldName),
|
||||
String.format(EXCLUSION_ERROR, fieldName, Fields.UNDERSCORE_ID));
|
||||
}
|
||||
|
||||
List<FieldProjection> excludeProjections = FieldProjection.from(Fields.fields(fieldNames), false);
|
||||
return new ProjectionOperation(this.projections, excludeProjections);
|
||||
}
|
||||
|
||||
/**
|
||||
* Includes the given fields into the projection.
|
||||
*
|
||||
* @param fieldNames must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperation andInclude(String... fieldNames) {
|
||||
|
||||
List<FieldProjection> projections = FieldProjection.from(Fields.fields(fieldNames), true);
|
||||
return new ProjectionOperation(this.projections, projections);
|
||||
}
|
||||
|
||||
/**
|
||||
* Includes the given fields into the projection.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperation andInclude(Fields fields) {
|
||||
return new ProjectionOperation(this.projections, FieldProjection.from(fields, true));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields()
|
||||
*/
|
||||
@Override
|
||||
public ExposedFields getFields() {
|
||||
|
||||
ExposedFields fields = null;
|
||||
|
||||
for (Projection projection : projections) {
|
||||
ExposedField field = projection.getExposedField();
|
||||
fields = fields == null ? ExposedFields.from(field) : fields.and(field);
|
||||
}
|
||||
|
||||
return fields;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
|
||||
BasicDBObject fieldObject = new BasicDBObject();
|
||||
|
||||
for (Projection projection : projections) {
|
||||
fieldObject.putAll(projection.toDBObject(context));
|
||||
}
|
||||
|
||||
return new BasicDBObject("$project", fieldObject);
|
||||
}
|
||||
|
||||
/**
|
||||
* Base class for {@link ProjectionOperationBuilder}s.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
private static abstract class AbstractProjectionOperationBuilder implements AggregationOperation {
|
||||
|
||||
protected final Object value;
|
||||
protected final ProjectionOperation operation;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AbstractProjectionOperationBuilder} for the given value and {@link ProjectionOperation}.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @param operation must not be {@literal null}.
|
||||
*/
|
||||
public AbstractProjectionOperationBuilder(Object value, ProjectionOperation operation) {
|
||||
|
||||
Assert.notNull(value, "value must not be null or empty!");
|
||||
Assert.notNull(operation, "ProjectionOperation must not be null!");
|
||||
|
||||
this.value = value;
|
||||
this.operation = operation;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
return this.operation.toDBObject(context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link ProjectionOperation} to eventually be applied, using the given alias.
|
||||
*
|
||||
* @param alias will never be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
public abstract ProjectionOperation as(String alias);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public static class ExpressionProjectionOperationBuilder extends AbstractProjectionOperationBuilder {
|
||||
|
||||
private final Object[] params;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExpressionProjectionOperationBuilder} for the given value, {@link ProjectionOperation} and
|
||||
* parameters.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @param operation must not be {@literal null}.
|
||||
* @param parameters
|
||||
*/
|
||||
public ExpressionProjectionOperationBuilder(Object value, ProjectionOperation operation, Object[] parameters) {
|
||||
|
||||
super(value, operation);
|
||||
this.params = parameters;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.AbstractProjectionOperationBuilder#as(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public ProjectionOperation as(String alias) {
|
||||
|
||||
Field expressionField = Fields.field(alias, "expr");
|
||||
return this.operation.and(new ExpressionProjection(expressionField, this.value.toString(), params));
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link Projection} based on a SpEL expression.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
static class ExpressionProjection extends Projection {
|
||||
|
||||
private static final SpelExpressionTransformer TRANSFORMER = new SpelExpressionTransformer();
|
||||
|
||||
private final String expression;
|
||||
private final Object[] params;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExpressionProjection} for the given field, SpEL expression and parameters.
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
* @param expression must not be {@literal null} or empty.
|
||||
* @param parameters must not be {@literal null}.
|
||||
*/
|
||||
public ExpressionProjection(Field field, String expression, Object[] parameters) {
|
||||
|
||||
super(field);
|
||||
|
||||
Assert.hasText(expression, "Expression must not be null!");
|
||||
Assert.notNull(parameters, "Parameters must not be null!");
|
||||
|
||||
this.expression = expression;
|
||||
this.params = parameters;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
return new BasicDBObject(getExposedField().getName(), TRANSFORMER.transform(expression, context, params));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder for {@link ProjectionOperation}s on a field.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public static class ProjectionOperationBuilder extends AbstractProjectionOperationBuilder {
|
||||
|
||||
private final String name;
|
||||
private final ProjectionOperation operation;
|
||||
private final OperationProjection previousProjection;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperationBuilder} for the field with the given name on top of the given
|
||||
* {@link ProjectionOperation}.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
* @param operation must not be {@literal null}.
|
||||
* @param previousProjection the previous operation projection, may be {@literal null}.
|
||||
*/
|
||||
public ProjectionOperationBuilder(String name, ProjectionOperation operation, OperationProjection previousProjection) {
|
||||
super(name, operation);
|
||||
|
||||
this.name = name;
|
||||
this.operation = operation;
|
||||
this.previousProjection = previousProjection;
|
||||
}
|
||||
|
||||
/**
|
||||
* Projects the result of the previous operation onto the current field. Automatically adds an exclusion for
* {@code _id}, as the value it would hold by default now goes into the field just projected into.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperation previousOperation() {
|
||||
|
||||
return this.operation.andExclude(Fields.UNDERSCORE_ID) //
|
||||
.and(new PreviousOperationProjection(name));
|
||||
}
|
||||
|
||||
/**
|
||||
* Defines a nested field binding for the current field.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperation nested(Fields fields) {
|
||||
return this.operation.and(new NestedFieldProjection(name, fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Allows to specify an alias for the previous projection operation.
|
||||
*
|
||||
* @param string
|
||||
* @return
|
||||
*/
|
||||
@Override
|
||||
public ProjectionOperation as(String alias) {
|
||||
|
||||
if (this.previousProjection != null) {
|
||||
return this.operation.andReplaceLastOneWith(this.previousProjection.withAlias(alias));
|
||||
} else {
|
||||
return this.operation.and(new FieldProjection(Fields.field(alias, name), null));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $add} expression that adds the given number to the previously mentioned field.
|
||||
*
|
||||
* @param number
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder plus(Number number) {
|
||||
|
||||
Assert.notNull(number, "Number must not be null!");
|
||||
return project("add", number);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $add} expression that adds the value of the given field to the previously mentioned field.
|
||||
*
|
||||
* @param fieldReference
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder plus(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "Field reference must not be null!");
|
||||
return project("add", Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@code $subtract} expression that subtracts the given number from the previously mentioned field.
|
||||
*
|
||||
* @param number
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder minus(Number number) {
|
||||
|
||||
Assert.notNull(number, "Number must not be null!");
|
||||
return project("subtract", number);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@code $subtract} expression that subtracts the value of the given field from the previously mentioned
* field.
|
||||
*
|
||||
* @param fieldReference
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder minus(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "Field reference must not be null!");
|
||||
return project("subtract", Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $multiply} expression that multiplies the given number with the previously mentioned field.
|
||||
*
|
||||
* @param number
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder multiply(Number number) {
|
||||
|
||||
Assert.notNull(number, "Number must not be null!");
|
||||
return project("multiply", number);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $multiply} expression that multiplies the value of the given field with the previously
|
||||
* mentioned field.
|
||||
*
|
||||
* @param fieldReference
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder multiply(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "Field reference must not be null!");
|
||||
return project("multiply", Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $divide} expression that divides the previously mentioned field by the given number.
|
||||
*
|
||||
* @param number
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder divide(Number number) {
|
||||
|
||||
Assert.notNull(number, "Number must not be null!");
|
||||
Assert.isTrue(Math.abs(number.intValue()) != 0, "Number must not be zero!");
|
||||
return project("divide", number);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@code $divide} expression that divides the previously mentioned field by the value of the given
* field.
|
||||
*
|
||||
* @param fieldReference
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder divide(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "Field reference must not be null!");
|
||||
return project("divide", Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $mod} expression that divides the previously mentioned field by the given number and returns
|
||||
* the remainder.
|
||||
*
|
||||
* @param number
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder mod(Number number) {
|
||||
|
||||
Assert.notNull(number, "Number must not be null!");
|
||||
Assert.isTrue(Math.abs(number.intValue()) != 0, "Number must not be zero!");
|
||||
return project("mod", number);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@code $mod} expression that divides the previously mentioned field by the value of the given field
* and returns the remainder.
|
||||
*
|
||||
* @param fieldReference
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder mod(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "Field reference must not be null!");
|
||||
return project("mod", Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
return this.operation.toDBObject(context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a generic projection for the current field.
|
||||
*
|
||||
* @param operation the operation key, e.g. {@code $add}.
|
||||
* @param values the values to be set for the projection operation.
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder project(String operation, Object... values) {
|
||||
OperationProjection projectionOperation = new OperationProjection(Fields.field(name), operation, values);
|
||||
return new ProjectionOperationBuilder(name, this.operation.and(projectionOperation), projectionOperation);
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link Projection} to pull in the result of the previous operation.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
static class PreviousOperationProjection extends Projection {
|
||||
|
||||
private final String name;
|
||||
|
||||
/**
|
||||
* Creates a new {@link PreviousOperationProjection} for the field with the given name.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
*/
|
||||
public PreviousOperationProjection(String name) {
|
||||
super(Fields.field(name));
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
return new BasicDBObject(name, Fields.UNDERSCORE_ID_REF);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link FieldProjection} to map a result of a previous {@link AggregationOperation} to a new field.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
static class FieldProjection extends Projection {
|
||||
|
||||
private final Field field;
|
||||
private final Object value;
|
||||
|
||||
/**
|
||||
* Creates a new {@link FieldProjection} for the field of the given name, assigning the given value.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
* @param value
|
||||
*/
|
||||
public FieldProjection(String name, Object value) {
|
||||
this(Fields.field(name), value);
|
||||
}
|
||||
|
||||
private FieldProjection(Field field, Object value) {
|
||||
|
||||
super(field);
|
||||
|
||||
this.field = field;
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method to easily create {@link FieldProjection}s for the given {@link Fields}. Fields are projected as
|
||||
* references with their given name. A field {@code foo} will be projected as: {@code foo : 1 } .
|
||||
*
|
||||
* @param fields the {@link Fields} to in- or exclude, must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static List<? extends Projection> from(Fields fields) {
|
||||
return from(fields, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method to easily create {@link FieldProjection}s for the given {@link Fields}.
|
||||
*
|
||||
* @param fields the {@link Fields} to in- or exclude, must not be {@literal null}.
|
||||
* @param value to use for the given field.
|
||||
* @return
|
||||
*/
|
||||
public static List<FieldProjection> from(Fields fields, Object value) {
|
||||
|
||||
Assert.notNull(fields, "Fields must not be null!");
|
||||
List<FieldProjection> projections = new ArrayList<FieldProjection>();
|
||||
|
||||
for (Field field : fields) {
|
||||
projections.add(new FieldProjection(field, value));
|
||||
}
|
||||
|
||||
return projections;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
return new BasicDBObject(field.getName(), renderFieldValue(context));
|
||||
}
|
||||
|
||||
private Object renderFieldValue(AggregationOperationContext context) {
|
||||
|
||||
// implicit reference or explicit include?
|
||||
if (value == null || Boolean.TRUE.equals(value)) {
|
||||
|
||||
// check whether referenced field exists in the context
|
||||
FieldReference reference = context.getReference(field.getTarget());
|
||||
return reference.isSynthetic() && !field.isAliased() ? 1 : reference.toString();
|
||||
|
||||
} else if (Boolean.FALSE.equals(value)) {
|
||||
|
||||
// render field as excluded
|
||||
return 0;
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
static class OperationProjection extends Projection {
|
||||
|
||||
private final Field field;
|
||||
private final String operation;
|
||||
private final List<Object> values;
|
||||
|
||||
/**
|
||||
* Creates a new {@link OperationProjection} for the given field.
|
||||
*
|
||||
* @param name the name of the field to add the operation projection for, must not be {@literal null} or empty.
|
||||
* @param operation the actual operation key, must not be {@literal null} or empty.
|
||||
* @param values the values to pass into the operation, must not be {@literal null}.
|
||||
*/
|
||||
public OperationProjection(Field field, String operation, Object[] values) {
|
||||
|
||||
super(field);
|
||||
|
||||
Assert.hasText(operation, "Operation must not be null or empty!");
|
||||
Assert.notNull(values, "Values must not be null!");
|
||||
|
||||
this.field = field;
|
||||
this.operation = operation;
|
||||
this.values = Arrays.asList(values);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
|
||||
BasicDBList values = new BasicDBList();
|
||||
values.addAll(buildReferences(context));
|
||||
|
||||
DBObject inner = new BasicDBObject("$" + operation, values);
|
||||
|
||||
return new BasicDBObject(this.field.getName(), inner);
|
||||
}
|
||||
|
||||
private List<Object> buildReferences(AggregationOperationContext context) {
|
||||
|
||||
List<Object> result = new ArrayList<Object>(values.size());
|
||||
result.add(context.getReference(field.getTarget()).toString());
|
||||
|
||||
for (Object element : values) {
|
||||
result.add(element instanceof Field ? context.getReference((Field) element).toString() : element);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new instance of this {@link OperationProjection} with the given alias.
|
||||
*
|
||||
* @param alias the alias to set
|
||||
* @return
|
||||
*/
|
||||
public OperationProjection withAlias(String alias) {
|
||||
return new OperationProjection(Fields.field(alias, this.field.getName()), operation, values.toArray());
|
||||
}
|
||||
}
|
||||
|
||||
static class NestedFieldProjection extends Projection {
|
||||
|
||||
private final String name;
|
||||
private final Fields fields;
|
||||
|
||||
public NestedFieldProjection(String name, Fields fields) {
|
||||
|
||||
super(Fields.field(name));
|
||||
this.name = name;
|
||||
this.fields = fields;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
|
||||
DBObject nestedObject = new BasicDBObject();
|
||||
|
||||
for (Field field : fields) {
|
||||
nestedObject.put(field.getName(), context.getReference(field.getTarget()).toString());
|
||||
}
|
||||
|
||||
return new BasicDBObject(name, nestedObject);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Base class for {@link Projection} implementations.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static abstract class Projection {
|
||||
|
||||
private final ExposedField field;
|
||||
|
||||
/**
|
||||
* Creates new {@link Projection} for the given {@link Field}.
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
*/
|
||||
public Projection(Field field) {
|
||||
|
||||
Assert.notNull(field, "Field must not be null!");
|
||||
this.field = new ExposedField(field, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the field exposed by the {@link Projection}.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
public ExposedField getExposedField() {
|
||||
return field;
|
||||
}
|
||||
|
||||
/**
|
||||
* Renders the current {@link Projection} into a {@link DBObject} based on the given
|
||||
* {@link AggregationOperationContext}.
|
||||
*
|
||||
* @param context will never be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public abstract DBObject toDBObject(AggregationOperationContext context);
|
||||
}
|
||||
|
||||
}
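A sketch of the projection builder above, combining a field rename, an arithmetic derivation and a SpEL expression. Field names are invented and the `project(..)` factory method on `Aggregation` is assumed:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.springframework.data.mongodb.core.aggregation.Aggregation;

class ProjectionSketch {

	// $project that renames a field, derives one arithmetically and one via a SpEL expression
	static Aggregation invoiceProjection() {
		return newAggregation( //
				project("invoiceId") //
						.and("customer.name").as("customerName") //
						.and("netPrice").plus("tax").as("grossPrice") //
						.andExpression("netPrice * quantity").as("lineTotal"));
	}
}
```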
@@ -0,0 +1,54 @@
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.aggregation;

import org.springframework.util.Assert;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

/**
 * Encapsulates the aggregation framework {@code $skip}-operation.
 *
 * @see http://docs.mongodb.org/manual/reference/aggregation/skip/
 * @author Thomas Darimont
 * @author Oliver Gierke
 * @since 1.3
 */
public class SkipOperation implements AggregationOperation {

	private final long skipCount;

	/**
	 * Creates a new {@link SkipOperation} skipping the given number of elements.
	 *
	 * @param skipCount number of documents to skip.
	 */
	public SkipOperation(long skipCount) {

		Assert.isTrue(skipCount >= 0, "Skip count must not be negative!");
		this.skipCount = skipCount;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
	 */
	@Override
	public DBObject toDBObject(AggregationOperationContext context) {
		return new BasicDBObject("$skip", skipCount);
	}
}
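`$skip` pairs naturally with `$limit` for page-wise access to a pipeline's results. A small sketch, assuming the `skip(..)` and `limit(..)` factory methods on `Aggregation`:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.springframework.data.mongodb.core.aggregation.Aggregation;

class SkipSketch {

	// skip the first (page * pageSize) documents, then take one page
	static Aggregation page(int page, int pageSize) {
		return newAggregation( //
				skip(page * pageSize), //
				limit(pageSize));
	}
}
```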
@@ -0,0 +1,76 @@
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.aggregation;

import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.domain.Sort.Order;
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
import org.springframework.util.Assert;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

/**
 * Encapsulates the aggregation framework {@code $sort}-operation.
 *
 * @see http://docs.mongodb.org/manual/reference/aggregation/sort/#pipe._S_sort
 * @author Thomas Darimont
 * @author Oliver Gierke
 * @since 1.3
 */
public class SortOperation implements AggregationOperation {

    private final Sort sort;

    /**
     * Creates a new {@link SortOperation} for the given {@link Sort} instance.
     *
     * @param sort must not be {@literal null}.
     */
    public SortOperation(Sort sort) {

        Assert.notNull(sort, "Sort must not be null!");
        this.sort = sort;
    }

    public SortOperation and(Direction direction, String... fields) {
        return and(new Sort(direction, fields));
    }

    public SortOperation and(Sort sort) {
        return new SortOperation(this.sort.and(sort));
    }

    /*
     * (non-Javadoc)
     * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
     */
    @Override
    public DBObject toDBObject(AggregationOperationContext context) {

        BasicDBObject object = new BasicDBObject();

        for (Order order : sort) {

            // Check reference
            FieldReference reference = context.getReference(order.getProperty());
            object.put(reference.getRaw(), order.isAscending() ? 1 : -1);
        }

        return new BasicDBObject("$sort", object);
    }
}
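The fluent `and(...)` methods above allow several sort criteria to be chained; a short sketch, assuming `"age"` and `"lastname"` document fields:

```java
// Sketch; the field names are assumptions.
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.aggregation.SortOperation;

public class SortExample {

    public SortOperation byAgeThenName() {
        // renders to { "$sort" : { "age" : -1 , "lastname" : 1 } } once the fields are
        // resolved against an AggregationOperationContext
        return new SortOperation(new Sort(Direction.DESC, "age")).and(Direction.ASC, "lastname");
    }
}
```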
@@ -0,0 +1,513 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import static org.springframework.data.mongodb.util.DBObjectUtils.*;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.core.GenericTypeResolver;
|
||||
import org.springframework.data.mongodb.core.spel.ExpressionNode;
|
||||
import org.springframework.data.mongodb.core.spel.ExpressionTransformationContextSupport;
|
||||
import org.springframework.data.mongodb.core.spel.LiteralNode;
|
||||
import org.springframework.data.mongodb.core.spel.MethodReferenceNode;
|
||||
import org.springframework.data.mongodb.core.spel.OperatorNode;
|
||||
import org.springframework.expression.spel.ExpressionState;
|
||||
import org.springframework.expression.spel.SpelNode;
|
||||
import org.springframework.expression.spel.SpelParserConfiguration;
|
||||
import org.springframework.expression.spel.ast.CompoundExpression;
|
||||
import org.springframework.expression.spel.ast.Indexer;
|
||||
import org.springframework.expression.spel.ast.InlineList;
|
||||
import org.springframework.expression.spel.ast.PropertyOrFieldReference;
|
||||
import org.springframework.expression.spel.standard.SpelExpression;
|
||||
import org.springframework.expression.spel.standard.SpelExpressionParser;
|
||||
import org.springframework.expression.spel.support.StandardEvaluationContext;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.NumberUtils;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Renders the AST of a SpEL expression as a MongoDB Aggregation Framework projection expression.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
|
||||
// TODO: remove explicit usage of a configuration once SPR-11031 gets fixed
|
||||
private static final SpelParserConfiguration CONFIG = new SpelParserConfiguration(false, false);
|
||||
private static final SpelExpressionParser PARSER = new SpelExpressionParser(CONFIG);
|
||||
private final List<ExpressionNodeConversion<? extends ExpressionNode>> conversions;
|
||||
|
||||
/**
|
||||
* Creates a new {@link SpelExpressionTransformer}.
|
||||
*/
|
||||
public SpelExpressionTransformer() {
|
||||
|
||||
List<ExpressionNodeConversion<? extends ExpressionNode>> conversions = new ArrayList<ExpressionNodeConversion<? extends ExpressionNode>>();
|
||||
conversions.add(new OperatorNodeConversion(this));
|
||||
conversions.add(new LiteralNodeConversion(this));
|
||||
conversions.add(new IndexerNodeConversion(this));
|
||||
conversions.add(new InlineListNodeConversion(this));
|
||||
conversions.add(new PropertyOrFieldReferenceNodeConversion(this));
|
||||
conversions.add(new CompoundExpressionNodeConversion(this));
|
||||
conversions.add(new MethodReferenceNodeConversion(this));
|
||||
|
||||
this.conversions = Collections.unmodifiableList(conversions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Transforms the given SpEL expression to a corresponding MongoDB expression against the given
|
||||
* {@link AggregationOperationContext} {@code context}.
|
||||
* <p>
|
||||
* Exposes the given {@code params} as <code>[0] ... [n]</code>.
|
||||
*
|
||||
* @param expression must not be {@literal null}
|
||||
* @param context must not be {@literal null}
|
||||
* @param params must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
public Object transform(String expression, AggregationOperationContext context, Object... params) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
Assert.notNull(context, "AggregationOperationContext must not be null!");
|
||||
Assert.notNull(params, "Parameters must not be null!");
|
||||
|
||||
SpelExpression spelExpression = (SpelExpression) PARSER.parseExpression(expression);
|
||||
ExpressionState state = new ExpressionState(new StandardEvaluationContext(params), CONFIG);
|
||||
ExpressionNode node = ExpressionNode.from(spelExpression.getAST(), state);
|
||||
|
||||
return transform(new AggregationExpressionTransformationContext<ExpressionNode>(node, null, null, context));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.spel.ExpressionTransformer#transform(org.springframework.data.mongodb.core.spel.ExpressionTransformationContextSupport)
|
||||
*/
|
||||
public Object transform(AggregationExpressionTransformationContext<ExpressionNode> context) {
|
||||
return lookupConversionFor(context.getCurrentNode()).convert(context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an appropriate {@link ExpressionNodeConversion} for the given {@code node}. Throws an
|
||||
* {@link IllegalArgumentException} if no conversion could be found.
|
||||
*
|
||||
* @param node
|
||||
* @return the appropriate {@link ExpressionNodeConversion} for the given {@link ExpressionNode}.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
private ExpressionNodeConversion<ExpressionNode> lookupConversionFor(ExpressionNode node) {
|
||||
|
||||
for (ExpressionNodeConversion<? extends ExpressionNode> candidate : conversions) {
|
||||
if (candidate.supports(node)) {
|
||||
return (ExpressionNodeConversion<ExpressionNode>) candidate;
|
||||
}
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException("Unsupported Element: " + node + " Type: " + node.getClass()
|
||||
+ " You probably have a syntax error in your SpEL expression!");
|
||||
}
|
||||
|
||||
/**
|
||||
* Abstract base class for {@link SpelNode} to (Db)-object conversions.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static abstract class ExpressionNodeConversion<T extends ExpressionNode> implements
|
||||
AggregationExpressionTransformer {
|
||||
|
||||
private final AggregationExpressionTransformer transformer;
|
||||
private final Class<? extends ExpressionNode> nodeType;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExpressionNodeConversion}.
|
||||
*
|
||||
* @param transformer must not be {@literal null}.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public ExpressionNodeConversion(AggregationExpressionTransformer transformer) {
|
||||
|
||||
Assert.notNull(transformer, "Transformer must not be null!");
|
||||
|
||||
this.nodeType = (Class<? extends ExpressionNode>) GenericTypeResolver.resolveTypeArgument(this.getClass(),
|
||||
ExpressionNodeConversion.class);
|
||||
this.transformer = transformer;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the current conversion supports the given {@link ExpressionNode}. By default we will match the
|
||||
* node type against the generic type the subclass binds its type parameter to.
|
||||
*
|
||||
* @param node will never be {@literal null}.
|
||||
* @return true if {@literal this} conversion can be applied to the given {@code node}.
|
||||
*/
|
||||
protected boolean supports(ExpressionNode node) {
|
||||
return nodeType.equals(node.getClass());
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggers the transformation for the given {@link ExpressionNode} and the given current context.
|
||||
*
|
||||
* @param node must not be {@literal null}.
|
||||
* @param context must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
protected Object transform(ExpressionNode node, AggregationExpressionTransformationContext<?> context) {
|
||||
|
||||
Assert.notNull(node, "ExpressionNode must not be null!");
|
||||
Assert.notNull(context, "AggregationExpressionTransformationContext must not be null!");
|
||||
|
||||
return transform(node, context.getParentNode(), null, context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggers the transformation with the given new {@link ExpressionNode}, new parent node, the current operation and
|
||||
* the previous context.
|
||||
*
|
||||
* @param node must not be {@literal null}.
|
||||
* @param parent
|
||||
* @param operation
|
||||
* @param context must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
protected Object transform(ExpressionNode node, ExpressionNode parent, DBObject operation,
|
||||
AggregationExpressionTransformationContext<?> context) {
|
||||
|
||||
Assert.notNull(node, "ExpressionNode must not be null!");
|
||||
Assert.notNull(context, "AggregationExpressionTransformationContext must not be null!");
|
||||
|
||||
return transform(new AggregationExpressionTransformationContext<ExpressionNode>(node, parent, operation,
|
||||
context.getAggregationContext()));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.NodeConversion#transform(org.springframework.data.mongodb.core.aggregation.AggregationExpressionTransformer.AggregationExpressionTransformationContext)
|
||||
*/
|
||||
@Override
|
||||
public Object transform(AggregationExpressionTransformationContext<ExpressionNode> context) {
|
||||
return transformer.transform(context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs the actual conversion from {@link SpelNode} to the corresponding representation for MongoDB.
|
||||
*
|
||||
* @param context
|
||||
* @return
|
||||
*/
|
||||
protected abstract Object convert(AggregationExpressionTransformationContext<T> context);
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link ExpressionNodeConversion} that converts arithmetic operations.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
private static class OperatorNodeConversion extends ExpressionNodeConversion<OperatorNode> {
|
||||
|
||||
public OperatorNodeConversion(AggregationExpressionTransformer transformer) {
|
||||
super(transformer);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext)
|
||||
*/
|
||||
@Override
|
||||
protected Object convert(AggregationExpressionTransformationContext<OperatorNode> context) {
|
||||
|
||||
OperatorNode currentNode = context.getCurrentNode();
|
||||
|
||||
DBObject operationObject = createOperationObjectAndAddToPreviousArgumentsIfNecessary(context, currentNode);
|
||||
Object leftResult = transform(currentNode.getLeft(), currentNode, operationObject, context);
|
||||
|
||||
if (currentNode.isUnaryMinus()) {
|
||||
return convertUnaryMinusOp(context, leftResult);
|
||||
}
|
||||
|
||||
// we deliberately ignore the RHS result
|
||||
transform(currentNode.getRight(), currentNode, operationObject, context);
|
||||
|
||||
return operationObject;
|
||||
}
|
||||
|
||||
private DBObject createOperationObjectAndAddToPreviousArgumentsIfNecessary(
|
||||
AggregationExpressionTransformationContext<OperatorNode> context, OperatorNode currentNode) {
|
||||
|
||||
DBObject nextDbObject = new BasicDBObject(currentNode.getMongoOperator(), new BasicDBList());
|
||||
|
||||
if (!context.hasPreviousOperation()) {
|
||||
return nextDbObject;
|
||||
}
|
||||
|
||||
if (context.parentIsSameOperation()) {
|
||||
|
||||
// same operator applied in a row e.g. 1 + 2 + 3 carry on with the operation and render as $add: [1, 2 ,3]
|
||||
nextDbObject = context.getPreviousOperationObject();
|
||||
} else if (!currentNode.isUnaryOperator()) {
|
||||
|
||||
// different operator -> add the context object for the next level to the argument list of the previous expression
|
||||
context.addToPreviousOperation(nextDbObject);
|
||||
}
|
||||
|
||||
return nextDbObject;
|
||||
}
|
||||
|
||||
private Object convertUnaryMinusOp(ExpressionTransformationContextSupport<OperatorNode> context, Object leftResult) {
|
||||
|
||||
Object result = leftResult instanceof Number ? leftResult
|
||||
: new BasicDBObject("$multiply", dbList(-1, leftResult));
|
||||
|
||||
if (leftResult != null && context.hasPreviousOperation()) {
|
||||
context.addToPreviousOperation(result);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#supports(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
protected boolean supports(ExpressionNode node) {
|
||||
return node.isMathematicalOperation();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link ExpressionNodeConversion} that converts indexed expressions.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class IndexerNodeConversion extends ExpressionNodeConversion<ExpressionNode> {
|
||||
|
||||
public IndexerNodeConversion(AggregationExpressionTransformer transformer) {
|
||||
super(transformer);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext)
|
||||
*/
|
||||
@Override
|
||||
protected Object convert(AggregationExpressionTransformationContext<ExpressionNode> context) {
|
||||
return context.addToPreviousOrReturn(context.getCurrentNode().getValue());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.NodeConversion#supports(org.springframework.data.mongodb.core.spel.ExpressionNode)
|
||||
*/
|
||||
@Override
|
||||
protected boolean supports(ExpressionNode node) {
|
||||
return node.isOfType(Indexer.class);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link ExpressionNodeConversion} that converts in-line list expressions.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
private static class InlineListNodeConversion extends ExpressionNodeConversion<ExpressionNode> {
|
||||
|
||||
public InlineListNodeConversion(AggregationExpressionTransformer transformer) {
|
||||
super(transformer);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext)
|
||||
*/
|
||||
@Override
|
||||
protected Object convert(AggregationExpressionTransformationContext<ExpressionNode> context) {
|
||||
|
||||
ExpressionNode currentNode = context.getCurrentNode();
|
||||
|
||||
if (!currentNode.hasChildren()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// just take the first item
|
||||
return transform(currentNode.getChild(0), currentNode, null, context);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.NodeConversion#supports(org.springframework.data.mongodb.core.spel.ExpressionNode)
|
||||
*/
|
||||
@Override
|
||||
protected boolean supports(ExpressionNode node) {
|
||||
return node.isOfType(InlineList.class);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link ExpressionNodeConversion} that converts property or field reference expressions.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class PropertyOrFieldReferenceNodeConversion extends ExpressionNodeConversion<ExpressionNode> {
|
||||
|
||||
public PropertyOrFieldReferenceNodeConversion(AggregationExpressionTransformer transformer) {
|
||||
super(transformer);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.NodeConversion#convert(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionTransformationContext)
|
||||
*/
|
||||
@Override
|
||||
protected Object convert(AggregationExpressionTransformationContext<ExpressionNode> context) {
|
||||
|
||||
String fieldReference = context.getFieldReference().toString();
|
||||
return context.addToPreviousOrReturn(fieldReference);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.NodeConversion#supports(org.springframework.data.mongodb.core.spel.ExpressionNode)
|
||||
*/
|
||||
@Override
|
||||
protected boolean supports(ExpressionNode node) {
|
||||
return node.isOfType(PropertyOrFieldReference.class);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link ExpressionNodeConversion} that converts literal expressions.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class LiteralNodeConversion extends ExpressionNodeConversion<LiteralNode> {
|
||||
|
||||
public LiteralNodeConversion(AggregationExpressionTransformer transformer) {
|
||||
super(transformer);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
protected Object convert(AggregationExpressionTransformationContext<LiteralNode> context) {
|
||||
|
||||
LiteralNode node = context.getCurrentNode();
|
||||
Object value = node.getValue();
|
||||
|
||||
if (context.hasPreviousOperation()) {
|
||||
|
||||
if (node.isUnaryMinus(context.getParentNode())) {
|
||||
// unary minus operator
|
||||
return NumberUtils.convertNumberToTargetClass(((Number) value).doubleValue() * -1,
|
||||
(Class<Number>) value.getClass()); // retain type, e.g. int to -int
|
||||
}
|
||||
|
||||
return context.addToPreviousOperation(value);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#supports(org.springframework.expression.spel.SpelNode)
|
||||
*/
|
||||
@Override
|
||||
protected boolean supports(ExpressionNode node) {
|
||||
return node.isLiteral();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link ExpressionNodeConversion} that converts method reference expressions.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class MethodReferenceNodeConversion extends ExpressionNodeConversion<MethodReferenceNode> {
|
||||
|
||||
public MethodReferenceNodeConversion(AggregationExpressionTransformer transformer) {
|
||||
super(transformer);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext)
|
||||
*/
|
||||
@Override
|
||||
protected Object convert(AggregationExpressionTransformationContext<MethodReferenceNode> context) {
|
||||
|
||||
MethodReferenceNode node = context.getCurrentNode();
|
||||
List<Object> args = new ArrayList<Object>();
|
||||
|
||||
for (ExpressionNode childNode : node) {
|
||||
args.add(transform(childNode, context));
|
||||
}
|
||||
|
||||
return context.addToPreviousOrReturn(new BasicDBObject(node.getMethodName(), dbList(args.toArray())));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link ExpressionNodeConversion} that converts method compound expressions.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class CompoundExpressionNodeConversion extends ExpressionNodeConversion<ExpressionNode> {
|
||||
|
||||
public CompoundExpressionNodeConversion(AggregationExpressionTransformer transformer) {
|
||||
super(transformer);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext)
|
||||
*/
|
||||
@Override
|
||||
protected Object convert(AggregationExpressionTransformationContext<ExpressionNode> context) {
|
||||
|
||||
ExpressionNode currentNode = context.getCurrentNode();
|
||||
|
||||
if (currentNode.hasfirstChildNotOfType(Indexer.class)) {
|
||||
// we have a property path expression like: foo.bar -> render as reference
|
||||
return context.getFieldReference().toString();
|
||||
}
|
||||
|
||||
return context.addToPreviousOrReturn(currentNode.getValue());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.NodeConversion#supports(org.springframework.data.mongodb.core.spel.ExpressionNode)
|
||||
*/
|
||||
@Override
|
||||
protected boolean supports(ExpressionNode node) {
|
||||
return node.isOfType(CompoundExpression.class);
|
||||
}
|
||||
}
|
||||
}
|
||||
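To make the rendering above concrete, here is a hedged sketch of what the transformer produces for a simple arithmetic expression. The class is package-private, so the sketch lives in the same package; the use of `Aggregation.DEFAULT_CONTEXT` as the untyped rendering context is an assumption, not something introduced by this change set.

```java
// Hedged sketch, assuming it is placed in org.springframework.data.mongodb.core.aggregation
// and that Aggregation.DEFAULT_CONTEXT denotes the untyped operation context.
package org.springframework.data.mongodb.core.aggregation;

class SpelExpressionTransformerSketch {

    Object demo() {

        SpelExpressionTransformer transformer = new SpelExpressionTransformer();

        // "netPrice" becomes a field reference, the parameter placeholder [0] is inlined;
        // expected shape: { "$add" : [ "$netPrice" , 5 ] }
        return transformer.transform("netPrice + [0]", Aggregation.DEFAULT_CONTEXT, 5);
    }
}
```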
@@ -0,0 +1,101 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import static org.springframework.data.mongodb.core.aggregation.Fields.*;
|
||||
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.PersistentPropertyPath;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* {@link AggregationOperationContext} aware of a particular type and a {@link MappingContext} to potentially translate
|
||||
* property references into document field names.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public class TypeBasedAggregationOperationContext implements AggregationOperationContext {
|
||||
|
||||
private final Class<?> type;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final QueryMapper mapper;
|
||||
|
||||
/**
|
||||
* Creates a new {@link TypeBasedAggregationOperationContext} for the given type, {@link MappingContext} and
|
||||
* {@link QueryMapper}.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
* @param mapper must not be {@literal null}.
|
||||
*/
|
||||
public TypeBasedAggregationOperationContext(Class<?> type,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext, QueryMapper mapper) {
|
||||
|
||||
Assert.notNull(type, "Type must not be null!");
|
||||
Assert.notNull(mappingContext, "MappingContext must not be null!");
|
||||
Assert.notNull(mapper, "QueryMapper must not be null!");
|
||||
|
||||
this.type = type;
|
||||
this.mappingContext = mappingContext;
|
||||
this.mapper = mapper;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(com.mongodb.DBObject)
|
||||
*/
|
||||
@Override
|
||||
public DBObject getMappedObject(DBObject dbObject) {
|
||||
return mapper.getMappedObject(dbObject, mappingContext.getPersistentEntity(type));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.Field)
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(Field field) {
|
||||
|
||||
PropertyPath.from(field.getTarget(), type);
|
||||
return getReferenceFor(field);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(String name) {
|
||||
|
||||
PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext.getPersistentPropertyPath(name, type);
|
||||
|
||||
return getReferenceFor(field(propertyPath.getLeafProperty().getName(),
|
||||
propertyPath.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE)));
|
||||
}
|
||||
|
||||
private FieldReference getReferenceFor(Field field) {
|
||||
return new FieldReference(new ExposedField(field, true));
|
||||
}
|
||||
}
|
||||
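A hedged sketch of how this context translates Java property names into mapped document field names; the `Person` type and the `QueryMapper`/`MongoMappingContext` wiring are assumptions about a typical setup, not part of this change set.

```java
// Hedged sketch; Person is a made-up domain type.
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

public class TypeAwareContextExample {

    static class Person {
        String lastName;
    }

    public String mappedFieldFor(MappingMongoConverter converter, MongoMappingContext mappingContext) {

        TypeBasedAggregationOperationContext context = new TypeBasedAggregationOperationContext(
                Person.class, mappingContext, new QueryMapper(converter));

        // resolves the Java property "lastName" to a reference to the mapped document field
        FieldReference reference = context.getReference("lastName");
        return reference.toString();
    }
}
```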
@@ -0,0 +1,51 @@
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.aggregation;

import org.springframework.util.Assert;

/**
 * A {@code TypedAggregation} is a special {@link Aggregation} that holds information about the input aggregation type.
 *
 * @author Thomas Darimont
 * @author Oliver Gierke
 */
public class TypedAggregation<I> extends Aggregation {

    private final Class<I> inputType;

    /**
     * Creates a new {@link TypedAggregation} from the given {@link AggregationOperation}s.
     *
     * @param inputType must not be {@literal null}.
     * @param operations must not be {@literal null} or empty.
     */
    public TypedAggregation(Class<I> inputType, AggregationOperation... operations) {

        super(operations);

        Assert.notNull(inputType, "Input type must not be null!");
        this.inputType = inputType;
    }

    /**
     * Returns the input type for the {@link Aggregation}.
     *
     * @return the input type; will never be {@literal null}.
     */
    public Class<I> getInputType() {
        return inputType;
    }
}
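A short sketch of building a typed pipeline through the `newAggregation(Class, ...)` factory; the `Product` type and its fields are assumptions for illustration.

```java
// Sketch; Product is a made-up domain type.
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;

public class TypedAggregationExample {

    static class Product {
        String name;
        double netPrice;
    }

    public TypedAggregation<Product> expensiveFirst() {

        // the input type lets the field names be validated and mapped against Product
        return newAggregation(Product.class,
                project("name", "netPrice"),
                sort(Direction.DESC, "netPrice"));
    }
}
```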
@@ -0,0 +1,55 @@
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.aggregation;

import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
import org.springframework.util.Assert;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

/**
 * Encapsulates the aggregation framework {@code $unwind}-operation.
 *
 * @see http://docs.mongodb.org/manual/reference/aggregation/unwind/#pipe._S_unwind
 * @author Thomas Darimont
 * @author Oliver Gierke
 * @since 1.3
 */
public class UnwindOperation implements AggregationOperation {

    private final ExposedField field;

    /**
     * Creates a new {@link UnwindOperation} for the given {@link Field}.
     *
     * @param field must not be {@literal null}.
     */
    public UnwindOperation(Field field) {

        Assert.notNull(field);
        this.field = new ExposedField(field, true);
    }

    /*
     * (non-Javadoc)
     * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
     */
    @Override
    public DBObject toDBObject(AggregationOperationContext context) {
        return new BasicDBObject("$unwind", context.getReference(field).toString());
    }
}
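A brief sketch of the `$unwind` stage in a pipeline, assuming a `"tags"` array field on the input documents:

```java
// Sketch; "tags" is an assumed array field.
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.springframework.data.mongodb.core.aggregation.Aggregation;

public class UnwindExample {

    public Aggregation tagFrequencies() {
        // { "$unwind" : "$tags" } emits one document per array element, which the
        // subsequent $group stage then counts per tag
        return newAggregation(unwind("tags"), group("tags").count().as("n"));
    }
}
```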
@@ -0,0 +1,5 @@
/**
 * Support for the MongoDB aggregation framework.
 * @since 1.3
 */
package org.springframework.data.mongodb.core.aggregation;
@@ -19,6 +19,7 @@ import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
@@ -39,6 +40,7 @@ import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.mapping.model.SimpleTypeHolder;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.BigDecimalToStringConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.BigIntegerToStringConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.DBObjectToStringConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigDecimalConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigIntegerConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToURLConverter;
|
||||
@@ -85,12 +87,13 @@ public class CustomConversions {
|
||||
|
||||
Assert.notNull(converters);
|
||||
|
||||
this.readingPairs = new HashSet<ConvertiblePair>();
|
||||
this.writingPairs = new HashSet<ConvertiblePair>();
|
||||
this.readingPairs = new LinkedHashSet<ConvertiblePair>();
|
||||
this.writingPairs = new LinkedHashSet<ConvertiblePair>();
|
||||
this.customSimpleTypes = new HashSet<Class<?>>();
|
||||
this.cache = new HashMap<Class<?>, HashMap<Class<?>, CacheValue>>();
|
||||
|
||||
this.converters = new ArrayList<Object>();
|
||||
this.converters.addAll(converters);
|
||||
this.converters.add(CustomToStringConverter.INSTANCE);
|
||||
this.converters.add(BigDecimalToStringConverter.INSTANCE);
|
||||
this.converters.add(StringToBigDecimalConverter.INSTANCE);
|
||||
@@ -98,8 +101,8 @@ public class CustomConversions {
|
||||
this.converters.add(StringToBigIntegerConverter.INSTANCE);
|
||||
this.converters.add(URLToStringConverter.INSTANCE);
|
||||
this.converters.add(StringToURLConverter.INSTANCE);
|
||||
this.converters.add(DBObjectToStringConverter.INSTANCE);
|
||||
this.converters.addAll(JodaTimeConverters.getConvertersToRegister());
|
||||
this.converters.addAll(converters);
|
||||
|
||||
for (Object c : this.converters) {
|
||||
registerConversion(c);
|
||||
@@ -237,6 +240,7 @@ public class CustomConversions {
|
||||
* @return
|
||||
*/
|
||||
public Class<?> getCustomWriteTarget(Class<?> source, Class<?> expectedTargetType) {
|
||||
|
||||
Assert.notNull(source);
|
||||
return getCustomTarget(source, expectedTargetType, writingPairs);
|
||||
}
|
||||
|
||||
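The constructor shown above accepts user-supplied converters alongside the defaults it registers itself; a hedged sketch of handing one in (the `Money` type and its string mapping are made up for illustration):

```java
// Hedged sketch of registering a custom writing converter.
import java.util.Arrays;

import org.springframework.core.convert.converter.Converter;
import org.springframework.data.convert.WritingConverter;
import org.springframework.data.mongodb.core.convert.CustomConversions;

public class CustomConversionsExample {

    static class Money {
        String currency;
        java.math.BigDecimal amount;
    }

    @WritingConverter
    static class MoneyToStringConverter implements Converter<Money, String> {
        public String convert(Money source) {
            return source.currency + " " + source.amount;
        }
    }

    public CustomConversions conversions() {
        // the list handed in here ends up in the converters collection assembled above
        return new CustomConversions(Arrays.asList(new MoneyToStringConverter()));
    }
}
```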
@@ -0,0 +1,123 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Wrapper value object for a {@link BasicDBObject} to be able to access raw values by {@link MongoPersistentProperty}
|
||||
* references. The accessors will transparently resolve nested document values that a {@link MongoPersistentProperty}
|
||||
* might refer to through a path expression in field names.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
class DBObjectAccessor {
|
||||
|
||||
private final DBObject dbObject;
|
||||
|
||||
/**
|
||||
* Creates a new {@link DBObjectAccessor} for the given {@link DBObject}.
|
||||
*
|
||||
* @param dbObject must be a {@link BasicDBObject} effectively, must not be {@literal null}.
|
||||
*/
|
||||
public DBObjectAccessor(DBObject dbObject) {
|
||||
|
||||
Assert.notNull(dbObject, "DBObject must not be null!");
|
||||
Assert.isInstanceOf(BasicDBObject.class, dbObject, "Given DBObject must be a BasicDBObject!");
|
||||
|
||||
this.dbObject = dbObject;
|
||||
}
|
||||
|
||||
/**
|
||||
* Puts the given value into the backing {@link DBObject} based on the coordinates defined through the given
|
||||
* {@link MongoPersistentProperty}. By default this will be the plain field name. But field names might also consist
|
||||
* of path traversals so we might need to create intermediate {@link BasicDBObject}s.
|
||||
*
|
||||
* @param prop must not be {@literal null}.
|
||||
* @param value
|
||||
*/
|
||||
public void put(MongoPersistentProperty prop, Object value) {
|
||||
|
||||
Assert.notNull(prop, "MongoPersistentProperty must not be null!");
|
||||
String fieldName = prop.getFieldName();
|
||||
|
||||
Iterator<String> parts = Arrays.asList(fieldName.split("\\.")).iterator();
|
||||
DBObject dbObject = this.dbObject;
|
||||
|
||||
while (parts.hasNext()) {
|
||||
|
||||
String part = parts.next();
|
||||
|
||||
if (parts.hasNext()) {
|
||||
BasicDBObject nestedDbObject = new BasicDBObject();
|
||||
dbObject.put(part, nestedDbObject);
|
||||
dbObject = nestedDbObject;
|
||||
} else {
|
||||
dbObject.put(part, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the value the given {@link MongoPersistentProperty} refers to. By default this will be a direct field but
|
||||
* the method will also transparently resolve nested values the {@link MongoPersistentProperty} might refer to through
|
||||
* a path expression in the field name metadata.
|
||||
*
|
||||
* @param property must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public Object get(MongoPersistentProperty property) {
|
||||
|
||||
String fieldName = property.getFieldName();
|
||||
Iterator<String> parts = Arrays.asList(fieldName.split("\\.")).iterator();
|
||||
Map<Object, Object> source = this.dbObject.toMap();
|
||||
Object result = null;
|
||||
|
||||
while (source != null && parts.hasNext()) {
|
||||
|
||||
result = source.get(parts.next());
|
||||
|
||||
if (parts.hasNext()) {
|
||||
source = getAsMap(result);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private Map<Object, Object> getAsMap(Object source) {
|
||||
|
||||
if (source instanceof BasicDBObject) {
|
||||
return ((DBObject) source).toMap();
|
||||
}
|
||||
|
||||
if (source instanceof Map) {
|
||||
return (Map<Object, Object>) source;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
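A hedged sketch of the accessor's nested-path behaviour; it assumes a `MongoPersistentProperty` whose mapped field name is the path expression `"address.city"`. Since the class is package-private, the sketch sits in the same package.

```java
// Sketch, assuming placement in org.springframework.data.mongodb.core.convert and a
// property mapped to the field name "address.city".
package org.springframework.data.mongodb.core.convert;

import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;

import com.mongodb.BasicDBObject;

class DBObjectAccessorSketch {

    Object roundTrip(MongoPersistentProperty cityProperty) {

        BasicDBObject sink = new BasicDBObject();
        DBObjectAccessor accessor = new DBObjectAccessor(sink);

        // writes { "address" : { "city" : "Dresden" } }, creating the intermediate document
        accessor.put(cityProperty, "Dresden");

        // reads the nested value back through the same path expression
        return accessor.get(cityProperty);
    }
}
```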
@@ -0,0 +1,48 @@
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.convert;

import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;

import com.mongodb.DBRef;

/**
 * Used to resolve associations annotated with {@link org.springframework.data.mongodb.core.mapping.DBRef}.
 *
 * @author Thomas Darimont
 */
public interface DbRefResolver {

    /**
     * Resolves the value for the given {@link MongoPersistentProperty}, potentially deferring the actual lookup to the
     * given {@link DbRefResolverCallback}.
     *
     * @param property will never be {@literal null}.
     * @param callback will never be {@literal null}.
     * @return
     */
    Object resolveDbRef(MongoPersistentProperty property, DbRefResolverCallback callback);

    /**
     * Creates a {@link DBRef} instance for the given {@link org.springframework.data.mongodb.core.mapping.DBRef}
     * annotation, {@link MongoPersistentEntity} and id.
     *
     * @param annotation will never be {@literal null}.
     * @param entity will never be {@literal null}.
     * @param id will never be {@literal null}.
     * @return
     */
    DBRef createDbRef(org.springframework.data.mongodb.core.mapping.DBRef annotation, MongoPersistentEntity<?> entity,
            Object id);
}
@@ -0,0 +1,35 @@
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.convert;

import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;

/**
 * Callback interface to be used in conjunction with {@link DbRefResolver}.
 *
 * @author Thomas Darimont
 * @author Oliver Gierke
 */
public interface DbRefResolverCallback {

    /**
     * Resolve the final object for the given {@link MongoPersistentProperty}.
     *
     * @param property will never be {@literal null}.
     * @return
     */
    Object resolve(MongoPersistentProperty property);
}
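The callback is normally supplied by the converter; a minimal hedged sketch of an implementation that simply hands back a pre-materialized value:

```java
// Minimal hedged sketch; a real callback would fetch the referenced document from the
// database instead of returning a canned value.
import org.springframework.data.mongodb.core.convert.DbRefResolverCallback;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;

public class FixedValueCallback implements DbRefResolverCallback {

    private final Object value;

    public FixedValueCallback(Object value) {
        this.value = value;
    }

    @Override
    public Object resolve(MongoPersistentProperty property) {
        return value;
    }
}
```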
@@ -0,0 +1,282 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.io.ObjectOutputStream;
|
||||
import java.io.Serializable;
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
import org.aopalliance.intercept.MethodInterceptor;
|
||||
import org.aopalliance.intercept.MethodInvocation;
|
||||
import org.objenesis.Objenesis;
|
||||
import org.objenesis.ObjenesisStd;
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.cglib.proxy.Callback;
|
||||
import org.springframework.cglib.proxy.Enhancer;
|
||||
import org.springframework.cglib.proxy.Factory;
|
||||
import org.springframework.cglib.proxy.MethodProxy;
|
||||
import org.springframework.core.SpringVersion;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.LazyLoadingException;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.ReflectionUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.DBRef;
|
||||
|
||||
/**
|
||||
* A {@link DbRefResolver} that resolves {@link org.springframework.data.mongodb.core.mapping.DBRef}s by delegating to a
|
||||
* {@link DbRefResolverCallback} that is able to generate lazy loading proxies.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class DefaultDbRefResolver implements DbRefResolver {
|
||||
|
||||
private static final boolean IS_SPRING_4_OR_BETTER = SpringVersion.getVersion().startsWith("4");
|
||||
private static final boolean OBJENESIS_PRESENT = ClassUtils.isPresent("org.objenesis.Objenesis", null);
|
||||
|
||||
private final MongoDbFactory mongoDbFactory;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
|
||||
/**
|
||||
* Creates a new {@link DefaultDbRefResolver} with the given {@link MongoDbFactory}.
|
||||
*
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
*/
|
||||
public DefaultDbRefResolver(MongoDbFactory mongoDbFactory) {
|
||||
|
||||
Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null!");
|
||||
|
||||
this.mongoDbFactory = mongoDbFactory;
|
||||
this.exceptionTranslator = mongoDbFactory.getExceptionTranslator();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.DbRefResolver#resolveDbRef(org.springframework.data.mongodb.core.mapping.MongoPersistentProperty, org.springframework.data.mongodb.core.convert.DbRefResolverCallback)
|
||||
*/
|
||||
@Override
|
||||
public Object resolveDbRef(MongoPersistentProperty property, DbRefResolverCallback callback) {
|
||||
|
||||
Assert.notNull(property, "Property must not be null!");
|
||||
Assert.notNull(callback, "Callback must not be null!");
|
||||
|
||||
if (isLazyDbRef(property)) {
|
||||
return createLazyLoadingProxy(property, callback);
|
||||
}
|
||||
|
||||
return callback.resolve(property);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.DbRefResolver#created(org.springframework.data.mongodb.core.mapping.MongoPersistentProperty, org.springframework.data.mongodb.core.mapping.MongoPersistentEntity, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public DBRef createDbRef(org.springframework.data.mongodb.core.mapping.DBRef annotation,
|
||||
MongoPersistentEntity<?> entity, Object id) {
|
||||
|
||||
DB db = mongoDbFactory.getDb();
|
||||
db = annotation != null && StringUtils.hasText(annotation.db()) ? mongoDbFactory.getDb(annotation.db()) : db;
|
||||
|
||||
return new DBRef(db, entity.getCollection(), id);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a proxy for the given {@link MongoPersistentProperty} using the given {@link DbRefResolverCallback} to
|
||||
* eventually resolve the value of the property.
|
||||
*
|
||||
* @param property must not be {@literal null}.
|
||||
* @param callback must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private Object createLazyLoadingProxy(MongoPersistentProperty property, DbRefResolverCallback callback) {
|
||||
|
||||
ProxyFactory proxyFactory = new ProxyFactory();
|
||||
Class<?> propertyType = property.getType();
|
||||
|
||||
for (Class<?> type : propertyType.getInterfaces()) {
|
||||
proxyFactory.addInterface(type);
|
||||
}
|
||||
|
||||
LazyLoadingInterceptor interceptor = new LazyLoadingInterceptor(property, exceptionTranslator, callback);
|
||||
|
||||
if (propertyType.isInterface()) {
|
||||
proxyFactory.addInterface(propertyType);
|
||||
proxyFactory.addAdvice(interceptor);
|
||||
return proxyFactory.getProxy();
|
||||
}
|
||||
|
||||
proxyFactory.setProxyTargetClass(true);
|
||||
proxyFactory.setTargetClass(propertyType);
|
||||
|
||||
if (IS_SPRING_4_OR_BETTER || !OBJENESIS_PRESENT) {
|
||||
proxyFactory.addAdvice(interceptor);
|
||||
return proxyFactory.getProxy();
|
||||
}
|
||||
|
||||
return ObjenesisProxyEnhancer.enhanceAndGet(proxyFactory, propertyType, interceptor);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param property
|
||||
* @return
|
||||
*/
|
||||
private boolean isLazyDbRef(MongoPersistentProperty property) {
|
||||
return property.getDBRef() != null && property.getDBRef().lazy();
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link MethodInterceptor} that is used within a lazy loading proxy. The property resolving is delegated to a
|
||||
* {@link DbRefResolverCallback}. The resolving process is triggered by a method invocation on the proxy and is
|
||||
* guaranteed to be performed only once.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
static class LazyLoadingInterceptor implements MethodInterceptor, org.springframework.cglib.proxy.MethodInterceptor,
|
||||
Serializable {
|
||||
|
||||
private final DbRefResolverCallback callback;
|
||||
private final MongoPersistentProperty property;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
|
||||
private volatile boolean resolved;
|
||||
private Object result;
|
||||
|
||||
/**
|
||||
* Creates a new {@link LazyLoadingInterceptor} for the given {@link MongoPersistentProperty},
|
||||
* {@link PersistenceExceptionTranslator} and {@link DbRefResolverCallback}.
|
||||
*
|
||||
* @param property must not be {@literal null}.
|
||||
* @param callback must not be {@literal null}.
|
||||
*/
|
||||
public LazyLoadingInterceptor(MongoPersistentProperty property, PersistenceExceptionTranslator exceptionTranslator,
|
||||
DbRefResolverCallback callback) {
|
||||
|
||||
Assert.notNull(property, "Property must not be null!");
|
||||
Assert.notNull(exceptionTranslator, "Exception translator must not be null!");
|
||||
Assert.notNull(callback, "Callback must not be null!");
|
||||
|
||||
this.callback = callback;
|
||||
this.exceptionTranslator = exceptionTranslator;
|
||||
this.property = property;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.aopalliance.intercept.MethodInterceptor#invoke(org.aopalliance.intercept.MethodInvocation)
|
||||
*/
|
||||
@Override
|
||||
public Object invoke(MethodInvocation invocation) throws Throwable {
|
||||
return intercept(invocation.getThis(), invocation.getMethod(), invocation.getArguments(), null);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.cglib.proxy.MethodInterceptor#intercept(java.lang.Object, java.lang.reflect.Method, java.lang.Object[], org.springframework.cglib.proxy.MethodProxy)
|
||||
*/
|
||||
@Override
|
||||
public Object intercept(Object obj, Method method, Object[] args, MethodProxy proxy) throws Throwable {
|
||||
return ReflectionUtils.isObjectMethod(method) ? method.invoke(obj, args) : method.invoke(ensureResolved(), args);
|
||||
}
|
||||
|
||||
private Object ensureResolved() {
|
||||
|
||||
if (!resolved) {
|
||||
this.result = resolve();
|
||||
this.resolved = true;
|
||||
}
|
||||
|
||||
return this.result;
|
||||
}
|
||||
|
||||
private void writeObject(ObjectOutputStream out) throws IOException {
|
||||
|
||||
ensureResolved();
|
||||
out.writeObject(this.result);
|
||||
}
|
||||
|
||||
private void readObject(ObjectInputStream in) throws IOException {
|
||||
|
||||
try {
|
||||
this.resolved = true; // Object is guaranteed to be resolved after serialization
|
||||
this.result = in.readObject();
|
||||
} catch (ClassNotFoundException e) {
|
||||
throw new LazyLoadingException("Could not deserialize result", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @return
|
||||
*/
|
||||
private synchronized Object resolve() {
|
||||
|
||||
if (!resolved) {
|
||||
|
||||
try {
|
||||
|
||||
return callback.resolve(property);
|
||||
|
||||
} catch (RuntimeException ex) {
|
||||
|
||||
DataAccessException translatedException = this.exceptionTranslator.translateExceptionIfPossible(ex);
|
||||
throw new LazyLoadingException("Unable to lazily resolve DBRef!", translatedException);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public boolean isResolved() {
|
||||
return resolved;
|
||||
}
|
||||
|
||||
public Object getResult() {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Static class to accommodate the optional dependency on Objenesis.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class ObjenesisProxyEnhancer {
|
||||
|
||||
private static final Objenesis OBJENESIS = new ObjenesisStd(true);
|
||||
|
||||
public static Object enhanceAndGet(ProxyFactory proxyFactory, Class<?> type,
|
||||
org.springframework.cglib.proxy.MethodInterceptor interceptor) {
|
||||
|
||||
Enhancer enhancer = new Enhancer();
|
||||
enhancer.setSuperclass(type);
|
||||
enhancer.setCallbackType(org.springframework.cglib.proxy.MethodInterceptor.class);
|
||||
|
||||
Factory factory = (Factory) OBJENESIS.newInstance(enhancer.createClass());
|
||||
factory.setCallbacks(new Callback[] { interceptor });
|
||||
return factory;
|
||||
}
|
||||
}
|
||||
}
|
||||
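A hedged sketch of declaring a lazily resolved association and wiring the resolver into the converter via the new constructor shown later in this change set; the `Person`/`Account` types and the availability of a `MongoDbFactory` and `MongoMappingContext` are assumptions.

```java
// Hedged sketch; the domain types are made up.
import java.util.List;

import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.DBRef;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

public class LazyDbRefExample {

    static class Account {
        String id;
    }

    static class Person {

        // the proxy created by DefaultDbRefResolver defers fetching until first access
        @DBRef(lazy = true)
        List<Account> accounts;
    }

    public MappingMongoConverter converter(MongoDbFactory factory, MongoMappingContext context) {
        return new MappingMongoConverter(new DefaultDbRefResolver(factory), context);
    }
}
```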
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -18,16 +18,19 @@ package org.springframework.data.mongodb.core.convert;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.data.convert.SimpleTypeInformationMapper;
|
||||
import org.springframework.data.convert.DefaultTypeMapper;
|
||||
import org.springframework.data.convert.SimpleTypeInformationMapper;
|
||||
import org.springframework.data.convert.TypeAliasAccessor;
|
||||
import org.springframework.data.convert.TypeInformationMapper;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
@@ -37,33 +40,43 @@ import com.mongodb.DBObject;
|
||||
* respectively.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class DefaultMongoTypeMapper extends DefaultTypeMapper<DBObject> implements MongoTypeMapper {
|
||||
|
||||
public static final String DEFAULT_TYPE_KEY = "_class";
|
||||
@SuppressWarnings("rawtypes")
|
||||
@SuppressWarnings("rawtypes")//
|
||||
private static final TypeInformation<List> LIST_TYPE_INFO = ClassTypeInformation.from(List.class);
|
||||
@SuppressWarnings("rawtypes")
|
||||
@SuppressWarnings("rawtypes")//
|
||||
private static final TypeInformation<Map> MAP_TYPE_INFO = ClassTypeInformation.from(Map.class);
|
||||
private String typeKey = DEFAULT_TYPE_KEY;
|
||||
|
||||
private final TypeAliasAccessor<DBObject> accessor;
|
||||
private final String typeKey;
|
||||
|
||||
public DefaultMongoTypeMapper() {
|
||||
this(DEFAULT_TYPE_KEY, Arrays.asList(SimpleTypeInformationMapper.INSTANCE));
|
||||
this(DEFAULT_TYPE_KEY);
|
||||
}
|
||||
|
||||
public DefaultMongoTypeMapper(String typeKey) {
|
||||
super(new DBObjectTypeAliasAccessor(typeKey));
|
||||
this.typeKey = typeKey;
|
||||
this(typeKey, Arrays.asList(SimpleTypeInformationMapper.INSTANCE));
|
||||
}
|
||||
|
||||
public DefaultMongoTypeMapper(String typeKey, MappingContext<? extends PersistentEntity<?, ?>, ?> mappingContext) {
|
||||
super(new DBObjectTypeAliasAccessor(typeKey), mappingContext, Arrays.asList(SimpleTypeInformationMapper.INSTANCE));
|
||||
this.typeKey = typeKey;
|
||||
this(typeKey, new DBObjectTypeAliasAccessor(typeKey), mappingContext, Arrays
|
||||
.asList(SimpleTypeInformationMapper.INSTANCE));
|
||||
}
|
||||
|
||||
public DefaultMongoTypeMapper(String typeKey, List<? extends TypeInformationMapper> mappers) {
|
||||
super(new DBObjectTypeAliasAccessor(typeKey), mappers);
|
||||
this(typeKey, new DBObjectTypeAliasAccessor(typeKey), null, mappers);
|
||||
}
|
||||
|
||||
private DefaultMongoTypeMapper(String typeKey, TypeAliasAccessor<DBObject> accessor,
|
||||
MappingContext<? extends PersistentEntity<?, ?>, ?> mappingContext, List<? extends TypeInformationMapper> mappers) {
|
||||
|
||||
super(accessor, mappingContext, mappers);
|
||||
|
||||
this.typeKey = typeKey;
|
||||
this.accessor = accessor;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -74,6 +87,31 @@ public class DefaultMongoTypeMapper extends DefaultTypeMapper<DBObject> implemen
|
||||
return typeKey == null ? false : typeKey.equals(key);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.MongoTypeMapper#writeTypeRestrictions(java.util.Set)
|
||||
*/
|
||||
@Override
|
||||
public void writeTypeRestrictions(DBObject result, Set<Class<?>> restrictedTypes) {
|
||||
|
||||
if (restrictedTypes == null || restrictedTypes.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
|
||||
BasicDBList restrictedMappedTypes = new BasicDBList();
|
||||
|
||||
for (Class<?> restrictedType : restrictedTypes) {
|
||||
|
||||
Object typeAlias = getAliasFor(ClassTypeInformation.from(restrictedType));
|
||||
|
||||
if (typeAlias != null) {
|
||||
restrictedMappedTypes.add(typeAlias);
|
||||
}
|
||||
}
|
||||
|
||||
accessor.writeTypeTo(result, new BasicDBObject("$in", restrictedMappedTypes));
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.convert.DefaultTypeMapper#getFallbackTypeFor(java.lang.Object)
|
||||
*/
|
||||
@@ -83,6 +121,7 @@ public class DefaultMongoTypeMapper extends DefaultTypeMapper<DBObject> implemen
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link TypeAliasAccessor} to store aliases in a {@link DBObject}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
|
||||
@@ -57,11 +57,9 @@ import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.expression.spel.standard.SpelExpressionParser;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.DBRef;
|
||||
|
||||
@@ -72,6 +70,7 @@ import com.mongodb.DBRef;
|
||||
* @author Oliver Gierke
|
||||
* @author Jon Brisbin
|
||||
* @author Patrik Wasik
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class MappingMongoConverter extends AbstractMongoConverter implements ApplicationContextAware {
|
||||
|
||||
@@ -79,8 +78,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
protected final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
protected final SpelExpressionParser spelExpressionParser = new SpelExpressionParser();
|
||||
protected final MongoDbFactory mongoDbFactory;
|
||||
protected final QueryMapper idMapper;
|
||||
protected final DbRefResolver dbRefResolver;
|
||||
protected ApplicationContext applicationContext;
|
||||
protected boolean useFieldAccessOnly = true;
|
||||
protected MongoTypeMapper typeMapper;
|
||||
@@ -89,21 +88,21 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
private SpELContext spELContext;
|
||||
|
||||
/**
|
||||
* Creates a new {@link MappingMongoConverter} given the new {@link MongoDbFactory} and {@link MappingContext}.
|
||||
* Creates a new {@link MappingMongoConverter} given the new {@link DbRefResolver} and {@link MappingContext}.
|
||||
*
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public MappingMongoConverter(MongoDbFactory mongoDbFactory,
|
||||
public MappingMongoConverter(DbRefResolver dbRefResolver,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
super(ConversionServiceFactory.createDefaultConversionService());
|
||||
|
||||
Assert.notNull(mongoDbFactory);
|
||||
Assert.notNull(mappingContext);
|
||||
Assert.notNull(dbRefResolver, "DbRefResolver must not be null!");
|
||||
Assert.notNull(mappingContext, "MappingContext must not be null!");
|
||||
|
||||
this.mongoDbFactory = mongoDbFactory;
|
||||
this.dbRefResolver = dbRefResolver;
|
||||
this.mappingContext = mappingContext;
|
||||
this.typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext);
|
||||
this.idMapper = new QueryMapper(this);
|
||||
@@ -111,6 +110,19 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
this.spELContext = new SpELContext(DBObjectPropertyAccessor.INSTANCE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link MappingMongoConverter} given the new {@link MongoDbFactory} and {@link MappingContext}.
|
||||
*
|
||||
* @deprecated use the constructor taking a {@link DbRefResolver} instead.
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
*/
|
||||
@Deprecated
|
||||
public MappingMongoConverter(MongoDbFactory mongoDbFactory,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
this(new DefaultDbRefResolver(mongoDbFactory), mappingContext);
|
||||
}
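For code that still calls the deprecated constructor, the migration is mechanical: wrap the factory in a `DefaultDbRefResolver`. A sketch assuming a plain `SimpleMongoDbFactory`/`MongoMappingContext` setup:

```java
import com.mongodb.MongoClient;

import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

public class ConverterSetup {

    public static void main(String[] args) throws Exception {

        MongoDbFactory factory = new SimpleMongoDbFactory(new MongoClient(), "database");
        MongoMappingContext mappingContext = new MongoMappingContext();

        // Before: DBRefs were resolved through the MongoDbFactory handed to the converter (now deprecated).
        MappingMongoConverter before = new MappingMongoConverter(factory, mappingContext);

        // After: the DBRef resolution strategy is passed in explicitly.
        MappingMongoConverter after =
                new MappingMongoConverter(new DefaultDbRefResolver(factory), mappingContext);
        after.afterPropertiesSet();
    }
}
```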
|
||||
|
||||
/**
|
||||
* Configures the {@link MongoTypeMapper} to be used to add type information to {@link DBObject}s created by the
|
||||
* converter and how to lookup type information from {@link DBObject}s when reading them. Uses a
|
||||
@@ -124,6 +136,15 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
mappingContext) : typeMapper;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.MongoConverter#getTypeMapper()
|
||||
*/
|
||||
@Override
|
||||
public MongoTypeMapper getTypeMapper() {
|
||||
return this.typeMapper;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the characters that dots potentially contained in a {@link Map} shall be replaced with. By default we don't do
|
||||
* any translation but rather reject a {@link Map} with keys containing dots causing the conversion for the entire
|
||||
@@ -224,7 +245,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
parent);
|
||||
}
|
||||
|
||||
private <S extends Object> S read(final MongoPersistentEntity<S> entity, final DBObject dbo, Object parent) {
|
||||
private <S extends Object> S read(final MongoPersistentEntity<S> entity, final DBObject dbo, final Object parent) {
|
||||
|
||||
final DefaultSpELExpressionEvaluator evaluator = new DefaultSpELExpressionEvaluator(dbo, spELContext);
|
||||
|
||||
@@ -251,11 +272,18 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
// Handle associations
|
||||
entity.doWithAssociations(new AssociationHandler<MongoPersistentProperty>() {
|
||||
public void doWithAssociation(Association<MongoPersistentProperty> association) {
|
||||
|
||||
MongoPersistentProperty inverseProp = association.getInverse();
|
||||
Object obj = getValueInternal(inverseProp, dbo, evaluator, result);
|
||||
|
||||
Object obj = dbRefResolver.resolveDbRef(inverseProp, new DbRefResolverCallback() {
|
||||
|
||||
@Override
|
||||
public Object resolve(MongoPersistentProperty property) {
|
||||
return getValueInternal(property, dbo, evaluator, parent);
|
||||
}
|
||||
});
|
||||
|
||||
wrapper.setProperty(inverseProp, obj);
|
||||
|
||||
}
|
||||
});
|
||||
|
||||
@@ -356,8 +384,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
try {
|
||||
Object id = wrapper.getProperty(idProperty, Object.class, fieldAccessOnly);
|
||||
dbo.put("_id", idMapper.convertId(id));
|
||||
} catch (ConversionException ignored) {
|
||||
}
|
||||
} catch (ConversionException ignored) {}
|
||||
}
|
||||
|
||||
// Write the properties
|
||||
@@ -376,7 +403,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
if (!conversions.isSimpleType(propertyObj.getClass())) {
|
||||
writePropertyInternal(propertyObj, dbo, prop);
|
||||
} else {
|
||||
writeSimpleInternal(propertyObj, dbo, prop.getFieldName());
|
||||
writeSimpleInternal(propertyObj, dbo, prop);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -401,26 +428,27 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return;
|
||||
}
|
||||
|
||||
String name = prop.getFieldName();
|
||||
DBObjectAccessor accessor = new DBObjectAccessor(dbo);
|
||||
|
||||
TypeInformation<?> valueType = ClassTypeInformation.from(obj.getClass());
|
||||
TypeInformation<?> type = prop.getTypeInformation();
|
||||
|
||||
if (valueType.isCollectionLike()) {
|
||||
DBObject collectionInternal = createCollection(asCollection(obj), prop);
|
||||
dbo.put(name, collectionInternal);
|
||||
accessor.put(prop, collectionInternal);
|
||||
return;
|
||||
}
|
||||
|
||||
if (valueType.isMap()) {
|
||||
DBObject mapDbObj = createMap((Map<Object, Object>) obj, prop);
|
||||
dbo.put(name, mapDbObj);
|
||||
accessor.put(prop, mapDbObj);
|
||||
return;
|
||||
}
|
||||
|
||||
if (prop.isDbReference()) {
|
||||
DBRef dbRefObj = createDBRef(obj, prop.getDBRef());
|
||||
if (null != dbRefObj) {
|
||||
dbo.put(name, dbRefObj);
|
||||
accessor.put(prop, dbRefObj);
|
||||
return;
|
||||
}
|
||||
}
|
||||
@@ -429,18 +457,20 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
Class<?> basicTargetType = conversions.getCustomWriteTarget(obj.getClass(), null);
|
||||
|
||||
if (basicTargetType != null) {
|
||||
dbo.put(name, conversionService.convert(obj, basicTargetType));
|
||||
accessor.put(prop, conversionService.convert(obj, basicTargetType));
|
||||
return;
|
||||
}
|
||||
|
||||
BasicDBObject propDbObj = new BasicDBObject();
|
||||
Object existingValue = accessor.get(prop);
|
||||
BasicDBObject propDbObj = existingValue instanceof BasicDBObject ? (BasicDBObject) existingValue
|
||||
: new BasicDBObject();
|
||||
addCustomTypeKeyIfNecessary(type, obj, propDbObj);
|
||||
|
||||
MongoPersistentEntity<?> entity = isSubtype(prop.getType(), obj.getClass()) ? mappingContext
|
||||
.getPersistentEntity(obj.getClass()) : mappingContext.getPersistentEntity(type);
|
||||
|
||||
writeInternal(obj, propDbObj, entity);
|
||||
dbo.put(name, propDbObj);
|
||||
accessor.put(prop, propDbObj);
|
||||
}
|
||||
|
||||
private boolean isSubtype(Class<?> left, Class<?> right) {
|
||||
@@ -658,6 +688,11 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
dbObject.put(key, getPotentiallyConvertedSimpleWrite(value));
|
||||
}
|
||||
|
||||
private void writeSimpleInternal(Object value, DBObject dbObject, MongoPersistentProperty property) {
|
||||
DBObjectAccessor accessor = new DBObjectAccessor(dbObject);
|
||||
accessor.put(property, getPotentiallyConvertedSimpleWrite(value));
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether we have a custom conversion registered for the given value into an arbitrary simple Mongo type.
|
||||
* Returns the converted value if so. If not, we perform special enum handling or simply return the value as is.
|
||||
@@ -733,10 +768,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
throw new MappingException("Cannot create a reference to an object with a NULL id.");
|
||||
}
|
||||
|
||||
DB db = mongoDbFactory.getDb();
|
||||
db = dbref != null && StringUtils.hasText(dbref.db()) ? mongoDbFactory.getDb(dbref.db()) : db;
|
||||
|
||||
return new DBRef(db, targetEntity.getCollection(), idMapper.convertId(id));
|
||||
return dbRefResolver.createDbRef(dbref, targetEntity, idMapper.convertId(id));
|
||||
}
|
||||
|
||||
protected Object getValueInternal(MongoPersistentProperty prop, DBObject dbo, SpELExpressionEvaluator eval,
|
||||
@@ -776,7 +808,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
Object dbObjItem = sourceValue.get(i);
|
||||
|
||||
if (dbObjItem instanceof DBRef) {
|
||||
items.add(DBRef.class.equals(rawComponentType) ? dbObjItem : read(componentType, ((DBRef) dbObjItem).fetch(),
|
||||
items.add(DBRef.class.equals(rawComponentType) ? dbObjItem : read(componentType, readRef((DBRef) dbObjItem),
|
||||
parent));
|
||||
} else if (dbObjItem instanceof DBObject) {
|
||||
items.add(read(componentType, (DBObject) dbObjItem, parent));
|
||||
@@ -824,7 +856,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
if (value instanceof DBObject) {
|
||||
map.put(key, read(valueType, (DBObject) value, parent));
|
||||
} else if (value instanceof DBRef) {
|
||||
map.put(key, DBRef.class.equals(rawValueType) ? value : read(valueType, ((DBRef) value).fetch()));
|
||||
map.put(key, DBRef.class.equals(rawValueType) ? value : read(valueType, readRef((DBRef) value)));
|
||||
} else {
|
||||
Class<?> valueClass = valueType == null ? null : valueType.getType();
|
||||
map.put(key, getPotentiallyConvertedSimpleRead(value, valueClass));
|
||||
@@ -956,7 +988,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
private class MongoDbPropertyValueProvider implements PropertyValueProvider<MongoPersistentProperty> {
|
||||
|
||||
private final DBObject source;
|
||||
private final DBObjectAccessor source;
|
||||
private final SpELExpressionEvaluator evaluator;
|
||||
private final Object parent;
|
||||
|
||||
@@ -969,7 +1001,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
Assert.notNull(source);
|
||||
Assert.notNull(evaluator);
|
||||
|
||||
this.source = source;
|
||||
this.source = new DBObjectAccessor(source);
|
||||
this.evaluator = evaluator;
|
||||
this.parent = parent;
|
||||
}
|
||||
@@ -981,7 +1013,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
public <T> T getPropertyValue(MongoPersistentProperty property) {
|
||||
|
||||
String expression = property.getSpelExpression();
|
||||
Object value = expression != null ? evaluator.evaluate(expression) : source.get(property.getFieldName());
|
||||
Object value = expression != null ? evaluator.evaluate(expression) : source.get(property);
|
||||
|
||||
if (value == null) {
|
||||
return null;
|
||||
@@ -1034,7 +1066,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
if (conversions.hasCustomReadTarget(value.getClass(), rawType)) {
|
||||
return (T) conversionService.convert(value, rawType);
|
||||
} else if (value instanceof DBRef) {
|
||||
return (T) (rawType.equals(DBRef.class) ? value : read(type, ((DBRef) value).fetch(), parent));
|
||||
return (T) (rawType.equals(DBRef.class) ? value : read(type, readRef((DBRef) value), parent));
|
||||
} else if (value instanceof BasicDBList) {
|
||||
return (T) readCollectionOrArray(type, (BasicDBList) value, parent);
|
||||
} else if (value instanceof DBObject) {
|
||||
@@ -1043,4 +1075,14 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return (T) getPotentiallyConvertedSimpleRead(value, rawType);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs the fetch operation for the given {@link DBRef}.
|
||||
*
|
||||
* @param ref
|
||||
* @return
|
||||
*/
|
||||
DBObject readRef(DBRef ref) {
|
||||
return ref.fetch();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import org.springframework.data.convert.EntityConverter;
|
||||
import org.springframework.data.convert.EntityReader;
|
||||
import org.springframework.data.convert.TypeMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
|
||||
@@ -26,9 +27,17 @@ import com.mongodb.DBObject;
|
||||
* Central Mongo specific converter interface which combines {@link MongoWriter} and {@link MongoReader}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public interface MongoConverter extends
|
||||
EntityConverter<MongoPersistentEntity<?>, MongoPersistentProperty, Object, DBObject>, MongoWriter<Object>,
|
||||
EntityReader<Object, DBObject> {
|
||||
|
||||
/**
|
||||
* Returns the {@link TypeMapper} being used to write type information into {@link DBObject}s created with that
|
||||
* converter.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
MongoTypeMapper getTypeMapper();
|
||||
}
|
||||
|
||||
@@ -24,8 +24,11 @@ import org.bson.types.ObjectId;
|
||||
import org.springframework.core.convert.ConversionFailedException;
|
||||
import org.springframework.core.convert.TypeDescriptor;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.convert.ReadingConverter;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Wrapper class to contain useful converters for the usage with Mongo.
|
||||
*
|
||||
@@ -147,4 +150,15 @@ abstract class MongoConverters {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ReadingConverter
|
||||
public static enum DBObjectToStringConverter implements Converter<DBObject, String> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public String convert(DBObject source) {
|
||||
return source == null ? null : source.toString();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,6 +15,8 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.data.convert.TypeMapper;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
@@ -32,4 +34,14 @@ public interface MongoTypeMapper extends TypeMapper<DBObject> {
|
||||
* @return
|
||||
*/
|
||||
boolean isTypeKey(String key);
|
||||
|
||||
/**
* Writes type restrictions to the given {@link DBObject}. This usually results in an {@code $in}-clause to be
* generated that restricts the type-key (e.g. {@code _class}) to be in the set of type aliases for the given
* {@code restrictedTypes}.
*
* @param result must not be {@literal null}
* @param restrictedTypes must not be {@literal null}
*/
void writeTypeRestrictions(DBObject result, Set<Class<?>> restrictedTypes);
}
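To make the new contract concrete, a sketch of the restriction produced with the default `_class` type key (the demo `Person` type and the commented output are assumptions):

```java
import java.util.Collections;
import java.util.Set;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

import org.springframework.data.mongodb.core.convert.DefaultMongoTypeMapper;
import org.springframework.data.mongodb.core.convert.MongoTypeMapper;

public class TypeRestrictionSample {

    static class Person {}

    public static void main(String[] args) {

        MongoTypeMapper typeMapper = new DefaultMongoTypeMapper();

        DBObject query = new BasicDBObject();
        Set<Class<?>> restrictedTypes = Collections.<Class<?>> singleton(Person.class);

        typeMapper.writeTypeRestrictions(query, restrictedTypes);

        // query is now roughly { "_class" : { "$in" : [ "TypeRestrictionSample$Person" ] } }
        System.out.println(query);
    }
}
```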
|
||||
|
||||
@@ -18,6 +18,7 @@ package org.springframework.data.mongodb.core.convert;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.bson.types.ObjectId;
|
||||
import org.springframework.core.convert.ConversionException;
|
||||
@@ -29,6 +30,7 @@ import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.PersistentPropertyPath;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
@@ -46,7 +48,6 @@ import com.mongodb.DBRef;
|
||||
public class QueryMapper {
|
||||
|
||||
private static final List<String> DEFAULT_ID_NAMES = Arrays.asList("id", "_id");
|
||||
private static final String N_OR_PATTERN = "\\$.*or";
|
||||
|
||||
private final ConversionService conversionService;
|
||||
private final MongoConverter converter;
|
||||
@@ -74,9 +75,10 @@ public class QueryMapper {
|
||||
* @param entity can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public DBObject getMappedObject(DBObject query, MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (Keyword.isKeyword(query)) {
|
||||
if (isNestedKeyword(query)) {
|
||||
return getMappedKeyword(new Keyword(query), entity);
|
||||
}
|
||||
|
||||
@@ -84,8 +86,32 @@ public class QueryMapper {
|
||||
|
||||
for (String key : query.keySet()) {
|
||||
|
||||
// TODO: remove one once QueryMapper can work with Query instances directly
|
||||
if (Query.isRestrictedTypeKey(key)) {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
Set<Class<?>> restrictedTypes = (Set<Class<?>>) query.get(key);
|
||||
this.converter.getTypeMapper().writeTypeRestrictions(result, restrictedTypes);
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
if (isKeyword(key)) {
|
||||
result.putAll(getMappedKeyword(new Keyword(query, key), entity));
|
||||
continue;
|
||||
}
|
||||
|
||||
Field field = entity == null ? new Field(key) : new MetadataBackedField(key, entity, mappingContext);
|
||||
result.put(field.getMappedKey(), getMappedValue(query.get(key), field));
|
||||
|
||||
Object rawValue = query.get(key);
|
||||
String newKey = field.getMappedKey();
|
||||
|
||||
if (isNestedKeyword(rawValue) && !field.isIdField()) {
|
||||
Keyword keyword = new Keyword((DBObject) rawValue);
|
||||
result.put(newKey, getMappedKeyword(field, keyword));
|
||||
} else {
|
||||
result.put(newKey, getMappedValue(field, rawValue));
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
@@ -94,59 +120,62 @@ public class QueryMapper {
|
||||
/**
|
||||
* Returns the given {@link DBObject} representing a keyword by mapping the keyword's value.
|
||||
*
|
||||
* @param query the {@link DBObject} representing a keyword (e.g. {@code $ne : … } )
|
||||
* @param keyword the {@link DBObject} representing a keyword (e.g. {@code $ne : … } )
|
||||
* @param entity
|
||||
* @return
|
||||
*/
|
||||
private DBObject getMappedKeyword(Keyword query, MongoPersistentEntity<?> entity) {
|
||||
private DBObject getMappedKeyword(Keyword keyword, MongoPersistentEntity<?> entity) {
|
||||
|
||||
// $or/$nor
|
||||
if (query.key.matches(N_OR_PATTERN)) {
|
||||
if (keyword.isOrOrNor() || keyword.hasIterableValue()) {
|
||||
|
||||
Iterable<?> conditions = (Iterable<?>) query.value;
|
||||
Iterable<?> conditions = keyword.getValue();
|
||||
BasicDBList newConditions = new BasicDBList();
|
||||
|
||||
for (Object condition : conditions) {
|
||||
newConditions.add(getMappedObject((DBObject) condition, entity));
|
||||
newConditions.add(condition instanceof DBObject ? getMappedObject((DBObject) condition, entity)
|
||||
: convertSimpleOrDBObject(condition, entity));
|
||||
}
|
||||
|
||||
return new BasicDBObject(query.key, newConditions);
|
||||
return new BasicDBObject(keyword.getKey(), newConditions);
|
||||
}
|
||||
|
||||
return new BasicDBObject(query.key, convertSimpleOrDBObject(query.value, entity));
|
||||
return new BasicDBObject(keyword.getKey(), convertSimpleOrDBObject(keyword.getValue(), entity));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the mapped keyword considered defining a criteria for the given property.
|
||||
*
|
||||
* @param keyword
|
||||
* @param property
|
||||
* @param keyword
|
||||
* @return
|
||||
*/
|
||||
public DBObject getMappedKeyword(Keyword keyword, Field property) {
|
||||
private DBObject getMappedKeyword(Field property, Keyword keyword) {
|
||||
|
||||
if (property.isAssociation()) {
|
||||
convertAssociation(keyword.value, property.getProperty());
|
||||
}
|
||||
boolean needsAssociationConversion = property.isAssociation() && !keyword.isExists();
|
||||
Object value = keyword.getValue();
|
||||
|
||||
return new BasicDBObject(keyword.key, getMappedValue(keyword.value, property.with(keyword.key)));
|
||||
Object convertedValue = needsAssociationConversion ? convertAssociation(value, property.getProperty())
|
||||
: getMappedValue(property.with(keyword.getKey()), value);
|
||||
|
||||
return new BasicDBObject(keyword.key, convertedValue);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the mapped value for the given source object assuming it's a value for the given
|
||||
* {@link MongoPersistentProperty}.
|
||||
*
|
||||
* @param source the source object to be mapped
|
||||
* @param value the source object to be mapped
|
||||
* @param property the property the value is a value for
|
||||
* @param newKey the key the value will be bound to eventually
|
||||
* @return
|
||||
*/
|
||||
private Object getMappedValue(Object source, Field key) {
|
||||
private Object getMappedValue(Field documentField, Object value) {
|
||||
|
||||
if (key.isIdField()) {
|
||||
if (documentField.isIdField()) {
|
||||
|
||||
if (source instanceof DBObject) {
|
||||
DBObject valueDbo = (DBObject) source;
|
||||
if (value instanceof DBObject) {
|
||||
DBObject valueDbo = (DBObject) value;
|
||||
if (valueDbo.containsField("$in") || valueDbo.containsField("$nin")) {
|
||||
String inKey = valueDbo.containsField("$in") ? "$in" : "$nin";
|
||||
List<Object> ids = new ArrayList<Object>();
|
||||
@@ -157,22 +186,25 @@ public class QueryMapper {
|
||||
} else if (valueDbo.containsField("$ne")) {
|
||||
valueDbo.put("$ne", convertId(valueDbo.get("$ne")));
|
||||
} else {
|
||||
return getMappedObject((DBObject) source, null);
|
||||
return getMappedObject((DBObject) value, null);
|
||||
}
|
||||
|
||||
return valueDbo;
|
||||
|
||||
} else {
|
||||
return convertId(source);
|
||||
return convertId(value);
|
||||
}
|
||||
}
|
||||
|
||||
if (key.isAssociation()) {
|
||||
return Keyword.isKeyword(source) ? getMappedKeyword(new Keyword(source), key) : convertAssociation(source,
|
||||
key.getProperty());
|
||||
if (isNestedKeyword(value)) {
|
||||
return getMappedKeyword(new Keyword((DBObject) value), null);
|
||||
}
|
||||
|
||||
return convertSimpleOrDBObject(source, key.getPropertyEntity());
|
||||
if (documentField.isAssociation()) {
|
||||
return convertAssociation(value, documentField.getProperty());
|
||||
}
|
||||
|
||||
return convertSimpleOrDBObject(value, documentField.getPropertyEntity());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -185,18 +217,30 @@ public class QueryMapper {
|
||||
private Object convertSimpleOrDBObject(Object source, MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (source instanceof BasicDBList) {
|
||||
return converter.convertToMongoType(source);
|
||||
return delegateConvertToMongoType(source, entity);
|
||||
}
|
||||
|
||||
if (source instanceof DBObject) {
|
||||
return getMappedObject((DBObject) source, entity);
|
||||
}
|
||||
|
||||
return delegateConvertToMongoType(source, entity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the given source Object to a mongo type with the type information of the original source type omitted.
|
||||
* Subclasses may override this method to retain the type information of the source type on the resulting mongo type.
|
||||
*
|
||||
* @param source
|
||||
* @param entity
|
||||
* @return the converted mongo type or null if source is null
|
||||
*/
|
||||
protected Object delegateConvertToMongoType(Object source, MongoPersistentEntity<?> entity) {
|
||||
return converter.convertToMongoType(source);
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the given source assuming it's actually an association to anoter object.
|
||||
* Converts the given source assuming it's actually an association to another object.
|
||||
*
|
||||
* @param source
|
||||
* @param property
|
||||
@@ -243,7 +287,40 @@ public class QueryMapper {
|
||||
// Ignore
|
||||
}
|
||||
|
||||
return converter.convertToMongoType(id);
|
||||
return delegateConvertToMongoType(id, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given {@link Object} is a keyword, i.e. if it's a {@link DBObject} with a keyword key.
|
||||
*
|
||||
* @param candidate
|
||||
* @return
|
||||
*/
|
||||
protected boolean isNestedKeyword(Object candidate) {
|
||||
|
||||
if (!(candidate instanceof BasicDBObject)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
BasicDBObject dbObject = (BasicDBObject) candidate;
|
||||
Set<String> keys = dbObject.keySet();
|
||||
|
||||
if (keys.size() != 1) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return isKeyword(keys.iterator().next().toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given {@link String} is a MongoDB keyword. The default implementation will check against the
|
||||
* set of registered keywords returned by {@link #getKeywords()}.
|
||||
*
|
||||
* @param candidate
|
||||
* @return
|
||||
*/
|
||||
protected boolean isKeyword(String candidate) {
|
||||
return candidate.startsWith("$");
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -253,36 +330,49 @@ public class QueryMapper {
|
||||
*/
|
||||
private static class Keyword {
|
||||
|
||||
String key;
|
||||
Object value;
|
||||
private static final String N_OR_PATTERN = "\\$.*or";
|
||||
|
||||
Keyword(Object source) {
|
||||
private final String key;
|
||||
private final Object value;
|
||||
|
||||
Assert.isInstanceOf(DBObject.class, source);
|
||||
public Keyword(DBObject source, String key) {
|
||||
this.key = key;
|
||||
this.value = source.get(key);
|
||||
}
|
||||
|
||||
DBObject value = (DBObject) source;
|
||||
public Keyword(DBObject dbObject) {
|
||||
|
||||
Assert.isTrue(value.keySet().size() == 1, "Keyword must have a single key only!");
|
||||
Set<String> keys = dbObject.keySet();
|
||||
Assert.isTrue(keys.size() == 1, "Can only use a single value DBObject!");
|
||||
|
||||
this.key = value.keySet().iterator().next();
|
||||
this.value = value.get(key);
|
||||
this.key = keys.iterator().next();
|
||||
this.value = dbObject.get(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given value actually represents a keyword. If this returns {@literal true} it's safe to call
|
||||
* the constructor.
|
||||
* Returns whether the current keyword is the {@code $exists} keyword.
|
||||
*
|
||||
* @param value
|
||||
* @return
|
||||
*/
|
||||
static boolean isKeyword(Object value) {
|
||||
public boolean isExists() {
|
||||
return "$exists".equalsIgnoreCase(key);
|
||||
}
|
||||
|
||||
if (!(value instanceof DBObject)) {
|
||||
return false;
|
||||
}
|
||||
public boolean isOrOrNor() {
|
||||
return key.matches(N_OR_PATTERN);
|
||||
}
|
||||
|
||||
DBObject dbObject = (DBObject) value;
|
||||
return dbObject.keySet().size() == 1 && dbObject.keySet().iterator().next().startsWith("$");
|
||||
public boolean hasIterableValue() {
|
||||
return value instanceof Iterable;
|
||||
}
|
||||
|
||||
public String getKey() {
|
||||
return key;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public <T> T getValue() {
|
||||
return (T) value;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -298,7 +388,7 @@ public class QueryMapper {
|
||||
protected final String name;
|
||||
|
||||
/**
|
||||
* Creates a new {@link Field} without meta-information but the given name.
|
||||
* Creates a new {@link DocumentField} without meta-information but the given name.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
*/
|
||||
@@ -309,7 +399,7 @@ public class QueryMapper {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a new {@link Field} with the given name.
|
||||
* Returns a new {@link DocumentField} with the given name.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
* @return
|
||||
@@ -365,7 +455,7 @@ public class QueryMapper {
|
||||
}
|
||||
|
||||
/**
|
||||
* Extension of {@link Field} to be backed with mapping metadata.
|
||||
* Extension of {@link DocumentField} to be backed with mapping metadata.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
|
||||
@@ -0,0 +1,52 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
|
||||
/**
|
||||
* A subclass of {@link QueryMapper} that retains type information on the mongo types.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class UpdateMapper extends QueryMapper {
|
||||
|
||||
private final MongoWriter<?> converter;
|
||||
|
||||
/**
|
||||
* Creates a new {@link UpdateMapper} using the given {@link MongoConverter}.
|
||||
*
|
||||
* @param converter must not be {@literal null}.
|
||||
*/
|
||||
public UpdateMapper(MongoConverter converter) {
|
||||
|
||||
super(converter);
|
||||
this.converter = converter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the given source object to a mongo type retaining the original type information of the source type on the
|
||||
* mongo type.
|
||||
*
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper#delegateConvertToMongoType(java.lang.Object,
|
||||
* org.springframework.data.mongodb.core.mapping.MongoPersistentEntity)
|
||||
*/
|
||||
@Override
|
||||
protected Object delegateConvertToMongoType(Object source, MongoPersistentEntity<?> entity) {
|
||||
return entity == null ? super.delegateConvertToMongoType(source, null) : converter.convertToMongoType(source,
|
||||
entity.getTypeInformation());
|
||||
}
|
||||
}
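The difference to the plain `QueryMapper` only becomes visible for polymorphic values inside an update document: the type alias is kept so the value can be read back as its concrete type. A rough, self-contained sketch (the `Wrapper`/`Contact`/`Person` types and the commented output are assumptions for illustration):

```java
import com.mongodb.DBObject;
import com.mongodb.MongoClient;

import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.UpdateMapper;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.query.Update;

public class UpdateMapperSample {

    static class Wrapper {
        Contact contact;
    }

    static class Contact {}

    static class Person extends Contact {
        String name = "Carter";
    }

    public static void main(String[] args) throws Exception {

        MongoDbFactory factory = new SimpleMongoDbFactory(new MongoClient(), "database");
        MongoMappingContext mappingContext = new MongoMappingContext();
        MappingMongoConverter converter =
                new MappingMongoConverter(new DefaultDbRefResolver(factory), mappingContext);
        converter.afterPropertiesSet();

        UpdateMapper updateMapper = new UpdateMapper(converter);

        DBObject mappedUpdate = updateMapper.getMappedObject(
                new Update().set("contact", new Person()).getUpdateObject(),
                mappingContext.getPersistentEntity(Wrapper.class));

        // Roughly { "$set" : { "contact" : { "name" : "Carter", "_class" : "...$Person" } } },
        // whereas the plain QueryMapper would omit the _class entry.
        System.out.println(mappedUpdate);
    }
}
```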
|
||||
@@ -0,0 +1,5 @@
|
||||
/**
|
||||
* Spring Data MongoDB specific converter infrastructure.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
@@ -131,7 +131,7 @@ public class GeoResults<T> implements Iterable<GeoResult<T>> {
|
||||
private static Distance calculateAverageDistance(List<? extends GeoResult<?>> results, Metric metric) {
|
||||
|
||||
if (results.isEmpty()) {
|
||||
return new Distance(0, null);
|
||||
return new Distance(0, metric);
|
||||
}
|
||||
|
||||
double averageDistance = 0;
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
/**
|
||||
* Support for MongoDB geo-spatial queries.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.geo;
|
||||
|
||||
@@ -27,6 +27,7 @@ import java.lang.annotation.Target;
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Philipp Schneider
|
||||
* @author Johno Crawford
|
||||
*/
|
||||
@Target({ ElementType.TYPE })
|
||||
@Documented
|
||||
@@ -78,4 +79,12 @@ public @interface CompoundIndex {
|
||||
* @return
|
||||
*/
|
||||
boolean background() default false;
|
||||
|
||||
/**
* Configures the number of seconds after which documents in the collection expire. Defaults to -1 for no expiry.
*
* @see http://docs.mongodb.org/manual/tutorial/expire-data/
* @return
*/
int expireAfterSeconds() default -1;
}
|
||||
|
||||
@@ -26,6 +26,7 @@ import java.lang.annotation.Target;
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Philipp Schneider
|
||||
* @author Johno Crawford
|
||||
*/
|
||||
@Target(ElementType.FIELD)
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@@ -50,4 +51,12 @@ public @interface Indexed {
|
||||
* @return
|
||||
*/
|
||||
boolean background() default false;
|
||||
|
||||
/**
* Configures the number of seconds after which documents in the collection expire. Defaults to -1 for no expiry.
*
* @see http://docs.mongodb.org/manual/tutorial/expire-data/
* @return
*/
int expireAfterSeconds() default -1;
}
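Combined with the `MongoPersistentEntityIndexCreator` change below, this maps onto a MongoDB TTL index. A sketch of how a domain class might opt in (the entity and field names are made up):

```java
import java.util.Date;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.index.Indexed;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
public class LoginAttempt {

    @Id
    private String id;

    // MongoDB removes each document roughly one hour after the value of createdAt.
    @Indexed(expireAfterSeconds = 3600)
    private Date createdAt;
}
```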
|
||||
|
||||
@@ -44,6 +44,7 @@ import com.mongodb.util.JSON;
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Philipp Schneider
|
||||
* @author Johno Crawford
|
||||
*/
|
||||
public class MongoPersistentEntityIndexCreator implements
|
||||
ApplicationListener<MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty>> {
|
||||
@@ -108,7 +109,7 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
DBObject definition = (DBObject) JSON.parse(index.def());
|
||||
|
||||
ensureIndex(indexColl, index.name(), definition, index.unique(), index.dropDups(), index.sparse(),
|
||||
index.background());
|
||||
index.background(), index.expireAfterSeconds());
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Created compound index " + index);
|
||||
@@ -143,7 +144,7 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
DBObject definition = new BasicDBObject(persistentProperty.getFieldName(), direction);
|
||||
|
||||
ensureIndex(collection, name, definition, index.unique(), index.dropDups(), index.sparse(),
|
||||
index.background());
|
||||
index.background(), index.expireAfterSeconds());
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Created property index " + index);
|
||||
@@ -192,9 +193,11 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
* @param unique whether it shall be a unique index
|
||||
* @param dropDups whether to drop duplicates
|
||||
* @param sparse sparse or not
|
||||
* @param background whether the index will be created in the background
|
||||
* @param expireAfterSeconds the time to live for documents in the collection
|
||||
*/
|
||||
protected void ensureIndex(String collection, String name, DBObject indexDefinition, boolean unique,
|
||||
boolean dropDups, boolean sparse, boolean background) {
|
||||
boolean dropDups, boolean sparse, boolean background, int expireAfterSeconds) {
|
||||
|
||||
DBObject opts = new BasicDBObject();
|
||||
opts.put("name", name);
|
||||
@@ -203,6 +206,10 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
opts.put("unique", unique);
|
||||
opts.put("background", background);
|
||||
|
||||
if (expireAfterSeconds != -1) {
|
||||
opts.put("expireAfterSeconds", expireAfterSeconds);
|
||||
}
|
||||
|
||||
mongoDbFactory.getDb().getCollection(collection).ensureIndex(indexDefinition, opts);
|
||||
}
|
||||
}
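For reference, the options document assembled above corresponds to the following plain driver call; this is only a sketch of the equivalent shape, with the `logins` collection, index name, and one-hour expiry as assumed values:

```java
import java.net.UnknownHostException;

import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBObject;
import com.mongodb.MongoClient;

public class TtlIndexSample {

    public static void main(String[] args) throws UnknownHostException {

        DB db = new MongoClient().getDB("database");

        DBObject keys = new BasicDBObject("createdAt", 1);

        DBObject options = new BasicDBObject();
        options.put("name", "createdAt_ttl");
        options.put("background", false);
        options.put("expireAfterSeconds", 3600);

        // Same effect as the index creator above: a TTL index on createdAt.
        db.getCollection("logins").ensureIndex(keys, options);
    }
}
```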
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
/**
|
||||
* Support for MongoDB document indexing.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.lang.reflect.Field;
|
||||
import java.util.Comparator;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
@@ -24,6 +25,7 @@ import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.ApplicationContextAware;
|
||||
import org.springframework.context.expression.BeanFactoryAccessor;
|
||||
import org.springframework.context.expression.BeanFactoryResolver;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.AssociationHandler;
|
||||
import org.springframework.data.mapping.PropertyHandler;
|
||||
@@ -35,6 +37,7 @@ import org.springframework.expression.Expression;
|
||||
import org.springframework.expression.ParserContext;
|
||||
import org.springframework.expression.spel.standard.SpelExpressionParser;
|
||||
import org.springframework.expression.spel.support.StandardEvaluationContext;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
@@ -43,11 +46,12 @@ import org.springframework.util.StringUtils;
|
||||
*
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, MongoPersistentProperty> implements
|
||||
MongoPersistentEntity<T>, ApplicationContextAware {
|
||||
|
||||
private static final String AMBIGUOUS_FIELD_MAPPING = "Ambiguous field mapping detected! Both %s and %s map to the same field name %s! Disambiguate using @Field annotation!";
|
||||
private static final String AMBIGUOUS_FIELD_MAPPING = "Ambiguous field mapping detected! Both %s and %s map to the same field name %s! Disambiguate using @DocumentField annotation!";
|
||||
private final String collection;
|
||||
private final SpelExpressionParser parser;
|
||||
private final StandardEvaluationContext context;
|
||||
@@ -136,6 +140,60 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
}
|
||||
}
|
||||
|
||||
/**
* As a general note: an implicit id property is one whose name matches "id" or "_id"; an explicit id property is one
* annotated with {@link Id}. The entity's id property is updated according to the following rules: 1) an explicitly
* defined id property takes precedence over an implicitly defined one, 2) in case of any ambiguity a
* {@link MappingException} is thrown.
*
* @param property - the new id property candidate
* @return
*/
@Override
|
||||
protected MongoPersistentProperty returnPropertyIfBetterIdPropertyCandidateOrNull(MongoPersistentProperty property) {
|
||||
|
||||
Assert.notNull(property);
|
||||
|
||||
if (!property.isIdProperty()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
MongoPersistentProperty currentIdProperty = getIdProperty();
|
||||
|
||||
boolean currentIdPropertyIsSet = currentIdProperty != null;
|
||||
@SuppressWarnings("null")
|
||||
boolean currentIdPropertyIsExplicit = currentIdPropertyIsSet ? currentIdProperty.isExplicitIdProperty() : false;
|
||||
boolean newIdPropertyIsExplicit = property.isExplicitIdProperty();
|
||||
|
||||
if (!currentIdPropertyIsSet) {
|
||||
return property;
|
||||
|
||||
}
|
||||
|
||||
@SuppressWarnings("null")
|
||||
Field currentIdPropertyField = currentIdProperty.getField();
|
||||
|
||||
if (newIdPropertyIsExplicit && currentIdPropertyIsExplicit) {
|
||||
throw new MappingException(String.format(
|
||||
"Attempt to add explicit id property %s but already have an property %s registered "
|
||||
+ "as explicit id. Check your mapping configuration!", property.getField(), currentIdPropertyField));
|
||||
|
||||
} else if (newIdPropertyIsExplicit && !currentIdPropertyIsExplicit) {
|
||||
// explicit id property takes precedence over implicit id property
|
||||
return property;
|
||||
|
||||
} else if (!newIdPropertyIsExplicit && currentIdPropertyIsExplicit) {
|
||||
// no id property override - current property is explicitly defined
|
||||
|
||||
} else {
|
||||
throw new MappingException(String.format(
|
||||
"Attempt to add id property %s but already have an property %s registered "
|
||||
+ "as id. Check your mapping configuration!", property.getField(), currentIdPropertyField));
|
||||
}
|
||||
|
||||
return null;
|
||||
}
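In entity terms the rules above mean that an `@Id`-annotated property wins over a field that merely happens to be called `id`, while two explicit candidates are rejected with a `MappingException`. A sketch with an assumed demo class:

```java
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
public class Account {

    // Implicit candidate: matches the well-known name "id" ...
    private String id;

    // ... but the explicit @Id property takes precedence and is mapped to "_id".
    // A second @Id property in this class would be rejected with a MappingException.
    @Id
    private String accountNumber;
}
```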
|
||||
|
||||
/**
|
||||
* Handler to collect {@link MongoPersistentProperty} instances and check that each of them is mapped to a distinct
|
||||
* field name.
|
||||
|
||||
@@ -24,6 +24,7 @@ import java.util.Set;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.model.AnnotationBasedPersistentProperty;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
@@ -38,6 +39,7 @@ import com.mongodb.DBObject;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Patryk Wasik
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class BasicMongoPersistentProperty extends AnnotationBasedPersistentProperty<MongoPersistentProperty> implements
|
||||
MongoPersistentProperty {
|
||||
@@ -109,6 +111,15 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
return SUPPORTED_ID_PROPERTY_NAMES.contains(field.getName());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isExplicitIdProperty()
|
||||
*/
|
||||
@Override
|
||||
public boolean isExplicitIdProperty() {
|
||||
return isAnnotationPresent(Id.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the key to be used to store the value of the property inside a Mongo {@link DBObject}.
|
||||
*
|
||||
@@ -117,7 +128,18 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
public String getFieldName() {
|
||||
|
||||
if (isIdProperty()) {
|
||||
return ID_FIELD_NAME;
|
||||
|
||||
if (owner == null) {
|
||||
return ID_FIELD_NAME;
|
||||
}
|
||||
|
||||
if (owner.getIdProperty() == null) {
|
||||
return ID_FIELD_NAME;
|
||||
}
|
||||
|
||||
if (owner.isIdProperty(this)) {
|
||||
return ID_FIELD_NAME;
|
||||
}
|
||||
}
|
||||
|
||||
org.springframework.data.mongodb.core.mapping.Field annotation = findAnnotation(org.springframework.data.mongodb.core.mapping.Field.class);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 by the original author(s).
|
||||
* Copyright 2011-2013 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -27,7 +27,8 @@ import org.springframework.data.annotation.Reference;
|
||||
* An annotation that indicates the annotated field is to be stored using a {@link com.mongodb.DBRef}.
|
||||
*
|
||||
* @author Jon Brisbin
|
||||
* @authot Oliver Gierke
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@Documented
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@@ -41,4 +42,11 @@ public @interface DBRef {
|
||||
* @return
|
||||
*/
|
||||
String db() default "";
|
||||
|
||||
/**
* Controls whether the referenced entity should be loaded lazily. This defaults to {@literal false}.
*
* @return
*/
boolean lazy() default false;
}
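A sketch of how the new attribute is meant to be used on a domain class (the types are illustrative); together with the `DbRefResolver` introduced in this change set, the referenced document is then fetched on first access rather than eagerly while reading the owning entity:

```java
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.DBRef;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
class Publisher {
    @Id String id;
    String name;
}

@Document
public class Book {

    @Id
    private String id;

    // Resolved through the DbRefResolver when first accessed instead of eagerly on read.
    @DBRef(lazy = true)
    private Publisher publisher;
}
```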
|
||||
|
||||
@@ -18,7 +18,7 @@ package org.springframework.data.mongodb.core.mapping;
|
||||
/**
|
||||
* SPI interface to determine how to name document fields in cases the field name is not manually defined.
|
||||
*
|
||||
* @see Field
|
||||
* @see DocumentField
|
||||
* @see PropertyNameFieldNamingStrategy
|
||||
* @see CamelCaseAbbreviatingFieldNamingStrategy
|
||||
* @since 1.3
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,6 +16,8 @@
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
|
||||
/**
|
||||
@@ -23,6 +25,7 @@ import org.springframework.data.mapping.PersistentProperty;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Patryk Wasik
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public interface MongoPersistentProperty extends PersistentProperty<MongoPersistentProperty> {
|
||||
|
||||
@@ -48,6 +51,14 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersist
|
||||
*/
|
||||
boolean isDbReference();
|
||||
|
||||
/**
|
||||
* Returns whether the property is explicitly marked as an identifier property of the owning {@link PersistentEntity}.
|
||||
* A property is an explicit id property if it is annotated with @see {@link Id}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
boolean isExplicitIdProperty();
|
||||
|
||||
/**
|
||||
* Returns the {@link DBRef} if the property is a reference.
|
||||
*
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
/**
|
||||
* Mapping event callback infrastructure for the MongoDB document-to-object mapping subsystem.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping.event;
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
/**
|
||||
* Infrastructure for the MongoDB document-to-object mapping subsystem.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
/**
|
||||
* Support for MongoDB map-reduce operations.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapreduce;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -39,6 +39,10 @@ import com.mongodb.DBObject;
|
||||
/**
|
||||
* Central class for creating queries. It follows a fluent API style so that you can easily chain together multiple
|
||||
* criteria. Static import of the 'Criteria.where' method will improve readability.
|
||||
*
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class Criteria implements CriteriaDefinition {
|
||||
|
||||
@@ -396,34 +400,54 @@ public class Criteria implements CriteriaDefinition {
|
||||
|
||||
/**
|
||||
* Creates an 'or' criteria using the $or operator for all of the provided criteria
|
||||
* <p>
|
||||
* Note that mongodb doesn't support an $or operator to be wrapped in a $not operator.
|
||||
* <p>
|
||||
*
|
||||
* @throws IllegalArgumentException if {@link #orOperator(Criteria...)} follows a not() call directly.
|
||||
* @param criteria
|
||||
*/
|
||||
public Criteria orOperator(Criteria... criteria) {
|
||||
BasicDBList bsonList = createCriteriaList(criteria);
|
||||
criteriaChain.add(new Criteria("$or").is(bsonList));
|
||||
return this;
|
||||
return registerCriteriaChainElement(new Criteria("$or").is(bsonList));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a 'nor' criteria using the $nor operator for all of the provided criteria
|
||||
* Creates a 'nor' criteria using the $nor operator for all of the provided criteria.
|
||||
* <p>
|
||||
* Note that mongodb doesn't support an $nor operator to be wrapped in a $not operator.
|
||||
* <p>
|
||||
*
|
||||
* @throws IllegalArgumentException if {@link #norOperator(Criteria...)} follows a not() call directly.
|
||||
* @param criteria
|
||||
*/
|
||||
public Criteria norOperator(Criteria... criteria) {
|
||||
BasicDBList bsonList = createCriteriaList(criteria);
|
||||
criteriaChain.add(new Criteria("$nor").is(bsonList));
|
||||
return this;
|
||||
return registerCriteriaChainElement(new Criteria("$nor").is(bsonList));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an 'and' criteria using the $and operator for all of the provided criteria
|
||||
* Creates an 'and' criteria using the $and operator for all of the provided criteria.
|
||||
* <p>
|
||||
* Note that mongodb doesn't support an $and operator to be wrapped in a $not operator.
|
||||
* <p>
|
||||
*
|
||||
* @throws IllegalArgumentException if {@link #andOperator(Criteria...)} follows a not() call directly.
|
||||
* @param criteria
|
||||
*/
|
||||
public Criteria andOperator(Criteria... criteria) {
|
||||
BasicDBList bsonList = createCriteriaList(criteria);
|
||||
criteriaChain.add(new Criteria("$and").is(bsonList));
|
||||
return registerCriteriaChainElement(new Criteria("$and").is(bsonList));
|
||||
}
|
||||
|
||||
private Criteria registerCriteriaChainElement(Criteria criteria) {
|
||||
|
||||
if (lastOperatorWasNot()) {
|
||||
throw new IllegalArgumentException("operator $not is not allowed around criteria chain element: "
|
||||
+ criteria.getCriteriaObject());
|
||||
} else {
|
||||
criteriaChain.add(criteria);
|
||||
}
|
||||
return this;
|
||||
}
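A short sketch of what the new guard changes for client code (criteria keys and values are made up): a plain `$or` keeps working, while chaining one of these operators directly after `not()` now fails fast instead of producing a query MongoDB cannot evaluate.

```java
import static org.springframework.data.mongodb.core.query.Criteria.*;

import org.springframework.data.mongodb.core.query.Criteria;

public class CriteriaSample {

    public static void main(String[] args) {

        // Fine: a plain $or over two criteria.
        Criteria valid = new Criteria().orOperator(
                where("status").is("ACTIVE"),
                where("age").gte(18));
        System.out.println(valid.getCriteriaObject());

        // Rejected: MongoDB does not support wrapping $or in $not, so this throws IllegalArgumentException.
        try {
            new Criteria("status").not().orOperator(where("age").gte(18));
        } catch (IllegalArgumentException expected) {
            System.out.println(expected.getMessage());
        }
    }
}
```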
|
||||
|
||||
@@ -468,6 +492,7 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
DBObject queryCriteria = new BasicDBObject();
|
||||
if (isValue != NOT_SET) {
|
||||
queryCriteria.put(this.key, this.isValue);
|
||||
|
||||
@@ -73,7 +73,7 @@ public class Field {
|
||||
*/
|
||||
public Field position(String field, int value) {
|
||||
|
||||
Assert.hasText(field, "Field must not be null or empty!");
|
||||
Assert.hasText(field, "DocumentField must not be null or empty!");
|
||||
|
||||
postionKey = field;
|
||||
positionValue = value;
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.query;
|
||||
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.mongodb.core.geo.CustomMetric;
|
||||
import org.springframework.data.mongodb.core.geo.Distance;
|
||||
import org.springframework.data.mongodb.core.geo.Metric;
|
||||
@@ -29,6 +30,7 @@ import com.mongodb.DBObject;
|
||||
* Builder class to build near-queries.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class NearQuery {
|
||||
|
||||
@@ -38,6 +40,7 @@ public class NearQuery {
|
||||
private Metric metric;
|
||||
private boolean spherical;
|
||||
private Integer num;
|
||||
private Integer skip;
|
||||
|
||||
/**
|
||||
* Creates a new {@link NearQuery}.
|
||||
@@ -116,7 +119,7 @@ public class NearQuery {
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the number of results to return.
|
||||
* Configures the maximum number of results to return.
|
||||
*
|
||||
* @param num
|
||||
* @return
|
||||
@@ -126,6 +129,29 @@ public class NearQuery {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the number of results to skip.
|
||||
*
|
||||
* @param skip
|
||||
* @return
|
||||
*/
|
||||
public NearQuery skip(int skip) {
|
||||
this.skip = skip;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link Pageable} to use.
|
||||
*
|
||||
* @param pageable
|
||||
* @return
|
||||
*/
|
||||
public NearQuery with(Pageable pageable) {
|
||||
this.num = pageable.getOffset() + pageable.getPageSize();
|
||||
this.skip = pageable.getOffset();
|
||||
return this;
|
||||
}
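A sketch of the paging behaviour this adds, assuming the second ten-element page of a geo-near search (`PageRequest` comes from Spring Data Commons):

```java
import org.springframework.data.domain.PageRequest;
import org.springframework.data.mongodb.core.geo.Metrics;
import org.springframework.data.mongodb.core.geo.Point;
import org.springframework.data.mongodb.core.query.NearQuery;

public class NearQuerySample {

    public static void main(String[] args) {

        // Page index 1 with 10 results per page: skip = 10, num = 20.
        NearQuery nearQuery = NearQuery.near(new Point(-73.99, 40.73), Metrics.KILOMETERS)
                .maxDistance(5)
                .with(new PageRequest(1, 10));

        System.out.println(nearQuery.getSkip()); // 10
        System.out.println(nearQuery.toDBObject());
    }
}
```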
|
||||
|
||||
/**
|
||||
* Sets the max distance results shall have from the configured origin. If a {@link Metric} was set before the given
|
||||
* value will be interpreted as being a value in that metric. E.g.
|
||||
@@ -290,9 +316,18 @@ public class NearQuery {
|
||||
*/
|
||||
public NearQuery query(Query query) {
|
||||
this.query = query;
|
||||
this.skip = query.getSkip();
|
||||
this.num = query.getLimit();
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the number of elements to skip.
|
||||
*/
|
||||
public Integer getSkip() {
|
||||
return skip;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link DBObject} built by the {@link NearQuery}.
|
||||
*
|
||||
|
||||
@@ -19,8 +19,11 @@ import static org.springframework.data.mongodb.core.query.SerializationUtils.*;
|
||||
import static org.springframework.util.ObjectUtils.*;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.Sort;
|
||||
@@ -34,9 +37,13 @@ import com.mongodb.DBObject;
|
||||
/**
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class Query {
|
||||
|
||||
private final static String RESTRICTED_TYPES_KEY = "_$RESTRICTED_TYPES";
|
||||
|
||||
private final Set<Class<?>> restrictedTypes = new HashSet<Class<?>>();
|
||||
private LinkedHashMap<String, Criteria> criteria = new LinkedHashMap<String, Criteria>();
|
||||
private Field fieldSpec;
|
||||
private Sort sort;
|
||||
@@ -54,8 +61,7 @@ public class Query {
|
||||
return new Query(criteria);
|
||||
}
|
||||
|
||||
public Query() {
|
||||
}
|
||||
public Query() {}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Query} using the given {@link Criteria}.
|
||||
@@ -161,13 +167,46 @@ public class Query {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the restrictedTypes
|
||||
*/
|
||||
public Set<Class<?>> getRestrictedTypes() {
|
||||
return restrictedTypes == null ? Collections.<Class<?>> emptySet() : restrictedTypes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Restricts the query to only return document instances that are exactly of the given types.
|
||||
*
|
||||
* @param type may not be {@literal null}
|
||||
* @param additionalTypes may not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
public Query restrict(Class<?> type, Class<?>... additionalTypes) {
|
||||
|
||||
Assert.notNull(type, "Type must not be null!");
|
||||
Assert.notNull(additionalTypes, "AdditionalTypes must not be null");
|
||||
|
||||
restrictedTypes.add(type);
|
||||
for (Class<?> additionalType : additionalTypes) {
|
||||
restrictedTypes.add(additionalType);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
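From the caller's perspective the restriction looks as follows; a sketch assuming a `Contact`/`Person` hierarchy stored in a single collection:

```java
import static org.springframework.data.mongodb.core.query.Criteria.*;
import static org.springframework.data.mongodb.core.query.Query.*;

import org.springframework.data.mongodb.core.query.Query;

public class RestrictSample {

    static class Contact {}

    static class Person extends Contact {}

    public static void main(String[] args) {

        // Only documents whose type alias resolves to Person are returned, even though
        // other Contact subtypes live in the same collection.
        Query query = query(where("lastname").is("Matthews")).restrict(Person.class);

        // The restricted types travel in the query object under an internal key and are
        // rewritten into an { "_class" : { "$in" : [ ... ] } } clause by the QueryMapper.
        System.out.println(query.getQueryObject());
    }
}
```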
|
||||
|
||||
public DBObject getQueryObject() {
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
for (String k : criteria.keySet()) {
|
||||
CriteriaDefinition c = criteria.get(k);
|
||||
DBObject cl = c.getCriteriaObject();
|
||||
dbo.putAll(cl);
|
||||
}
|
||||
|
||||
if (!restrictedTypes.isEmpty()) {
|
||||
dbo.put(RESTRICTED_TYPES_KEY, getRestrictedTypes());
|
||||
}
|
||||
|
||||
return dbo;
|
||||
}
|
||||
|
||||
@@ -266,4 +305,17 @@ public class Query {
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given key is the one used to hold the type restriction information.
|
||||
*
|
||||
* @deprecated don't call this method as the restricted type handling will undergo some significant changes going
|
||||
* forward.
|
||||
* @param key
|
||||
* @return
|
||||
*/
|
||||
@Deprecated
|
||||
public static boolean isRestrictedTypeKey(String key) {
|
||||
return RESTRICTED_TYPES_KEY.equals(key);
|
||||
}
|
||||
}
|
||||
|
||||
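The new `restrict(…)` method above limits results to documents of exactly the given types, and `getQueryObject()` transports that information under the internal `_$RESTRICTED_TYPES` key. A small sketch (the `Person`/`Employee` hierarchy is made up for illustration):

```java
import java.util.Set;

import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

class TypeRestrictionSketch {

	static class Person {} // illustrative persistent types
	static class Employee extends Person {}

	void restrictedQuery() {

		Query query = new Query(Criteria.where("lastname").is("Matthews"));

		// only documents that are exactly a Person or an Employee shall be returned
		query.restrict(Person.class, Employee.class);

		Set<Class<?>> types = query.getRestrictedTypes(); // contains Person and Employee
		// query.getQueryObject() now additionally carries the types under "_$RESTRICTED_TYPES"
	}
}
```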
@@ -31,6 +31,7 @@ import com.mongodb.DBObject;
* @author Thomas Risberg
* @author Mark Pollack
* @author Oliver Gierke
* @author Becca Gaspard
*/
public class Update {

@@ -90,6 +91,18 @@ public class Update {
return this;
}

/**
* Update using the $setOnInsert update modifier
*
* @param key
* @param value
* @return
*/
public Update setOnInsert(String key, Object value) {
addMultiFieldOperation("$setOnInsert", key, value);
return this;
}

/**
* Update using the $unset update modifier
*
@@ -217,22 +230,23 @@ public class Update {
return dbo;
}

@SuppressWarnings("unchecked")
protected void addMultiFieldOperation(String operator, String key, Object value) {

Object existingValue = this.modifierOps.get(operator);
LinkedHashMap<String, Object> keyValueMap;
DBObject keyValueMap;

if (existingValue == null) {
keyValueMap = new LinkedHashMap<String, Object>();
keyValueMap = new BasicDBObject();
this.modifierOps.put(operator, keyValueMap);
} else {
if (existingValue instanceof LinkedHashMap) {
keyValueMap = (LinkedHashMap<String, Object>) existingValue;
if (existingValue instanceof BasicDBObject) {
keyValueMap = (BasicDBObject) existingValue;
} else {
throw new InvalidDataAccessApiUsageException("Modifier Operations should be a LinkedHashMap but was "
+ existingValue.getClass());
}
}

keyValueMap.put(key, value);
}

}

||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.spel;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
|
||||
import org.springframework.expression.spel.ExpressionState;
|
||||
import org.springframework.expression.spel.SpelNode;
|
||||
import org.springframework.expression.spel.ast.Literal;
|
||||
import org.springframework.expression.spel.ast.MethodReference;
|
||||
import org.springframework.expression.spel.ast.Operator;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* A value object for nodes in an expression. Allows iterating ove potentially available child {@link ExpressionNode}s.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class ExpressionNode implements Iterable<ExpressionNode> {
|
||||
|
||||
private static final Iterator<ExpressionNode> EMPTY_ITERATOR = Collections.<ExpressionNode> emptySet().iterator();
|
||||
|
||||
private final SpelNode node;
|
||||
private final ExpressionState state;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExpressionNode} from the given {@link SpelNode} and {@link ExpressionState}.
|
||||
*
|
||||
* @param node must not be {@literal null}.
|
||||
* @param state must not be {@literal null}.
|
||||
*/
|
||||
protected ExpressionNode(SpelNode node, ExpressionState state) {
|
||||
|
||||
Assert.notNull(node, "SpelNode must not be null!");
|
||||
Assert.notNull(state, "ExpressionState must not be null!");
|
||||
|
||||
this.node = node;
|
||||
this.state = state;
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method to create {@link ExpressionNode}'s according to the given {@link SpelNode} and
|
||||
* {@link ExpressionState}.
|
||||
*
|
||||
* @param node
|
||||
* @param state must not be {@literal null}.
|
||||
* @return an {@link ExpressionNode} for the given {@link SpelNode} or {@literal null} if {@literal null} was given
|
||||
* for the {@link SpelNode}.
|
||||
*/
|
||||
public static ExpressionNode from(SpelNode node, ExpressionState state) {
|
||||
|
||||
if (node == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (node instanceof Operator) {
|
||||
return new OperatorNode((Operator) node, state);
|
||||
}
|
||||
|
||||
if (node instanceof MethodReference) {
|
||||
return new MethodReferenceNode((MethodReference) node, state);
|
||||
}
|
||||
|
||||
if (node instanceof Literal) {
|
||||
return new LiteralNode((Literal) node, state);
|
||||
}
|
||||
|
||||
return new ExpressionNode(node, state);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the {@link ExpressionNode}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public String getName() {
|
||||
return node.toStringAST();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the current {@link ExpressionNode} is backed by the given type.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public boolean isOfType(Class<?> type) {
|
||||
|
||||
Assert.notNull(type, "Type must not be empty!");
|
||||
return type.isAssignableFrom(node.getClass());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given {@link ExpressionNode} is representing the same backing node type as the current one.
|
||||
*
|
||||
* @param node
|
||||
* @return
|
||||
*/
|
||||
boolean isOfSameTypeAs(ExpressionNode node) {
|
||||
return node == null ? false : this.node.getClass().equals(node.node.getClass());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link ExpressionNode} is a mathematical operation.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean isMathematicalOperation() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link ExpressionNode} is a literal.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean isLiteral() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the value of the current node.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public Object getValue() {
|
||||
return node.getValue(state);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the current node has child nodes.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean hasChildren() {
|
||||
return node.getChildCount() != 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the child {@link ExpressionNode} with the given index.
|
||||
*
|
||||
* @param index must not be negative.
|
||||
* @return
|
||||
*/
|
||||
public ExpressionNode getChild(int index) {
|
||||
|
||||
Assert.isTrue(index >= 0);
|
||||
return from(node.getChild(index), state);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link ExpressionNode} has a first child node that is not of the given type.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public boolean hasfirstChildNotOfType(Class<?> type) {
|
||||
|
||||
Assert.notNull(type, "Type must not be null!");
|
||||
return hasChildren() && !node.getChild(0).getClass().equals(type);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExpressionNode} from the given {@link SpelNode}.
|
||||
*
|
||||
* @param node
|
||||
* @return
|
||||
*/
|
||||
protected ExpressionNode from(SpelNode node) {
|
||||
return from(node, state);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Iterable#iterator()
|
||||
*/
|
||||
@Override
|
||||
public Iterator<ExpressionNode> iterator() {
|
||||
|
||||
if (!hasChildren()) {
|
||||
return EMPTY_ITERATOR;
|
||||
}
|
||||
|
||||
return new Iterator<ExpressionNode>() {
|
||||
|
||||
int index = 0;
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return index < node.getChildCount();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ExpressionNode next() {
|
||||
return from(node.getChild(index++));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void remove() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,126 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.spel;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* The context for an {@link ExpressionNode} transformation.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class ExpressionTransformationContextSupport<T extends ExpressionNode> {
|
||||
|
||||
private final T currentNode;
|
||||
private final ExpressionNode parentNode;
|
||||
private final DBObject previousOperationObject;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExpressionTransformationContextSupport} for the given {@link ExpressionNode}s and an optional
|
||||
* previous operation.
|
||||
*
|
||||
* @param currentNode must not be {@literal null}.
|
||||
* @param parentNode
|
||||
* @param previousOperationObject
|
||||
*/
|
||||
public ExpressionTransformationContextSupport(T currentNode, ExpressionNode parentNode,
|
||||
DBObject previousOperationObject) {
|
||||
|
||||
Assert.notNull(currentNode, "currentNode must not be null!");
|
||||
|
||||
this.currentNode = currentNode;
|
||||
this.parentNode = parentNode;
|
||||
this.previousOperationObject = previousOperationObject;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the current {@link ExpressionNode}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public T getCurrentNode() {
|
||||
return currentNode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the parent {@link ExpressionNode} or {@literal null} if none available.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public ExpressionNode getParentNode() {
|
||||
return parentNode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the previously accumulated operaton object or {@literal null} if none available. Rather than manually
|
||||
* adding stuff to the object prefer using {@link #addToPreviousOrReturn(Object)} to transparently do if one is
|
||||
* present.
|
||||
*
|
||||
* @see #hasPreviousOperation()
|
||||
* @see #addToPreviousOrReturn(Object)
|
||||
* @return
|
||||
*/
|
||||
public DBObject getPreviousOperationObject() {
|
||||
return previousOperationObject;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether a previous operation is present.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean hasPreviousOperation() {
|
||||
return getPreviousOperationObject() != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the parent node is of the same operation as the current node.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean parentIsSameOperation() {
|
||||
return parentNode == null ? false : currentNode.isOfSameTypeAs(parentNode);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the given value to the previous operation and returns it.
|
||||
*
|
||||
* @param value
|
||||
* @return
|
||||
*/
|
||||
public DBObject addToPreviousOperation(Object value) {
|
||||
extractArgumentListFrom(previousOperationObject).add(value);
|
||||
return previousOperationObject;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the given value to the previous operation if one is present or returns the value to add as is.
|
||||
*
|
||||
* @param value
|
||||
* @return
|
||||
*/
|
||||
public Object addToPreviousOrReturn(Object value) {
|
||||
return hasPreviousOperation() ? addToPreviousOperation(value) : value;
|
||||
}
|
||||
|
||||
private BasicDBList extractArgumentListFrom(DBObject context) {
|
||||
return (BasicDBList) context.get(context.keySet().iterator().next());
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,33 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.spel;
|
||||
|
||||
/**
|
||||
* SPI interface to implement components that can transfrom an {@link ExpressionTransformationContextSupport} into an
|
||||
* object.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public interface ExpressionTransformer<T extends ExpressionTransformationContextSupport<?>> {
|
||||
|
||||
/**
|
||||
* Transforms the given {@link ExpressionTransformationContextSupport} into an Object.
|
||||
*
|
||||
* @param context will never be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
Object transform(T context);
|
||||
}
|
||||
@@ -0,0 +1,72 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.spel;
|
||||
|
||||
import org.springframework.expression.spel.ExpressionState;
|
||||
import org.springframework.expression.spel.ast.FloatLiteral;
|
||||
import org.springframework.expression.spel.ast.IntLiteral;
|
||||
import org.springframework.expression.spel.ast.Literal;
|
||||
import org.springframework.expression.spel.ast.LongLiteral;
|
||||
import org.springframework.expression.spel.ast.NullLiteral;
|
||||
import org.springframework.expression.spel.ast.RealLiteral;
|
||||
import org.springframework.expression.spel.ast.StringLiteral;
|
||||
|
||||
/**
|
||||
* A node representing a literal in an expression.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class LiteralNode extends ExpressionNode {
|
||||
|
||||
private final Literal literal;
|
||||
|
||||
/**
|
||||
* Creates a new {@link LiteralNode} from the given {@link Literal} and {@link ExpressionState}.
|
||||
*
|
||||
* @param node must not be {@literal null}.
|
||||
* @param state must not be {@literal null}.
|
||||
*/
|
||||
LiteralNode(Literal node, ExpressionState state) {
|
||||
super(node, state);
|
||||
this.literal = node;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given {@link ExpressionNode} is a unary minus.
|
||||
*
|
||||
* @param parent
|
||||
* @return
|
||||
*/
|
||||
public boolean isUnaryMinus(ExpressionNode parent) {
|
||||
|
||||
if (!(parent instanceof OperatorNode)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
OperatorNode operator = (OperatorNode) parent;
|
||||
return operator.isUnaryMinus() && operator.getRight() == null;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.spel.ExpressionNode#isLiteral()
|
||||
*/
|
||||
@Override
|
||||
public boolean isLiteral() {
|
||||
return literal instanceof FloatLiteral || literal instanceof RealLiteral || literal instanceof IntLiteral
|
||||
|| literal instanceof LongLiteral || literal instanceof StringLiteral || literal instanceof NullLiteral;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,75 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.spel;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.expression.spel.ExpressionState;
|
||||
import org.springframework.expression.spel.ast.MethodReference;
|
||||
|
||||
/**
|
||||
* An {@link ExpressionNode} representing a method reference.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class MethodReferenceNode extends ExpressionNode {
|
||||
|
||||
private static final Map<String, String> FUNCTIONS;
|
||||
|
||||
static {
|
||||
|
||||
Map<String, String> map = new HashMap<String, String>();
|
||||
|
||||
map.put("concat", "$concat"); // Concatenates two strings.
|
||||
map.put("strcasecmp", "$strcasecmp"); // Compares two strings and returns an integer that reflects the comparison.
|
||||
map.put("substr", "$substr"); // Takes a string and returns portion of that string.
|
||||
map.put("toLower", "$toLower"); // Converts a string to lowercase.
|
||||
map.put("toUpper", "$toUpper"); // Converts a string to uppercase.
|
||||
|
||||
map.put("dayOfYear", "$dayOfYear"); // Converts a date to a number between 1 and 366.
|
||||
map.put("dayOfMonth", "$dayOfMonth"); // Converts a date to a number between 1 and 31.
|
||||
map.put("dayOfWeek", "$dayOfWeek"); // Converts a date to a number between 1 and 7.
|
||||
map.put("year", "$year"); // Converts a date to the full year.
|
||||
map.put("month", "$month"); // Converts a date into a number between 1 and 12.
|
||||
map.put("week", "$week"); // Converts a date into a number between 0 and 53
|
||||
map.put("hour", "$hour"); // Converts a date into a number between 0 and 23.
|
||||
map.put("minute", "$minute"); // Converts a date into a number between 0 and 59.
|
||||
map.put("second", "$second"); // Converts a date into a number between 0 and 59. May be 60 to account for leap
|
||||
// seconds.
|
||||
map.put("millisecond", "$millisecond"); // Returns the millisecond portion of a date as an integer between 0 and
|
||||
|
||||
FUNCTIONS = Collections.unmodifiableMap(map);
|
||||
}
|
||||
|
||||
MethodReferenceNode(MethodReference reference, ExpressionState state) {
|
||||
super(reference, state);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the method.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public String getMethodName() {
|
||||
|
||||
String name = getName();
|
||||
String methodName = name.substring(0, name.indexOf('('));
|
||||
return FUNCTIONS.get(methodName);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,120 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.spel;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.expression.spel.ExpressionState;
|
||||
import org.springframework.expression.spel.ast.OpDivide;
|
||||
import org.springframework.expression.spel.ast.OpMinus;
|
||||
import org.springframework.expression.spel.ast.OpModulus;
|
||||
import org.springframework.expression.spel.ast.OpMultiply;
|
||||
import org.springframework.expression.spel.ast.OpPlus;
|
||||
import org.springframework.expression.spel.ast.Operator;
|
||||
|
||||
/**
|
||||
* An {@link ExpressionNode} representing an operator.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class OperatorNode extends ExpressionNode {
|
||||
|
||||
private static final Map<String, String> OPERATORS;
|
||||
|
||||
static {
|
||||
|
||||
Map<String, String> map = new HashMap<String, String>(6);
|
||||
|
||||
map.put("+", "$add");
|
||||
map.put("-", "$subtract");
|
||||
map.put("*", "$multiply");
|
||||
map.put("/", "$divide");
|
||||
map.put("%", "$mod");
|
||||
|
||||
OPERATORS = Collections.unmodifiableMap(map);
|
||||
}
|
||||
|
||||
private final Operator operator;
|
||||
|
||||
/**
|
||||
* Creates a new {@link OperatorNode} from the given {@link Operator} and {@link ExpressionState}.
|
||||
*
|
||||
* @param node must not be {@literal null}.
|
||||
* @param state must not be {@literal null}.
|
||||
*/
|
||||
OperatorNode(Operator node, ExpressionState state) {
|
||||
super(node, state);
|
||||
this.operator = node;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.spel.ExpressionNode#isMathematicalOperation()
|
||||
*/
|
||||
@Override
|
||||
public boolean isMathematicalOperation() {
|
||||
return operator instanceof OpMinus || operator instanceof OpPlus || operator instanceof OpMultiply
|
||||
|| operator instanceof OpDivide || operator instanceof OpModulus;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the operator is unary.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean isUnaryOperator() {
|
||||
return operator.getRightOperand() == null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the Mongo expression of the operator.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public String getMongoOperator() {
|
||||
return OPERATORS.get(operator.getOperatorName());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the operator is a unary minus, e.g. -1.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean isUnaryMinus() {
|
||||
return isUnaryOperator() && operator instanceof OpMinus;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the left operand as {@link ExpressionNode}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public ExpressionNode getLeft() {
|
||||
return from(operator.getLeftOperand());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the right operand as {@link ExpressionNode}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public ExpressionNode getRight() {
|
||||
return from(operator.getRightOperand());
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,5 @@
|
||||
/**
|
||||
* Support classes to transform SpEL expressions into MongoDB expressions.
|
||||
* @since 1.4
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.spel;
|
||||
@@ -158,7 +158,15 @@ public class GridFsTemplate implements GridFsOperations, ResourcePatternResolver
|
||||
* @see org.springframework.data.mongodb.gridfs.GridFsOperations#find(com.mongodb.DBObject)
|
||||
*/
|
||||
public List<GridFSDBFile> find(Query query) {
|
||||
return getGridFs().find(getMappedQuery(query));
|
||||
|
||||
if (query == null) {
|
||||
return getGridFs().find((DBObject) null);
|
||||
}
|
||||
|
||||
DBObject queryObject = getMappedQuery(query.getQueryObject());
|
||||
DBObject sortObject = getMappedQuery(query.getSortObject());
|
||||
|
||||
return getGridFs().find(queryObject, sortObject);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -221,7 +229,11 @@ public class GridFsTemplate implements GridFsOperations, ResourcePatternResolver
|
||||
}
|
||||
|
||||
private DBObject getMappedQuery(Query query) {
|
||||
return query == null ? null : queryMapper.getMappedObject(query.getQueryObject(), null);
|
||||
return query == null ? null : getMappedQuery(query.getQueryObject());
|
||||
}
|
||||
|
||||
private DBObject getMappedQuery(DBObject query) {
|
||||
return query == null ? null : queryMapper.getMappedObject(query, null);
|
||||
}
|
||||
|
||||
private GridFS getGridFs() {
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
/**
|
||||
* Support for MongoDB GridFS feature.
|
||||
*/
|
||||
package org.springframework.data.mongodb.gridfs;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2002-2011 the original author or authors.
|
||||
* Copyright 2012-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,19 +15,20 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.monitor;
|
||||
|
||||
import java.net.InetAddress;
|
||||
import java.net.UnknownHostException;
|
||||
|
||||
import com.mongodb.Mongo;
|
||||
import org.springframework.jmx.export.annotation.ManagedMetric;
|
||||
import org.springframework.jmx.export.annotation.ManagedOperation;
|
||||
import org.springframework.jmx.export.annotation.ManagedResource;
|
||||
import org.springframework.jmx.support.MetricType;
|
||||
|
||||
import com.mongodb.Mongo;
|
||||
|
||||
/**
|
||||
* Expose basic server information via JMX
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@ManagedResource(description = "Server Information")
|
||||
public class ServerInfo extends AbstractMonitor {
|
||||
@@ -36,9 +37,20 @@ public class ServerInfo extends AbstractMonitor {
|
||||
this.mongo = mongo;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the hostname of the used server reported by mongo.
|
||||
*
|
||||
* @return the reported hostname can also be an IP address.
|
||||
* @throws UnknownHostException
|
||||
*/
|
||||
@ManagedOperation(description = "Server host name")
|
||||
public String getHostName() throws UnknownHostException {
|
||||
return InetAddress.getLocalHost().getHostName();
|
||||
|
||||
/*
|
||||
* UnknownHostException is not necessary anymore, but clients could have
|
||||
* called this method in a try..catch(UnknownHostException) already
|
||||
*/
|
||||
return getServerStatus().getServerUsed().getHost();
|
||||
}
|
||||
|
||||
@ManagedMetric(displayName = "Uptime Estimate")
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
/**
|
||||
* Spring Data's MongoDB abstraction.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
@@ -21,15 +21,19 @@ import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
import org.springframework.data.annotation.QueryAnnotation;
|
||||
|
||||
/**
|
||||
* Annotation to declare finder queries directly on repository methods. Both attributes allow using a placeholder
|
||||
* notation of {@code ?0}, {@code ?1} and so on.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Target(ElementType.METHOD)
|
||||
@Documented
|
||||
@QueryAnnotation
|
||||
public @interface Query {
|
||||
|
||||
/**
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
/**
|
||||
* CDI support for MongoDB specific repository implementation.
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.cdi;
|
||||
|
||||
@@ -35,6 +35,7 @@ import org.springframework.data.repository.query.QueryLookupStrategy.Key;
|
||||
* {@link #basePackages()} or {@link #basePackageClasses()} it will trigger scanning of the package of annotated class.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@Target(ElementType.TYPE)
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@@ -119,4 +120,10 @@ public @interface EnableMongoRepositories {
|
||||
* @return
|
||||
*/
|
||||
boolean createIndexesForQueryMethods() default false;
|
||||
|
||||
/**
|
||||
* Configures whether nested repository-interfaces (e.g. defined as inner classes) should be discovered by the
|
||||
* repositories infrastructure.
|
||||
*/
|
||||
boolean considerNestedRepositories() default false;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
/**
|
||||
* Support infrastructure for the configuration of MongoDB specific repositories.
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.config;
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.
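Circling back to the `@EnableMongoRepositories` additions shown above: with `considerNestedRepositories` enabled, repository interfaces defined as inner classes are picked up by the scanning infrastructure. A minimal configuration sketch (the `Person` type and the remaining Mongo infrastructure beans are assumed, not part of the commit):

```java
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;

@Configuration
@EnableMongoRepositories(considerNestedRepositories = true)
class NestedRepositoriesConfig {

	static class Person {} // illustrative domain type

	// discovered only because considerNestedRepositories is switched on above
	interface PersonRepository extends MongoRepository<Person, String> {}
}
```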