Compare commits


23 Commits

Author SHA1 Message Date
Spring Buildmaster
edd71cac78 Release version 1.0.1.RELEASE. 2012-02-11 06:41:53 -08:00
Oliver Gierke
82bd7a69eb DATAMONGO-395 - Updated changelog for 1.0.1. 2012-02-11 14:33:30 +01:00
Oliver Gierke
b434a0810e DATAMONGO-401 - Fixed NullPointerException in StringBasedMongoQuery. 2012-02-11 14:32:22 +01:00
Oliver Gierke
40236d4099 DATAMONGO-380 - Improved map handling for keys containing dots.
MappingMongoConverter now rejects objects that would result in field keys containing a dot, as we cannot reliably escape and unescape them without potentially wrecking correct keys on reading. However, I added a property mapKeyReplacement that can be set to e.g. ~ to have all dots in map keys replaced with ~. This will of course cause ~ to be transformed back into dots when reading. If further customization is necessary, override potentiallyEscapeMapKey(…) and potentiallyUnescapeMapKey(…).
2012-02-10 18:20:43 +01:00
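
As a rough illustration of the behaviour described in this message, the sketch below configures the converter to replace dots in map keys; the setter name (setMapKeyReplacement) and the constructor used are assumed from the message rather than verified against the 1.0.1 API.

import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

class MapKeyConverterSketch {

  MappingMongoConverter mapKeyAwareConverter(MongoDbFactory factory) {
    MappingMongoConverter converter = new MappingMongoConverter(factory, new MongoMappingContext());
    // Setter name assumed from the "mapKeyReplacement" property mentioned above:
    // dots in map keys are written as '~' and turned back into dots when reading.
    converter.setMapKeyReplacement("~");
    // For custom behaviour, subclass the converter and override
    // potentiallyEscapeMapKey(…) and potentiallyUnescapeMapKey(…) instead.
    return converter;
  }
}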
Oliver Gierke
8f6d940036 DATAMONGO-397 - Replaced references to MongoTemplate with MongoOperations in repository package.
The reference to MongoTemplate in MongoRepositoryFactoryBean was unnecessary and could cause problems when the MongoTemplate is proxied, as it can then no longer be wired into the factory.
2012-02-10 17:11:27 +01:00
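
A hedged sketch of the client side this change enables: code (like the repository infrastructure) depends only on the MongoOperations interface, so a proxied MongoTemplate can still be injected. PersonService and the "person" collection are made-up examples.

import org.springframework.data.mongodb.core.MongoOperations;

class PersonService {

  // Depending on the MongoOperations interface instead of the MongoTemplate class
  // keeps injection working even when the template is wrapped in an AOP proxy.
  private final MongoOperations operations;

  PersonService(MongoOperations operations) {
    this.operations = operations;
  }

  long numberOfPeople() {
    return operations.getCollection("person").count();
  }
}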
Oliver Gierke
95a92ccf5d DATAMONGO-395 - Removed invalid XML element. 2012-02-08 21:24:34 +01:00
Oliver Gierke
a6db24554f DATAMONGO-395 - Upgrade to Spring Data Commons 1.2.1. 2012-02-08 21:24:34 +01:00
Oliver Gierke
2f6c61ef9c DATAMONGO-395 - Polished pom.xml for Maven Central release. 2012-02-08 21:24:29 +01:00
Oliver Gierke
d8bf7ebf3f DATAMONGO-395 - Refer to latest core repository documentation. 2012-02-08 20:20:09 +01:00
Oliver Gierke
ce42783e73 DATACMNS-390 - Added UUIDToBinaryConverter to be able to handle UUIDs by default. 2012-02-06 15:16:24 +01:00
Oliver Gierke
69474327c6 DATAMONGO-358 - Fixed collection reading when the property type is not a collection.
If you have a property of type Object and it contains a collection, we didn't properly read it back in because creating the collection instance failed due to an invalid call to CollectionFactory. We now default the parameter handed to that call to List in case the property type is not a Collection at all.
2012-02-01 16:26:37 +01:00
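
The scenario from the message, sketched with a hypothetical Wrapper type whose property is declared as Object but actually holds a List:

import java.util.Arrays;
import org.springframework.data.mongodb.core.MongoOperations;

class ObjectPropertySketch {

  // Hypothetical domain type: the payload property is declared as Object …
  static class Wrapper {
    Object payload;
  }

  void roundTrip(MongoOperations operations) {
    Wrapper wrapper = new Wrapper();
    wrapper.payload = Arrays.asList("a", "b"); // … but holds a collection at runtime
    operations.save(wrapper);
    // Before the fix, reading the document back failed while recreating the collection
    // for the Object-typed property; the collection type now defaults to List.
    operations.findAll(Wrapper.class);
  }
}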
Oliver Gierke
1bbe2e8247 DATAMONGO-385 - Added test case to show repositories working with Long id. 2012-02-01 15:30:02 +01:00
Oliver Gierke
94af898ae3 DATAMONGO-387 - Repository query execution for GeoPage results is now working correctly.
Added special handling of GeoPage return types for repository query methods.
2012-02-01 14:42:49 +01:00
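
A sketch of the kind of query method this fix targets; the Venue domain type, its location property, and the method name are made up for illustration.

import org.springframework.data.domain.Pageable;
import org.springframework.data.mongodb.core.geo.Distance;
import org.springframework.data.mongodb.core.geo.GeoPage;
import org.springframework.data.mongodb.core.geo.Point;
import org.springframework.data.mongodb.repository.MongoRepository;

class Venue {
  String id;
  Point location;
}

interface VenueRepository extends MongoRepository<Venue, String> {

  // Geo-near query method whose GeoPage return type is now executed correctly.
  GeoPage<Venue> findByLocationNear(Point location, Distance distance, Pageable pageable);
}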
Oliver Gierke
f6298f7005 DATAMONGO-375 - Polished XSD by removing unnecessary version numbers. 2012-02-01 10:00:19 +01:00
Oliver Gierke
d5b3c651b2 Consolidate Maven repository usage to use repo.springsource.org/libs-snapshot. 2012-01-31 18:47:52 +01:00
Oliver Gierke
33dd00f0b8 DATAMONGO-379 - Improved entity instantiation.
Huge refactoring of the way MappingMongoConverter instantiates entities. The constructor arguments now have to mirror a property exactly in terms of name, so we can pick up mapping information from the property to look up the correct value from the source document. The @Value annotation can be used either to inject completely arbitrary values into the instance (e.g. by referring to a Spring bean) or simply to define an expression against the DBObject's fields:

class Sample {
  String foo;
  String bar;

  Sample(String foo, @Value("#root._bar") String bar) {
    this.foo = foo;
    this.bar = bar;
  }
}

Trying to create an instance of this class from the following documents yields:

{ "foo" : "FOO" } -> new Sample("FOO", null)
{ "_bar" : "BAR" } -> new Sample(null, "BAR").
2012-01-16 18:37:00 +00:00
Oliver Gierke
3207a81555 DATAMONGO-368 - MappingMongoConverter does not remove null values from collections anymore. 2012-01-16 18:36:47 +00:00
Oliver Gierke
d231519012 DATAMONGO-376 - Fixed potential NPE in SpringDataMongodbSerializer. 2012-01-12 12:26:39 +01:00
Oliver Gierke
e052ecc9a4 Polished formatting and Javadoc. 2012-01-12 12:26:10 +01:00
Oliver Gierke
071f2934a1 DATAMONGO-369 - Fixed query mapping when a DBObject is included in the query object.
Replaced a premature return with continue so that the for loop proceeds as intended.
2012-01-12 11:20:16 +01:00
Oliver Gierke
d2a18e9b11 DATAMONGO-373 - Fixed potential ClassCastException in QueryMapper.
QueryMapper assumed it would find a BasicBSONList for $or/$nor operators. This is generally true if the DBObject was created through our Query abstraction, but it fails if you use the MongoDB driver's QueryBuilder. We now only insist on an Iterable, which fixes the issue.
2012-01-12 11:20:05 +01:00
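
The failing combination described above, sketched with the plain MongoDB driver's QueryBuilder; the Order type, the status field, and the values are made up.

import com.mongodb.DBObject;
import com.mongodb.QueryBuilder;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.BasicQuery;

class QueryBuilderSketch {

  static class Order {
    String id;
    String status;
  }

  void findWithDriverBuiltOr(MongoOperations operations) {
    // The $or clause built by the driver's QueryBuilder is not necessarily a
    // BasicBSONList, which is what used to trigger the ClassCastException.
    DBObject criteria = QueryBuilder.start()
        .or(QueryBuilder.start("status").is("NEW").get(),
            QueryBuilder.start("status").is("OPEN").get())
        .get();
    operations.find(new BasicQuery(criteria), Order.class);
  }
}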
Oliver Gierke
d684fa1f8e Fixed broken integration test. 2012-01-12 11:19:43 +01:00
Oliver Gierke
1a0077231d DATAMONGO-357 - Prepare 1.0.1 development branch. 2012-01-12 11:15:39 +01:00
161 changed files with 2472 additions and 7472 deletions


@@ -1,291 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<profiles version="12">
<profile kind="CodeFormatterProfile" name="Spring Data" version="12">
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.disabling_tag" value="@formatter:off"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_field" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.use_on_off_tags" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_ellipsis" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiple_fields" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression" value="80"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_binary_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_array_initializer" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_package" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation" value="2"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_binary_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_package" value="0"/>
<setting id="org.eclipse.jdt.core.compiler.source" value="1.7"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_line_comments" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.join_wrapped_lines" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_member_type" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.align_type_members_on_columns" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_unary_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.indent_parameter_description" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="120"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.indentation.size" value="2"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.enabling_tag" value="@formatter:on"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="0"/>
<setting id="org.eclipse.jdt.core.compiler.problem.assertIdentifier" value="error"/>
<setting id="org.eclipse.jdt.core.formatter.tabulation.char" value="tab"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_try_resources" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_body" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_method" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_method_declaration" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_switch" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_try" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.compiler.problem.enumIdentifier" value="error"/>
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_ellipsis" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_method_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.compact_else_if" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_or_operator_multicatch" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.comment.indent_root_tags" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.tabulation.size" value="2"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_empty_lines" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block_in_case" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter" value="insert"/>
<setting id="org.eclipse.jdt.core.compiler.compliance" value="1.7"/>
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer" value="2"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_unary_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_binary_expression" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while" value="do not insert"/>
<setting id="org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode" value="enabled"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_try" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_label" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_javadoc_comments" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="120"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_import_groups" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_binary_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_block" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.join_lines_in_comments" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_if" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_imports" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_html" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_source_code" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.compiler.codegen.targetPlatform" value="1.7"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_resources_in_try" value="80"/>
<setting id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_header" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_block_comments" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_type_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_imports" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_try_resources" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line" value="false"/>
</profile>
</profiles>

pom.xml

@@ -4,7 +4,9 @@
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-dist</artifactId>
<name>Spring Data MongoDB Distribution</name>
<version>1.1.0.M2</version>
<description>Spring Data project for MongoDB</description>
<url>http://www.springsource.org/spring-data/mongodb</url>
<version>1.0.1.RELEASE</version>
<packaging>pom</packaging>
<modules>
<module>spring-data-mongodb</module>
@@ -13,7 +15,18 @@
<module>spring-data-mongodb-parent</module>
</modules>
<developers>
<developer>
<id>ogierke</id>
<name>Oliver Gierke</name>
<email>ogierke at vmware.com</email>
<organization>SpringSource</organization>
<organizationUrl>http://www.springsource.com</organizationUrl>
<roles>
<role>Project lead</role>
</roles>
<timezone>+1</timezone>
</developer>
<developer>
<id>trisberg</id>
<name>Thomas Risberg</name>
@@ -38,17 +51,6 @@
</roles>
<timezone>-5</timezone>
</developer>
<developer>
<id>ogierke</id>
<name>Oliver Gierke</name>
<email>ogierke at vmware.com</email>
<organization>SpringSource</organization>
<organizationUrl>http://www.springsource.com</organizationUrl>
<roles>
<role>Developer</role>
</roles>
<timezone>+1</timezone>
</developer>
<developer>
<id>jbrisbin</id>
<name>Jon Brisbin</name>
@@ -149,7 +151,7 @@
<entities>
<entity>
<name>version</name>
<value>${project.version}</value>
<value>${pom.version}</value>
</entity>
</entities>
<postProcess>
@@ -260,10 +262,16 @@
</build>
<pluginRepositories>
<!-- Necessary for the build extension -->
<pluginRepository>
<id>spring-plugins-release</id>
<url>http://repo.springsource.org/plugins-release</url>
<!-- necessary for bundlor and utils -->
<id>repository.plugin.springsource.release</id>
<name>SpringSource Maven Repository</name>
<url>http://repository.springsource.com/maven/bundles/release</url>
</pluginRepository>
<pluginRepository>
<id>spring-libs-release</id>
<name>Spring Framework Maven Release Repository</name>
<url>http://repo.springsource.org/libs-release</url>
</pluginRepository>
</pluginRepositories>
@@ -277,9 +285,9 @@
</url>
</site>
<repository>
<id>spring-milestone</id>
<name>Spring Milestone Repository</name>
<url>s3://maven.springframework.org/milestone</url>
<id>spring-release</id>
<name>Spring Release Repository</name>
<url>s3://maven.springframework.org/release</url>
</repository>
<snapshotRepository>
<id>spring-snapshot</id>
@@ -287,5 +295,7 @@
<url>s3://maven.springframework.org/snapshot</url>
</snapshotRepository>
</distributionManagement>
<scm>
<url>https://github.com/SpringSource/spring-data-mongodb</url>
</scm>
</project>


@@ -4,7 +4,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.1.0.M2</version>
<version>1.0.1.RELEASE</version>
<relativePath>../spring-data-mongodb-parent/pom.xml</relativePath>
</parent>
<artifactId>spring-data-mongodb-cross-store</artifactId>
@@ -15,12 +15,10 @@
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<version>${org.springframework.version.range}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
<version>${org.springframework.version.range}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
@@ -30,19 +28,82 @@
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-orm</artifactId>
<version>${org.springframework.version.range}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<scope>test</scope>
</dependency>
<!-- Spring Data -->
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-commons-core</artifactId>
<version>${data.commons.version}</version>
</dependency>
<!-- <dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-commons-aspects</artifactId>
</dependency> -->
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>1.1.0.M2</version>
</dependency>
<!-- Logging -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<exclusions>
<exclusion>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
</exclusion>
<exclusion>
<groupId>javax.jms</groupId>
<artifactId>jms</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jdmk</groupId>
<artifactId>jmxtools</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jmx</groupId>
<artifactId>jmxri</artifactId>
</exclusion>
</exclusions>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>javax.annotation</groupId>
<artifactId>jsr250-api</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</dependency>
<dependency>


@@ -58,8 +58,10 @@ public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
this.entityManagerFactory = entityManagerFactory;
}
public void getPersistentState(Class<? extends ChangeSetBacked> entityClass, Object id, final ChangeSet changeSet)
throws DataAccessException, NotFoundException {
public void getPersistentState(Class<? extends ChangeSetBacked> entityClass,
Object id, final ChangeSet changeSet)
throws DataAccessException, NotFoundException {
if (id == null) {
log.debug("Unable to load MongoDB data for null id");
@@ -75,7 +77,8 @@ public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
log.debug("Loading MongoDB data for " + dbk);
}
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
public Object doInCollection(DBCollection collection) throws MongoException, DataAccessException {
public Object doInCollection(DBCollection collection)
throws MongoException, DataAccessException {
for (DBObject dbo : collection.find(dbk)) {
String key = (String) dbo.get(ENTITY_FIELD_NAME);
if (log.isDebugEnabled()) {
@@ -84,8 +87,9 @@ public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
if (!changeSet.getValues().containsKey(key)) {
String className = (String) dbo.get(ENTITY_FIELD_CLASS);
if (className == null) {
throw new DataIntegrityViolationException("Unble to convert property " + key + ": Invalid metadata, "
+ ENTITY_FIELD_CLASS + " not available");
throw new DataIntegrityViolationException(
"Unble to convert property " + key
+ ": Invalid metadata, " + ENTITY_FIELD_CLASS + " not available");
}
Class<?> clazz = ClassUtils.resolveClassName(className, ClassUtils.getDefaultClassLoader());
Object value = mongoTemplate.getConverter().read(clazz, dbo);
@@ -131,8 +135,10 @@ public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
dbQuery.put(ENTITY_ID, getPersistentId(entity, cs));
dbQuery.put(ENTITY_CLASS, entity.getClass().getName());
dbQuery.put(ENTITY_FIELD_NAME, key);
DBObject dbId = mongoTemplate.execute(collName, new CollectionCallback<DBObject>() {
public DBObject doInCollection(DBCollection collection) throws MongoException, DataAccessException {
DBObject dbId = mongoTemplate.execute(collName,
new CollectionCallback<DBObject>() {
public DBObject doInCollection(DBCollection collection)
throws MongoException, DataAccessException {
return collection.findOne(dbQuery);
}
});
@@ -141,12 +147,14 @@ public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
log.debug("Flush: removing: " + dbQuery);
}
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
public Object doInCollection(DBCollection collection) throws MongoException, DataAccessException {
public Object doInCollection(DBCollection collection)
throws MongoException, DataAccessException {
collection.remove(dbQuery);
return null;
}
});
} else {
}
else {
final DBObject dbDoc = new BasicDBObject();
dbDoc.putAll(dbQuery);
if (log.isDebugEnabled()) {
@@ -158,7 +166,8 @@ public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
dbDoc.put("_id", dbId.get("_id"));
}
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
public Object doInCollection(DBCollection collection) throws MongoException, DataAccessException {
public Object doInCollection(DBCollection collection)
throws MongoException, DataAccessException {
collection.save(dbDoc);
return null;
}


@@ -38,234 +38,235 @@ import org.springframework.data.crossstore.HashMapChangeSet;
import org.springframework.transaction.support.TransactionSynchronizationManager;
/**
* Aspect to turn an object annotated with @Document into a persistent document using Mongo.
* Aspect to turn an object annotated with @Document into a persistent document
* using Mongo.
*
* @author Thomas Risberg
*/
public aspect MongoDocumentBacking {
private static final Log LOGGER = LogFactory.getLog(MongoDocumentBacking.class);
private static final Log LOGGER = LogFactory
.getLog(MongoDocumentBacking.class);
// Aspect shared config
private ChangeSetPersister<Object> changeSetPersister;
// Aspect shared config
private ChangeSetPersister<Object> changeSetPersister;
public void setChangeSetPersister(ChangeSetPersister<Object> changeSetPersister) {
this.changeSetPersister = changeSetPersister;
}
public void setChangeSetPersister(
ChangeSetPersister<Object> changeSetPersister) {
this.changeSetPersister = changeSetPersister;
}
// ITD to introduce N state to Annotated objects
declare parents : (@Entity *) implements DocumentBacked;
// ITD to introduce N state to Annotated objects
declare parents : (@Entity *) implements DocumentBacked;
// The annotated fields that will be persisted in MongoDB rather than with JPA
declare @field: @RelatedDocument * (@Entity+ *).*:@Transient;
// The annotated fields that will be persisted in MongoDB rather than with JPA
declare @field: @RelatedDocument * (@Entity+ *).*:@Transient;
// -------------------------------------------------------------------------
// Advise user-defined constructors of ChangeSetBacked objects to create a new
// backing ChangeSet
// -------------------------------------------------------------------------
pointcut arbitraryUserConstructorOfChangeSetBackedObject(DocumentBacked entity) :
// -------------------------------------------------------------------------
// Advise user-defined constructors of ChangeSetBacked objects to create a new
// backing ChangeSet
// -------------------------------------------------------------------------
pointcut arbitraryUserConstructorOfChangeSetBackedObject(DocumentBacked entity) :
execution((DocumentBacked+).new(..)) &&
!execution((DocumentBacked+).new(ChangeSet)) &&
this(entity);
pointcut finderConstructorOfChangeSetBackedObject(DocumentBacked entity, ChangeSet cs) :
pointcut finderConstructorOfChangeSetBackedObject(DocumentBacked entity,
ChangeSet cs) :
execution((DocumentBacked+).new(ChangeSet)) &&
this(entity) &&
args(cs);
protected pointcut entityFieldGet(DocumentBacked entity) :
protected pointcut entityFieldGet(DocumentBacked entity) :
get(@RelatedDocument * DocumentBacked+.*) &&
this(entity) &&
!get(* DocumentBacked.*);
protected pointcut entityFieldSet(DocumentBacked entity, Object newVal) :
protected pointcut entityFieldSet(DocumentBacked entity, Object newVal) :
set(@RelatedDocument * DocumentBacked+.*) &&
this(entity) &&
args(newVal) &&
!set(* DocumentBacked.*);
// intercept EntityManager.merge calls
public pointcut entityManagerMerge(EntityManager em, Object entity) :
// intercept EntityManager.merge calls
public pointcut entityManagerMerge(EntityManager em, Object entity) :
call(* EntityManager.merge(Object)) &&
target(em) &&
args(entity);
// intercept EntityManager.remove calls
// public pointcut entityManagerRemove(EntityManager em, Object entity) :
// call(* EntityManager.remove(Object)) &&
// target(em) &&
// args(entity);
// intercept EntityManager.remove calls
// public pointcut entityManagerRemove(EntityManager em, Object entity) :
// call(* EntityManager.remove(Object)) &&
// target(em) &&
// args(entity);
// move changeSet from detached entity to the newly merged persistent object
Object around(EntityManager em, Object entity) : entityManagerMerge(em, entity) {
Object mergedEntity = proceed(em, entity);
if (entity instanceof DocumentBacked && mergedEntity instanceof DocumentBacked) {
((DocumentBacked)mergedEntity).changeSet = ((DocumentBacked)entity).getChangeSet();
}
return mergedEntity;
}
// move changeSet from detached entity to the newly merged persistent object
Object around(EntityManager em, Object entity) : entityManagerMerge(em, entity) {
Object mergedEntity = proceed(em, entity);
if (entity instanceof DocumentBacked && mergedEntity instanceof DocumentBacked) {
((DocumentBacked) mergedEntity).changeSet = ((DocumentBacked) entity).getChangeSet();
}
return mergedEntity;
// clear changeSet from removed entity
// Object around(EntityManager em, Object entity) : entityManagerRemove(em, entity) {
// if (entity instanceof DocumentBacked) {
// removeChangeSetValues((DocumentBacked)entity);
// }
// return proceed(em, entity);
// }
private static void removeChangeSetValues(DocumentBacked entity) {
LOGGER.debug("Removing all change-set values for " + entity);
ChangeSet nulledCs = new HashMapChangeSet();
DocumentBacked documentEntity = (DocumentBacked) entity;
@SuppressWarnings("unchecked")
ChangeSetPersister<Object> changeSetPersister = (ChangeSetPersister<Object>)documentEntity.itdChangeSetPersister;
try {
changeSetPersister.getPersistentState(
documentEntity.getClass(),
documentEntity.get_persistent_id(),
documentEntity.getChangeSet());
}
catch (DataAccessException e) {}
catch (NotFoundException e) {}
for (String key :entity.getChangeSet().getValues().keySet()) {
nulledCs.set(key, null);
}
entity.setChangeSet(nulledCs);
}
// clear changeSet from removed entity
// Object around(EntityManager em, Object entity) : entityManagerRemove(em, entity) {
// if (entity instanceof DocumentBacked) {
// removeChangeSetValues((DocumentBacked)entity);
// }
// return proceed(em, entity);
// }
before(DocumentBacked entity) : arbitraryUserConstructorOfChangeSetBackedObject(entity) {
LOGGER
.debug("User-defined constructor called on DocumentBacked object of class "
+ entity.getClass());
// Populate all ITD fields
entity.setChangeSet(new HashMapChangeSet());
entity.itdChangeSetPersister = changeSetPersister;
entity.itdTransactionSynchronization =
new ChangeSetBackedTransactionSynchronization(changeSetPersister, entity);
//registerTransactionSynchronization(entity);
}
private static void removeChangeSetValues(DocumentBacked entity) {
LOGGER.debug("Removing all change-set values for " + entity);
ChangeSet nulledCs = new HashMapChangeSet();
DocumentBacked documentEntity = (DocumentBacked) entity;
@SuppressWarnings("unchecked")
ChangeSetPersister<Object> changeSetPersister = (ChangeSetPersister<Object>) documentEntity.itdChangeSetPersister;
try {
changeSetPersister.getPersistentState(documentEntity.getClass(), documentEntity.get_persistent_id(),
documentEntity.getChangeSet());
} catch (DataAccessException e) {
} catch (NotFoundException e) {
}
for (String key : entity.getChangeSet().getValues().keySet()) {
nulledCs.set(key, null);
}
entity.setChangeSet(nulledCs);
}
private static void registerTransactionSynchronization(DocumentBacked entity) {
if (TransactionSynchronizationManager.isSynchronizationActive()) {
if (!TransactionSynchronizationManager.getSynchronizations().contains(entity.itdTransactionSynchronization)) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Adding transaction synchronization for " + entity);
}
TransactionSynchronizationManager.registerSynchronization(entity.itdTransactionSynchronization);
}
else {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Transaction synchronization already active for " + entity);
}
}
}
else {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Transaction synchronization is not active for " + entity);
}
}
}
before(DocumentBacked entity) : arbitraryUserConstructorOfChangeSetBackedObject(entity) {
LOGGER.debug("User-defined constructor called on DocumentBacked object of class " + entity.getClass());
// Populate all ITD fields
entity.setChangeSet(new HashMapChangeSet());
entity.itdChangeSetPersister = changeSetPersister;
entity.itdTransactionSynchronization = new ChangeSetBackedTransactionSynchronization(changeSetPersister, entity);
// registerTransactionSynchronization(entity);
}
// -------------------------------------------------------------------------
// ChangeSet-related mixins
// -------------------------------------------------------------------------
// Introduced field
@Transient private ChangeSet DocumentBacked.changeSet;
private static void registerTransactionSynchronization(DocumentBacked entity) {
if (TransactionSynchronizationManager.isSynchronizationActive()) {
if (!TransactionSynchronizationManager.getSynchronizations().contains(entity.itdTransactionSynchronization)) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Adding transaction synchronization for " + entity);
}
TransactionSynchronizationManager.registerSynchronization(entity.itdTransactionSynchronization);
} else {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Transaction synchronization already active for " + entity);
}
}
} else {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Transaction synchronization is not active for " + entity);
}
}
}
// -------------------------------------------------------------------------
// ChangeSet-related mixins
// -------------------------------------------------------------------------
// Introduced field
@Transient
private ChangeSet DocumentBacked.changeSet;
@Transient
private ChangeSetPersister<?> DocumentBacked.itdChangeSetPersister;
@Transient
private ChangeSetBackedTransactionSynchronization DocumentBacked.itdTransactionSynchronization;
public void DocumentBacked.setChangeSet(ChangeSet cs) {
this.changeSet = cs;
}
public ChangeSet DocumentBacked.getChangeSet() {
return changeSet;
}
public Object DocumentBacked.get_persistent_id() {
return itdChangeSetPersister.getPersistentId(this, this.changeSet);
}
// Flush the entity state to the persistent store
public void DocumentBacked.flush() {
Object id = itdChangeSetPersister.getPersistentId(this, this.changeSet);
itdChangeSetPersister.persistState(this, this.changeSet);
}
// lifecycle methods
@javax.persistence.PostPersist
public void DocumentBacked.itdPostPersist() {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("JPA lifecycle event PrePersist: " + this.getClass().getName());
}
registerTransactionSynchronization(this);
}
@javax.persistence.PreUpdate
public void DocumentBacked.itdPreUpdate() {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("JPA lifecycle event PreUpdate: " + this.getClass().getName() + " :: " + this);
}
registerTransactionSynchronization(this);
}
@javax.persistence.PostUpdate
public void DocumentBacked.itdPostUpdate() {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("JPA lifecycle event PostUpdate: " + this.getClass().getName() + " :: " + this);
}
registerTransactionSynchronization(this);
}
@javax.persistence.PostRemove
public void DocumentBacked.itdPostRemove() {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("JPA lifecycle event PostRemove: " + this.getClass().getName() + " :: " + this);
}
registerTransactionSynchronization(this);
removeChangeSetValues(this);
}
@javax.persistence.PostLoad
public void DocumentBacked.itdPostLoad() {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("JPA lifecycle event PostLoad: " + this.getClass().getName() + " :: " + this);
}
registerTransactionSynchronization(this);
}
/**
* delegates field reads to the state accessors instance
*/
Object around(DocumentBacked entity): entityFieldGet(entity) {
Field f = field(thisJoinPoint);
String propName = f.getName();
LOGGER.trace("GET " + f + " -> ChangeSet value property [" + propName + "] using: " + entity.getChangeSet());
if (entity.getChangeSet().getValues().get(propName) == null) {
try {
this.changeSetPersister.getPersistentState(entity.getClass(), entity.get_persistent_id(), entity.getChangeSet());
} catch (NotFoundException e) {
}
}
Object fValue = entity.getChangeSet().getValues().get(propName);
if (fValue != null) {
return fValue;
}
return proceed(entity);
}
/**
* delegates field writes to the state accessors instance
*/
Object around(DocumentBacked entity, Object newVal) : entityFieldSet(entity, newVal) {
Field f = field(thisJoinPoint);
String propName = f.getName();
LOGGER.trace("SET " + f + " -> ChangeSet number value property [" + propName + "] with value=[" + newVal + "]");
entity.getChangeSet().set(propName, newVal);
return proceed(entity, newVal);
}
Field field(JoinPoint joinPoint) {
FieldSignature fieldSignature = (FieldSignature) joinPoint.getSignature();
return fieldSignature.getField();
}
}

View File

@@ -24,6 +24,8 @@ import java.lang.annotation.Target;
* @author Thomas Risberg
*/
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.FIELD })
public @interface RelatedDocument {
}
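For orientation, this annotation is what ties a field of a JPA entity to the MongoDB-backed ChangeSet handled by the aspect above. A minimal sketch, assuming the aspect's entityFieldGet/entityFieldSet pointcuts match fields carrying @RelatedDocument; the class, field, and SurveyInfo type names are hypothetical:

import javax.persistence.Entity;
import javax.persistence.Id;

@Entity
public class Customer {

    @Id
    private Long id;           // relational part, managed by JPA

    @RelatedDocument
    private SurveyInfo survey; // hypothetical type; its state is carried in the ChangeSet and persisted to MongoDB
}

Reads and writes of the annotated field are then intercepted by the around advice shown earlier, which consults and updates the ChangeSet instead of the plain Java field.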

View File

@@ -90,7 +90,8 @@ public class CrossStoreMongoTests {
Assert.assertNotNull(found);
Assert.assertEquals(Long.valueOf(1), found.getId());
Assert.assertNotNull(found.getResume());
Assert.assertEquals("DiMark, DBA, 1990-2000" + "; " + "VMware, Developer, 2007-", found.getResume().getJobs());
found.getResume().addJob("SpringDeveloper.com, Consultant, 2005-2006");
found.setAge(44);
}
@@ -105,7 +106,8 @@ public class CrossStoreMongoTests {
Assert.assertNotNull(found);
Assert.assertEquals(Long.valueOf(1), found.getId());
Assert.assertNotNull(found.getResume());
Assert.assertEquals("DiMark, DBA, 1990-2000" + "; " + "VMware, Developer, 2007-" + "; "
+ "SpringDeveloper.com, Consultant, 2005-2006", found.getResume().getJobs());
}

View File

@@ -23,7 +23,8 @@ public class Address {
private String state;
private String zip;
public Address(Integer streetNumber, String streetName, String city, String state, String zip) {
super();
this.streetNumber = streetNumber;
this.streetName = streetName;
@@ -35,41 +36,34 @@ public class Address {
public Integer getStreetNumber() {
return streetNumber;
}
public void setStreetNumber(Integer streetNumber) {
this.streetNumber = streetNumber;
}
public String getStreetName() {
return streetName;
}
public void setStreetName(String streetName) {
this.streetName = streetName;
}
public String getCity() {
return city;
}
public void setCity(String city) {
this.city = city;
}
public String getState() {
return state;
}
public void setState(String state) {
this.state = state;
}
public String getZip() {
return zip;
}
public void setZip(String zip) {
this.zip = zip;
}
}

View File

@@ -4,7 +4,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.1.0.M2</version>
<version>1.0.1.RELEASE</version>
<relativePath>../spring-data-mongodb-parent/pom.xml</relativePath>
</parent>
<artifactId>spring-data-mongodb-log4j</artifactId>
@@ -27,7 +27,45 @@
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>${log4j.version}</version>
<exclusions>
<exclusion>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
</exclusion>
<exclusion>
<groupId>javax.jms</groupId>
<artifactId>jms</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jdmk</groupId>
<artifactId>jmxtools</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jmx</groupId>
<artifactId>jmxri</artifactId>
</exclusion>
</exclusions>
<scope>compile</scope>
</dependency>
<!-- Test dependencies -->
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-all</artifactId>
<version>1.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

View File

@@ -4,25 +4,133 @@
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<name>Spring Data MongoDB Parent</name>
<description>Spring Data project for MongoDB</description>
<url>http://www.springsource.org/spring-data/mongodb</url>
<version>1.1.0.M2</version>
<version>1.0.1.RELEASE</version>
<packaging>pom</packaging>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<!-- versions for commonly-used dependencies -->
<junit.version>4.10</junit.version>
<log4j.version>1.2.16</log4j.version>
<org.mockito.version>1.9.0</org.mockito.version>
<org.slf4j.version>1.6.1</org.slf4j.version>
<junit.version>4.8.1</junit.version>
<log4j.version>1.2.15</log4j.version>
<org.mockito.version>1.8.4</org.mockito.version>
<org.slf4j.version>1.5.10</org.slf4j.version>
<org.codehaus.jackson.version>1.6.1</org.codehaus.jackson.version>
<org.springframework.version.30>3.0.7.RELEASE</org.springframework.version.30>
<org.springframework.version.40>4.0.0.RELEASE</org.springframework.version.40>
<org.springframework.version.range>[${org.springframework.version.30}, ${org.springframework.version.40})</org.springframework.version.range>
<data.commons.version>1.4.0.M1</data.commons.version>
<data.commons.version>1.2.1.RELEASE</data.commons.version>
<aspectj.version>1.6.11.RELEASE</aspectj.version>
<bundlor.failOnWarnings>true</bundlor.failOnWarnings>
</properties>
<developers>
<developer>
<id>ogierke</id>
<name>Oliver Gierke</name>
<email>ogierke at vmware.com</email>
<organization>SpringSource</organization>
<organizationUrl>http://www.springsource.com</organizationUrl>
<roles>
<role>Project lead</role>
</roles>
<timezone>+1</timezone>
</developer>
<developer>
<id>trisberg</id>
<name>Thomas Risberg</name>
<email>trisberg at vmware.com</email>
<organization>SpringSource</organization>
<organizationUrl>http://www.SpringSource.com</organizationUrl>
<roles>
<role>Project Admin</role>
<role>Developer</role>
</roles>
<timezone>-5</timezone>
</developer>
<developer>
<id>mpollack</id>
<name>Mark Pollack</name>
<email>mpollack at vmware.com</email>
<organization>SpringSource</organization>
<organizationUrl>http://www.SpringSource.com</organizationUrl>
<roles>
<role>Project Admin</role>
<role>Developer</role>
</roles>
<timezone>-5</timezone>
</developer>
<developer>
<id>jbrisbin</id>
<name>Jon Brisbin</name>
<email>jbrisbin at vmware.com</email>
<organization>SpringSource</organization>
<organizationUrl>http://www.springsource.com</organizationUrl>
<roles>
<role>Developer</role>
</roles>
<timezone>-6</timezone>
</developer>
</developers>
<licenses>
<license>
<name>Apache License, Version 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0</url>
<comments>
Copyright 2010 the original author or authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied.
See the License for the specific language governing permissions and
limitations under the License.
</comments>
</license>
</licenses>
<profiles>
<profile>
<id>strict</id>
<properties>
<maven.test.failure.ignore>false</maven.test.failure.ignore>
</properties>
</profile>
<profile>
<id>fast</id>
<properties>
<maven.test.skip>true</maven.test.skip>
<maven.javadoc.skip>true</maven.javadoc.skip>
</properties>
</profile>
<profile>
<id>staging</id>
<distributionManagement>
<site>
<id>spring-site-staging</id>
<url>file:///${java.io.tmpdir}/spring-data/mongodb/docs</url>
</site>
<repository>
<id>spring-milestone-staging</id>
<url>file:///${java.io.tmpdir}/spring-data/mongodb/milestone</url>
</repository>
<snapshotRepository>
<id>spring-snapshot-staging</id>
<url>file:///${java.io.tmpdir}/spring-data/mongodb/snapshot</url>
</snapshotRepository>
</distributionManagement>
</profile>
<profile>
<id>bootstrap</id>
<!-- TODO: move the repositories in here before release -->
</profile>
</profiles>
<distributionManagement>
<!-- see 'staging' profile for dry-run deployment settings -->
<downloadUrl>http://www.springsource.com/download/community
@@ -34,9 +142,9 @@
</url>
</site>
<repository>
<id>spring-milestone</id>
<name>Spring Milestone Repository</name>
<url>s3://maven.springframework.org/milestone</url>
<id>spring-release</id>
<name>Spring Release Repository</name>
<url>s3://maven.springframework.org/release</url>
</repository>
<snapshotRepository>
<id>spring-snapshot</id>
@@ -44,68 +152,166 @@
<url>s3://maven.springframework.org/snapshot</url>
</snapshotRepository>
</distributionManagement>
<scm>
<url>https://github.com/SpringSource/spring-data-mongodb</url>
</scm>
<dependencyManagement>
<!--
inheritable <dependency> declarations for child poms. children still
must explicitly declare the groupId/artifactId of these dependencies
in order for them to show up on the classpath, but metadata like
<version> and <scope> are inherited, which cuts down on verbosity.
see
http://www.sonatype.com/books/mvnref-book/reference/pom-relationships-sect-dep-manage.html
-->
<dependencies>
<!-- Spring -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-aop</artifactId>
<version>${org.springframework.version.range}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<version>${org.springframework.version.range}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>${org.springframework.version.range}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
<version>${org.springframework.version.range}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-orm</artifactId>
<version>${org.springframework.version.range}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-web</artifactId>
<version>${org.springframework.version.range}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-expression</artifactId>
<version>${org.springframework.version.range}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>${org.springframework.version.range}</version>
<scope>test</scope>
</dependency>
<!-- Spring Data -->
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-commons-core</artifactId>
<version>${data.commons.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-commons-aspects</artifactId>
<version>${data.commons.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-couchdb</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>1.0.1.RELEASE</version>
</dependency>
<!-- Logging -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${org.slf4j.version}</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
<version>${org.slf4j.version}</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${org.slf4j.version}</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>${log4j.version}</version>
<exclusions>
<exclusion>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
</exclusion>
<exclusion>
<groupId>javax.jms</groupId>
<artifactId>jms</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jdmk</groupId>
<artifactId>jmxtools</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jmx</groupId>
<artifactId>jmxri</artifactId>
</exclusion>
</exclusions>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>javax.annotation</groupId>
<artifactId>jsr250-api</artifactId>
<version>1.0</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<version>${org.mockito.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<!-- Test dependencies -->
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-library</artifactId>
<version>1.2.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit-dep</artifactId>
<version>${junit.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<version>${org.mockito.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
<version>1.6</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${org.slf4j.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
<version>${org.slf4j.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${org.slf4j.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>${log4j.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>${org.springframework.version.range}</version>
<scope>test</scope>
</dependency>
<!--
dependency definitions to be inherited by child poms. any
<dependency> declarations here will automatically show up on child
project classpaths. only items that are truly common across all
projects (modules and samples) should go here. otherwise, consider
<dependencyManagement> above
-->
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>${log4j.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
@@ -161,7 +367,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.4</version>
<version>2.3.2</version>
<configuration>
<source>1.5</source>
<target>1.5</target>
@@ -181,16 +387,13 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.12</version>
<version>2.8</version>
<configuration>
<useFile>false</useFile>
<includes>
<include>**/*Tests.java</include>
</includes>
<excludes>
<exclude>**/PerformanceTests.java</exclude>
</excludes>
<junitArtifactName>junit:junit-dep</junitArtifactName>
<junitArtifactName>junit:junit</junitArtifactName>
</configuration>
</plugin>
<plugin>
@@ -226,7 +429,7 @@
<artifactId>com.springsource.bundlor.maven</artifactId>
<version>1.0.0.RELEASE</version>
<configuration>
<failOnWarnings>${bundlor.failOnWarnings}</failOnWarnings>
<failOnWarnings>true</failOnWarnings>
</configuration>
<executions>
<execution>
@@ -248,8 +451,8 @@
</pluginRepositories>
<repositories>
<repository>
<id>spring-libs-snapshot</id>
<url>http://repo.springsource.org/libs-snapshot</url>
<id>spring-libs-release</id>
<url>http://repo.springsource.org/libs-release</url>
</repository>
</repositories>
<reporting>

View File

@@ -49,12 +49,6 @@
</element>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core"/>
</element>
<element name="GridFS" type="Layer">
<element name="Assignment" type="TypeFilter">
<element name="**.gridfs.**" type="IncludeTypePattern"/>
</element>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core"/>
</element>
<element name="Core" type="Layer">
<element name="Assignment" type="TypeFilter">
<element name="**.core.**" type="IncludeTypePattern"/>

View File

@@ -4,7 +4,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.1.0.M2</version>
<version>1.0.1.RELEASE</version>
<relativePath>../spring-data-mongodb-parent/pom.xml</relativePath>
</parent>
<artifactId>spring-data-mongodb</artifactId>
@@ -12,45 +12,24 @@
<properties>
<mongo.version>2.7.1</mongo.version>
<querydsl.version>2.6.0</querydsl.version>
<cdi.version>1.0</cdi.version>
<validation.version>1.0.0.GA</validation.version>
<webbeans.version>1.1.3</webbeans.version>
<querydsl.version>2.3.0</querydsl.version>
</properties>
<dependencies>
<!-- Spring -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
<version>${org.springframework.version.range}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${org.springframework.version.range}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<version>${org.springframework.version.range}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>${org.springframework.version.range}</version>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-expression</artifactId>
<version>${org.springframework.version.range}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<scope>test</scope>
</dependency>
<!-- Spring Data -->
@@ -96,81 +75,59 @@
<dependency>
<groupId>javax.annotation</groupId>
<artifactId>jsr250-api</artifactId>
<version>1.0</version>
<optional>true</optional>
</dependency>
<!-- CDI -->
<dependency>
<groupId>javax.enterprise</groupId>
<artifactId>cdi-api</artifactId>
<version>${cdi.version}</version>
<scope>provided</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>javax.el</groupId>
<artifactId>el-api</artifactId>
<version>${cdi.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.openwebbeans.test</groupId>
<artifactId>cditest-owb</artifactId>
<version>${webbeans.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
<version>2.5</version>
<scope>test</scope>
</dependency>
<!-- JSR 303 Validation -->
<dependency>
<groupId>javax.validation</groupId>
<artifactId>validation-api</artifactId>
<version>${validation.version}</version>
<optional>true</optional>
</dependency>
<!-- Test dependencies -->
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-validator</artifactId>
<version>4.2.0.Final</version>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-all</artifactId>
<version>1.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
<version>1.6</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<profiles>
<profile>
<id>performance-tests</id>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.8</version>
<configuration>
<includes>
<include>**/PerformanceTests.java</include>
</includes>
<excludes>
<exclude>none</exclude>
</excludes>
<junitArtifactName>junit:junit-dep</junitArtifactName>
</configuration>
</plugin>
</plugins>
</build>
</profile>
</profiles>
<build>
<plugins>
<plugin>

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2010-2012 the original author or authors.
* Copyright 2010-2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,51 +16,42 @@
package org.springframework.data.mongodb;
import org.springframework.dao.DataAccessResourceFailureException;
import org.springframework.data.authentication.UserCredentials;
/**
* Exception being thrown in case we cannot connect to a MongoDB instance.
*
* @author Oliver Gierke
*/
public class CannotGetMongoDbConnectionException extends DataAccessResourceFailureException {
private final UserCredentials credentials;
private final String database;
private String username;
private char[] password;
private String database;
private static final long serialVersionUID = 1172099106475265589L;
public CannotGetMongoDbConnectionException(String msg, Throwable cause) {
super(msg, cause);
this.database = null;
this.credentials = UserCredentials.NO_CREDENTIALS;
}
public CannotGetMongoDbConnectionException(String msg) {
this(msg, null, UserCredentials.NO_CREDENTIALS);
}
public CannotGetMongoDbConnectionException(String msg, String database, UserCredentials credentials) {
super(msg);
this.database = database;
this.credentials = credentials;
}
public CannotGetMongoDbConnectionException(String msg, String database, String username, char[] password) {
super(msg);
this.username = username;
this.password = password == null ? null : password.clone();
this.database = database;
}
/**
* Returns the {@link UserCredentials} that were used when trying to connect to the MongoDB instance.
*
* @return
*/
public UserCredentials getCredentials() {
return this.credentials;
}
public String getUsername() {
return username;
}
public char[] getPassword() {
return password;
}
/**
* Returns the name of the database trying to be accessed.
*
* @return
*/
public String getDatabase() {
return database;
}
}
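To show how a caller might deal with the two shapes of this exception (the credentials-based accessor on one side, username/password on the other), here is a hedged sketch; the wrapping method, the mongoDbFactory argument, and the LOGGER are assumptions, not part of the source:

// Hypothetical caller; assumes an SLF4J LOGGER in scope. The exception is unchecked
// (it extends DataAccessResourceFailureException), so it is rethrown after logging.
DB fetchDatabase(MongoDbFactory mongoDbFactory) {
    try {
        return mongoDbFactory.getDb();
    } catch (CannotGetMongoDbConnectionException e) {
        // getDatabase() is available in both variants shown above
        LOGGER.warn("Could not connect to database '" + e.getDatabase() + "'", e);
        throw e;
    }
}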

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2011-2012 the original author or authors.
* Copyright 2010-2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,176 +15,88 @@
*/
package org.springframework.data.mongodb.config;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import com.mongodb.Mongo;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.convert.converter.Converter;
import org.springframework.core.type.filter.AnnotationTypeFilter;
import org.springframework.data.annotation.Persistent;
import org.springframework.data.authentication.UserCredentials;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.convert.CustomConversions;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.util.ClassUtils;
import org.springframework.util.StringUtils;
import com.mongodb.Mongo;
/**
* Base class for Spring Data Mongo configuration using JavaConfig.
*
* @author Mark Pollack
* @author Oliver Gierke
*/
@Configuration
public abstract class AbstractMongoConfiguration {
/**
* Return the name of the database to connect to.
*
* @return must not be {@literal null}.
*/
protected abstract String getDatabaseName();
public abstract String getDatabaseName();
/**
* Return the {@link Mongo} instance to connect to.
*
* @return
* @throws Exception
*/
@Bean
public abstract Mongo mongo() throws Exception;
/**
* Creates a {@link MongoTemplate}.
*
* @return
* @throws Exception
*/
@Bean
public MongoTemplate mongoTemplate() throws Exception {
return new MongoTemplate(mongoDbFactory(), mappingMongoConverter());
}
/**
* Creates a {@link SimpleMongoDbFactory} to be used by the {@link MongoTemplate}. Will use the {@link Mongo} instance
* configured in {@link #mongo()}.
*
* @see #mongo()
* @see #mongoTemplate()
* @return
* @throws Exception
*/
@Bean
public SimpleMongoDbFactory mongoDbFactory() throws Exception {
UserCredentials credentials = getUserCredentials();
if (credentials == null) {
public MongoDbFactory mongoDbFactory() throws Exception {
if (getUserCredentials() == null) {
return new SimpleMongoDbFactory(mongo(), getDatabaseName());
} else {
return new SimpleMongoDbFactory(mongo(), getDatabaseName(), credentials);
return new SimpleMongoDbFactory(mongo(), getDatabaseName(), getUserCredentials());
}
}
/**
* Return the base package to scan for mapped {@link Document}s.
*
* @return
*/
protected String getMappingBasePackage() {
public String getMappingBasePackage() {
return "";
}
public UserCredentials getUserCredentials() {
return null;
}
/**
* Return {@link UserCredentials} to be used when connecting to the MongoDB instance or {@literal null} if none shall
* be used.
*
* @return
*/
protected UserCredentials getUserCredentials() {
return null;
}
/**
* Creates a {@link MongoMappingContext} equipped with entity classes scanned from the mapping base package.
*
* @see #getMappingBasePackage()
* @return
* @throws ClassNotFoundException
*/
@Bean
public MongoMappingContext mongoMappingContext() throws ClassNotFoundException {
public MongoMappingContext mongoMappingContext() throws ClassNotFoundException, LinkageError {
MongoMappingContext mappingContext = new MongoMappingContext();
mappingContext.setInitialEntitySet(getInitialEntitySet());
mappingContext.setSimpleTypeHolder(customConversions().getSimpleTypeHolder());
mappingContext.initialize();
return mappingContext;
}
/**
* Register custom {@link Converter}s in a {@link CustomConversions} object if required. These
* {@link CustomConversions} will be registered with the {@link #mappingMongoConverter()} and
* {@link #mongoMappingContext()}. Returns an empty {@link CustomConversions} instance by default.
*
* @return must not be {@literal null}.
*/
@Bean
public CustomConversions customConversions() {
return new CustomConversions(Collections.emptyList());
}
/**
* Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and
* {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied.
*
* @see #customConversions()
* @see #mongoMappingContext()
* @see #mongoDbFactory()
* @return
* @throws Exception
*/
@Bean
public MappingMongoConverter mappingMongoConverter() throws Exception {
MappingMongoConverter converter = new MappingMongoConverter(mongoDbFactory(), mongoMappingContext());
converter.setCustomConversions(customConversions());
return converter;
}
/**
* Scans the mapping base package for classes annotated with {@link Document}.
*
* @see #getMappingBasePackage()
* @return
* @throws ClassNotFoundException
*/
protected Set<Class<?>> getInitialEntitySet() throws ClassNotFoundException {
String basePackage = getMappingBasePackage();
Set<Class<?>> initialEntitySet = new HashSet<Class<?>>();
if (StringUtils.hasText(basePackage)) {
ClassPathScanningCandidateComponentProvider componentProvider = new ClassPathScanningCandidateComponentProvider(
false);
componentProvider.addIncludeFilter(new AnnotationTypeFilter(Document.class));
componentProvider.addIncludeFilter(new AnnotationTypeFilter(Persistent.class));
Set<Class<?>> initialEntitySet = new HashSet<Class<?>>();
for (BeanDefinition candidate : componentProvider.findCandidateComponents(basePackage)) {
initialEntitySet.add(ClassUtils.forName(candidate.getBeanClassName(),
AbstractMongoConfiguration.class.getClassLoader()));
initialEntitySet.add(ClassUtils.forName(candidate.getBeanClassName(), mappingContext.getClass()
.getClassLoader()));
}
mappingContext.setInitialEntitySet(initialEntitySet);
}
return mappingContext;
}
return initialEntitySet;
@Bean
public MappingMongoConverter mappingMongoConverter() throws Exception {
MappingMongoConverter converter = new MappingMongoConverter(mongoDbFactory(), mongoMappingContext());
afterMappingMongoConverterCreation(converter);
return converter;
}
/**
* Hook that allows post-processing after the MappingMongoConverter has been successfully created.
*
* @param converter
*/
protected void afterMappingMongoConverterCreation(MappingMongoConverter converter) {
}
}
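As a usage sketch, a concrete configuration class only has to supply the database name and the Mongo instance; the bean names, database name, and host below are made up, and note that the two versions above differ in whether getDatabaseName() and getUserCredentials() are public or protected (declaring the overrides public satisfies both):

@Configuration
public class ApplicationConfig extends AbstractMongoConfiguration {

    @Override
    public String getDatabaseName() {
        return "mydatabase"; // hypothetical database name
    }

    @Override
    @Bean
    public Mongo mongo() throws Exception {
        return new Mongo("localhost"); // connects to a local mongod on the default port
    }
}

The inherited mongoTemplate(), mongoDbFactory(), and mapping-related @Bean methods then pick these values up automatically.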

View File

@@ -25,5 +25,4 @@ public abstract class BeanNames {
static final String INDEX_HELPER = "indexCreationHelper";
static final String MONGO = "mongo";
static final String DB_FACTORY = "mongoDbFactory";
static final String VALIDATING_EVENT_LISTENER = "validatingMongoEventListener";
}

View File

@@ -1,11 +1,11 @@
/*
* Copyright 2011-2012 the original author or authors.
* Copyright (c) 2011 by the original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -13,6 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;
import static org.springframework.data.mongodb.config.BeanNames.*;
@@ -29,14 +30,12 @@ import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.BeanDefinitionHolder;
import org.springframework.beans.factory.config.RuntimeBeanReference;
import org.springframework.beans.factory.parsing.BeanComponentDefinition;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.GenericBeanDefinition;
import org.springframework.beans.factory.support.ManagedList;
import org.springframework.beans.factory.support.ManagedSet;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.beans.factory.xml.AbstractBeanDefinitionParser;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
@@ -53,25 +52,18 @@ import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCreator;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.mapping.event.ValidatingMongoEventListener;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.StringUtils;
import org.springframework.util.xml.DomUtils;
import org.w3c.dom.Element;
/**
* Bean definition parser for the {@code mapping-converter} element.
*
* @author Jon Brisbin
* @author Jon Brisbin <jbrisbin@vmware.com>
* @author Oliver Gierke
* @author Maciej Walkowiak
*/
public class MappingMongoConverterParser extends AbstractBeanDefinitionParser {
private static final String BASE_PACKAGE = "base-package";
private static final boolean jsr303Present = ClassUtils.isPresent("javax.validation.Validator",
MappingMongoConverterParser.class.getClassLoader());
@Override
protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext)
@@ -115,53 +107,9 @@ public class MappingMongoConverterParser extends AbstractBeanDefinitionParser {
registry.registerBeanDefinition(INDEX_HELPER, indexHelperBuilder.getBeanDefinition());
}
BeanDefinition validatingMongoEventListener = potentiallyCreateValidatingMongoEventListener(element, parserContext);
if (validatingMongoEventListener != null) {
registry.registerBeanDefinition(VALIDATING_EVENT_LISTENER, validatingMongoEventListener);
}
return converterBuilder.getBeanDefinition();
}
private BeanDefinition potentiallyCreateValidatingMongoEventListener(Element element, ParserContext parserContext) {
String disableValidation = element.getAttribute("disable-validation");
boolean validationDisabled = StringUtils.hasText(disableValidation) && Boolean.valueOf(disableValidation);
if (!validationDisabled) {
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition();
RuntimeBeanReference validator = getValidator(builder, parserContext);
if (validator != null) {
builder.getRawBeanDefinition().setBeanClass(ValidatingMongoEventListener.class);
builder.addConstructorArgValue(validator);
return builder.getBeanDefinition();
}
}
return null;
}
private RuntimeBeanReference getValidator(Object source, ParserContext parserContext) {
if (!jsr303Present) {
return null;
}
RootBeanDefinition validatorDef = new RootBeanDefinition(
"org.springframework.validation.beanvalidation.LocalValidatorFactoryBean");
validatorDef.setSource(source);
validatorDef.setRole(BeanDefinition.ROLE_INFRASTRUCTURE);
String validatorName = parserContext.getReaderContext().registerWithGeneratedName(validatorDef);
parserContext.registerComponent(new BeanComponentDefinition(validatorDef, validatorName));
return new RuntimeBeanReference(validatorName);
}
private String potentiallyCreateMappingContext(Element element, ParserContext parserContext,
BeanDefinition conversionsDefinition) {

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2011-2012 by the original author(s).
* Copyright 2011 by the original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,19 +15,19 @@
*/
package org.springframework.data.mongodb.config;
import static org.springframework.data.config.ParsingUtils.*;
import static org.springframework.data.mongodb.config.MongoParsingUtils.*;
import static org.springframework.data.mongodb.config.BeanNames.*;
import static org.springframework.data.mongodb.config.ParsingUtils.*;
import org.springframework.beans.factory.BeanDefinitionStoreException;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.parsing.BeanComponentDefinition;
import org.springframework.beans.factory.config.RuntimeBeanReference;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionReaderUtils;
import org.springframework.beans.factory.xml.AbstractBeanDefinitionParser;
import org.springframework.beans.factory.xml.BeanDefinitionParser;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.data.authentication.UserCredentials;
import org.springframework.data.config.BeanComponentDefinitionBuilder;
import org.springframework.data.mongodb.core.MongoFactoryBean;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.util.StringUtils;
@@ -44,29 +44,19 @@ import com.mongodb.MongoURI;
*/
public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
/*
* (non-Javadoc)
* @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext)
*/
@Override
protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext)
throws BeanDefinitionStoreException {
String id = super.resolveId(element, definition, parserContext);
return StringUtils.hasText(id) ? id : BeanNames.DB_FACTORY;
String id = element.getAttribute("id");
if (!StringUtils.hasText(id)) {
id = DB_FACTORY;
}
return id;
}
/*
* (non-Javadoc)
* @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
*/
@Override
protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {
Object source = parserContext.extractSource(element);
BeanComponentDefinitionBuilder helper = new BeanComponentDefinitionBuilder(element, parserContext);
String uri = element.getAttribute("uri");
String mongoRef = element.getAttribute("mongo-ref");
String dbname = element.getAttribute("dbname");
@@ -74,11 +64,12 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
// Common setup
BeanDefinitionBuilder dbFactoryBuilder = BeanDefinitionBuilder.genericBeanDefinition(SimpleMongoDbFactory.class);
setPropertyValue(dbFactoryBuilder, element, "write-concern", "writeConcern");
ParsingUtils.setPropertyValue(element, dbFactoryBuilder, "write-concern", "writeConcern");
if (StringUtils.hasText(uri)) {
if (StringUtils.hasText(mongoRef) || StringUtils.hasText(dbname) || userCredentials != null) {
parserContext.getReaderContext().error("Configure either Mongo URI or details individually!", source);
parserContext.getReaderContext().error("Configure either Mongo URI or details individually!",
parserContext.extractSource(element));
}
dbFactoryBuilder.addConstructorArgValue(getMongoUri(uri));
@@ -86,26 +77,19 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
}
// Defaulting
if (StringUtils.hasText(mongoRef)) {
dbFactoryBuilder.addConstructorArgReference(mongoRef);
} else {
dbFactoryBuilder.addConstructorArgValue(registerMongoBeanDefinition(element, parserContext));
}
mongoRef = StringUtils.hasText(mongoRef) ? mongoRef : registerMongoBeanDefinition(element, parserContext);
dbname = StringUtils.hasText(dbname) ? dbname : "db";
dbFactoryBuilder.addConstructorArgValue(new RuntimeBeanReference(mongoRef));
dbFactoryBuilder.addConstructorArgValue(dbname);
if (userCredentials != null) {
dbFactoryBuilder.addConstructorArgValue(userCredentials);
}
BeanDefinitionBuilder writeConcernPropertyEditorBuilder = getWriteConcernPropertyEditorBuilder();
ParsingUtils.registerWriteConcernPropertyEditor(parserContext.getRegistry());
BeanComponentDefinition component = helper.getComponent(writeConcernPropertyEditorBuilder);
parserContext.registerBeanComponent(component);
return (AbstractBeanDefinition) helper.getComponentIdButFallback(dbFactoryBuilder, BeanNames.DB_FACTORY)
.getBeanDefinition();
return getSourceBeanDefinition(dbFactoryBuilder, parserContext, element);
}
/**
@@ -116,13 +100,14 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
* @param parserContext must not be {@literal null}.
* @return
*/
private BeanDefinition registerMongoBeanDefinition(Element element, ParserContext parserContext) {
private String registerMongoBeanDefinition(Element element, ParserContext parserContext) {
BeanDefinitionBuilder mongoBuilder = BeanDefinitionBuilder.genericBeanDefinition(MongoFactoryBean.class);
setPropertyValue(mongoBuilder, element, "host");
setPropertyValue(mongoBuilder, element, "port");
ParsingUtils.setPropertyValue(element, mongoBuilder, "host");
ParsingUtils.setPropertyValue(element, mongoBuilder, "port");
return getSourceBeanDefinition(mongoBuilder, parserContext, element);
return BeanDefinitionReaderUtils.registerWithGeneratedName(mongoBuilder.getBeanDefinition(),
parserContext.getRegistry());
}
/**

View File

@@ -16,9 +16,7 @@
package org.springframework.data.mongodb.config;
import org.springframework.beans.factory.xml.NamespaceHandlerSupport;
import org.springframework.data.mongodb.repository.config.MongoRepositoryConfigurationExtension;
import org.springframework.data.repository.config.RepositoryBeanDefinitionParser;
import org.springframework.data.repository.config.RepositoryConfigurationExtension;
import org.springframework.data.mongodb.repository.config.MongoRepositoryConfigParser;
/**
* {@link org.springframework.beans.factory.xml.NamespaceHandler} for Mongo DB based repositories.
@@ -34,10 +32,7 @@ public class MongoNamespaceHandler extends NamespaceHandlerSupport {
*/
public void init() {
RepositoryConfigurationExtension extension = new MongoRepositoryConfigurationExtension();
RepositoryBeanDefinitionParser repositoryBeanDefinitionParser = new RepositoryBeanDefinitionParser(extension);
registerBeanDefinitionParser("repositories", repositoryBeanDefinitionParser);
registerBeanDefinitionParser("repositories", new MongoRepositoryConfigParser());
registerBeanDefinitionParser("mapping-converter", new MappingMongoConverterParser());
registerBeanDefinitionParser("mongo", new MongoParser());
registerBeanDefinitionParser("db-factory", new MongoDbFactoryParser());

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2011-2012 the original author or authors.
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -13,20 +13,20 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;
import java.util.Map;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.BeanDefinitionStoreException;
import org.springframework.beans.factory.config.CustomEditorConfigurer;
import org.springframework.beans.factory.parsing.BeanComponentDefinition;
import org.springframework.beans.factory.parsing.CompositeComponentDefinition;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionReaderUtils;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.ManagedMap;
import org.springframework.beans.factory.xml.BeanDefinitionParser;
import org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.data.config.BeanComponentDefinitionBuilder;
import org.springframework.data.config.ParsingUtils;
import org.springframework.data.mongodb.core.MongoFactoryBean;
import org.springframework.util.StringUtils;
import org.w3c.dom.Element;
@@ -35,59 +35,54 @@ import org.w3c.dom.Element;
* Parser for &lt;mongo&gt; definitions.
*
* @author Mark Pollack
* @author Oliver Gierke
*/
public class MongoParser implements BeanDefinitionParser {
public class MongoParser extends AbstractSingleBeanDefinitionParser {
/*
* (non-Javadoc)
* @see org.springframework.beans.factory.xml.BeanDefinitionParser#parse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
*/
public BeanDefinition parse(Element element, ParserContext parserContext) {
@Override
protected Class<?> getBeanClass(Element element) {
return MongoFactoryBean.class;
}
Object source = parserContext.extractSource(element);
String id = element.getAttribute("id");
@Override
protected void doParse(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) {
BeanComponentDefinitionBuilder helper = new BeanComponentDefinitionBuilder(element, parserContext);
ParsingUtils.setPropertyValue(element, builder, "port", "port");
ParsingUtils.setPropertyValue(element, builder, "host", "host");
ParsingUtils.setPropertyValue(element, builder, "write-concern", "writeConcern");
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(MongoFactoryBean.class);
ParsingUtils.setPropertyValue(builder, element, "port", "port");
ParsingUtils.setPropertyValue(builder, element, "host", "host");
ParsingUtils.setPropertyValue(builder, element, "write-concern", "writeConcern");
ParsingUtils.parseMongoOptions(element, builder);
ParsingUtils.parseReplicaSet(element, builder);
MongoParsingUtils.parseMongoOptions(element, builder);
MongoParsingUtils.parseReplicaSet(element, builder);
registerServerAddressPropertyEditor(parserContext.getRegistry());
ParsingUtils.registerWriteConcernPropertyEditor(parserContext.getRegistry());
String defaultedId = StringUtils.hasText(id) ? id : BeanNames.MONGO;
parserContext.pushContainingComponent(new CompositeComponentDefinition("Mongo", source));
BeanComponentDefinition mongoComponent = helper.getComponent(builder, defaultedId);
parserContext.registerBeanComponent(mongoComponent);
BeanComponentDefinition serverAddressPropertyEditor = helper.getComponent(registerServerAddressPropertyEditor());
parserContext.registerBeanComponent(serverAddressPropertyEditor);
BeanComponentDefinition writeConcernPropertyEditor = helper.getComponent(MongoParsingUtils
.getWriteConcernPropertyEditorBuilder());
parserContext.registerBeanComponent(writeConcernPropertyEditor);
parserContext.popAndRegisterContainingComponent();
return mongoComponent.getBeanDefinition();
}
/**
* One should only register one bean definition but want to have the convenience of using
* AbstractSingleBeanDefinitionParser but have the side effect of registering a 'default' property editor with the
* container.
*
* @param parserContext the ParserContext to
*/
private BeanDefinitionBuilder registerServerAddressPropertyEditor() {
private void registerServerAddressPropertyEditor(BeanDefinitionRegistry registry) {
BeanDefinitionBuilder customEditorConfigurer = BeanDefinitionBuilder
.genericBeanDefinition(CustomEditorConfigurer.class);
Map<String, String> customEditors = new ManagedMap<String, String>();
customEditors.put("com.mongodb.ServerAddress[]",
"org.springframework.data.mongodb.config.ServerAddressPropertyEditor");
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class);
builder.addPropertyValue("customEditors", customEditors);
return builder;
customEditorConfigurer.addPropertyValue("customEditors", customEditors);
BeanDefinitionReaderUtils.registerWithGeneratedName(customEditorConfigurer.getBeanDefinition(), registry);
}
}
@Override
protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext)
throws BeanDefinitionStoreException {
String name = super.resolveId(element, definition, parserContext);
if (!StringUtils.hasText(name)) {
name = "mongo";
}
return name;
}
}

View File

@@ -1,103 +0,0 @@
/*
* Copyright 2011-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;
import static org.springframework.data.config.ParsingUtils.*;
import java.util.Map;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.CustomEditorConfigurer;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.ManagedMap;
import org.springframework.beans.factory.xml.BeanDefinitionParser;
import org.springframework.data.mongodb.core.MongoOptionsFactoryBean;
import org.springframework.util.xml.DomUtils;
import org.w3c.dom.Element;
/**
* Utility methods for {@link BeanDefinitionParser} implementations for MongoDB.
*
* @author Mark Pollack
* @author Oliver Gierke
*/
abstract class MongoParsingUtils {
private MongoParsingUtils() {
}
/**
* Parses the mongo replica-set element.
*
* @param parserContext the parser context
* @param element the mongo element
* @param mongoBuilder the bean definition builder to populate
* @return
*/
static void parseReplicaSet(Element element, BeanDefinitionBuilder mongoBuilder) {
setPropertyValue(mongoBuilder, element, "replica-set", "replicaSetSeeds");
}
/**
* Parses the mongo:options sub-element. Populates the given attribute factory with the proper attributes.
*
* @return true if parsing actually occurred, false otherwise
*/
static boolean parseMongoOptions(Element element, BeanDefinitionBuilder mongoBuilder) {
Element optionsElement = DomUtils.getChildElementByTagName(element, "options");
if (optionsElement == null) {
return false;
}
BeanDefinitionBuilder optionsDefBuilder = BeanDefinitionBuilder
.genericBeanDefinition(MongoOptionsFactoryBean.class);
setPropertyValue(optionsDefBuilder, optionsElement, "connections-per-host", "connectionsPerHost");
setPropertyValue(optionsDefBuilder, optionsElement, "threads-allowed-to-block-for-connection-multiplier",
"threadsAllowedToBlockForConnectionMultiplier");
setPropertyValue(optionsDefBuilder, optionsElement, "max-wait-time", "maxWaitTime");
setPropertyValue(optionsDefBuilder, optionsElement, "connect-timeout", "connectTimeout");
setPropertyValue(optionsDefBuilder, optionsElement, "socket-timeout", "socketTimeout");
setPropertyValue(optionsDefBuilder, optionsElement, "socket-keep-alive", "socketKeepAlive");
setPropertyValue(optionsDefBuilder, optionsElement, "auto-connect-retry", "autoConnectRetry");
setPropertyValue(optionsDefBuilder, optionsElement, "max-auto-connect-retry-time", "maxAutoConnectRetryTime");
setPropertyValue(optionsDefBuilder, optionsElement, "write-number", "writeNumber");
setPropertyValue(optionsDefBuilder, optionsElement, "write-timeout", "writeTimeout");
setPropertyValue(optionsDefBuilder, optionsElement, "write-fsync", "writeFsync");
setPropertyValue(optionsDefBuilder, optionsElement, "slave-ok", "slaveOk");
mongoBuilder.addPropertyValue("mongoOptions", optionsDefBuilder.getBeanDefinition());
return true;
}
/**
* Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a
* {@link WriteConcernPropertyEditor}.
*
* @return
*/
static BeanDefinitionBuilder getWriteConcernPropertyEditorBuilder() {
Map<String, Class<?>> customEditors = new ManagedMap<String, Class<?>>();
customEditors.put("com.mongodb.WriteConcern", WriteConcernPropertyEditor.class);
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class);
builder.addPropertyValue("customEditors", customEditors);
return builder;
}
}
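For readers configuring the driver options programmatically rather than through the XML namespace, the property names mapped above translate into setters on MongoOptionsFactoryBean. A rough sketch under that assumption; the exact setter set and timeout values are illustrative, not taken from the source:

// Programmatic counterpart of <mongo:options .../>, using property names the parser maps above.
MongoOptions mongoOptions() throws Exception {
    MongoOptionsFactoryBean factory = new MongoOptionsFactoryBean();
    factory.setConnectionsPerHost(50);  // connections-per-host
    factory.setConnectTimeout(2000);    // connect-timeout, in milliseconds
    factory.setSocketTimeout(5000);     // socket-timeout
    factory.afterPropertiesSet();       // normally invoked by the Spring container
    return factory.getObject();
}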

View File

@@ -0,0 +1,141 @@
/*
* Copyright (c) 2011 by the original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;
import java.util.Map;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.CustomEditorConfigurer;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionReaderUtils;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.ManagedMap;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.data.mongodb.core.MongoOptionsFactoryBean;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
import org.springframework.util.xml.DomUtils;
import org.w3c.dom.Element;
abstract class ParsingUtils {
/**
* Parses the mongo replica-set element.
*
* @param parserContext the parser context
* @param element the mongo element
* @param mongoBuilder the bean definition builder to populate
* @return true if parsing actually occurred, false otherwise
*/
static boolean parseReplicaSet(Element element, BeanDefinitionBuilder mongoBuilder) {
String replicaSetString = element.getAttribute("replica-set");
if (StringUtils.hasText(replicaSetString)) {
mongoBuilder.addPropertyValue("replicaSetSeeds", replicaSetString);
}
return true;
}
/**
* Parses the mongo:options sub-element. Populates the given attribute factory with the proper attributes.
*
* @return true if parsing actually occurred, false otherwise
*/
static boolean parseMongoOptions(Element element, BeanDefinitionBuilder mongoBuilder) {
Element optionsElement = DomUtils.getChildElementByTagName(element, "options");
if (optionsElement == null) {
return false;
}
BeanDefinitionBuilder optionsDefBuilder = BeanDefinitionBuilder
.genericBeanDefinition(MongoOptionsFactoryBean.class);
setPropertyValue(optionsElement, optionsDefBuilder, "connections-per-host", "connectionsPerHost");
setPropertyValue(optionsElement, optionsDefBuilder, "threads-allowed-to-block-for-connection-multiplier",
"threadsAllowedToBlockForConnectionMultiplier");
setPropertyValue(optionsElement, optionsDefBuilder, "max-wait-time", "maxWaitTime");
setPropertyValue(optionsElement, optionsDefBuilder, "connect-timeout", "connectTimeout");
setPropertyValue(optionsElement, optionsDefBuilder, "socket-timeout", "socketTimeout");
setPropertyValue(optionsElement, optionsDefBuilder, "socket-keep-alive", "socketKeepAlive");
setPropertyValue(optionsElement, optionsDefBuilder, "auto-connect-retry", "autoConnectRetry");
setPropertyValue(optionsElement, optionsDefBuilder, "max-auto-connect-retry-time", "maxAutoConnectRetryTime");
setPropertyValue(optionsElement, optionsDefBuilder, "write-number", "writeNumber");
setPropertyValue(optionsElement, optionsDefBuilder, "write-timeout", "writeTimeout");
setPropertyValue(optionsElement, optionsDefBuilder, "write-fsync", "writeFsync");
setPropertyValue(optionsElement, optionsDefBuilder, "slave-ok", "slaveOk");
mongoBuilder.addPropertyValue("mongoOptions", optionsDefBuilder.getBeanDefinition());
return true;
}
static void setPropertyValue(Element element, BeanDefinitionBuilder builder, String attrName, String propertyName) {
String attr = element.getAttribute(attrName);
if (StringUtils.hasText(attr)) {
builder.addPropertyValue(propertyName, attr);
}
}
/**
* Sets the property with the given attribute name on the given {@link BeanDefinitionBuilder} to the value of the
* attribute with the given name.
*
* @param element must not be {@literal null}.
* @param builder must not be {@literal null}.
* @param attrName must not be {@literal null} or empty.
*/
static void setPropertyValue(Element element, BeanDefinitionBuilder builder, String attrName) {
String attr = element.getAttribute(attrName);
if (StringUtils.hasText(attr)) {
builder.addPropertyValue(attrName, attr);
}
}
/**
* Returns the {@link BeanDefinition} built by the given {@link BeanDefinitionBuilder} enriched with source
* information derived from the given {@link Element}.
*
* @param builder must not be {@literal null}.
* @param context must not be {@literal null}.
* @param element must not be {@literal null}.
* @return
*/
static AbstractBeanDefinition getSourceBeanDefinition(BeanDefinitionBuilder builder, ParserContext context,
Element element) {
AbstractBeanDefinition definition = builder.getBeanDefinition();
definition.setSource(context.extractSource(element));
return definition;
}
/**
* Registers a {@link WriteConcernPropertyEditor} in the given {@link BeanDefinitionRegistry}.
*
* @param registry must not be {@literal null}.
*/
static void registerWriteConcernPropertyEditor(BeanDefinitionRegistry registry) {
Assert.notNull(registry);
BeanDefinitionBuilder customEditorConfigurer = BeanDefinitionBuilder
.genericBeanDefinition(CustomEditorConfigurer.class);
Map<String, Class<?>> customEditors = new ManagedMap<String, Class<?>>();
customEditors.put("com.mongodb.WriteConcern", WriteConcernPropertyEditor.class);
customEditorConfigurer.addPropertyValue("customEditors", customEditors);
BeanDefinitionReaderUtils.registerWithGeneratedName(customEditorConfigurer.getBeanDefinition(), registry);
}
}
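
The options parsing above simply binds the XML attributes onto a MongoOptionsFactoryBean definition by property name. A rough Java equivalent, assuming the factory bean exposes setters matching those property names and follows the usual FactoryBean/InitializingBean lifecycle:

import org.springframework.data.mongodb.core.MongoOptionsFactoryBean;
import com.mongodb.MongoOptions;

public class MongoOptionsSketch {

  public static void main(String[] args) throws Exception {
    MongoOptionsFactoryBean factory = new MongoOptionsFactoryBean();
    factory.setConnectionsPerHost(50);  // connections-per-host
    factory.setMaxWaitTime(1500);       // max-wait-time
    factory.setConnectTimeout(1000);    // connect-timeout
    factory.setWriteFsync(true);        // write-fsync
    factory.afterPropertiesSet();       // normally invoked by the container
    MongoOptions options = factory.getObject();
    System.out.println(options);
  }
}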

View File

@@ -17,11 +17,7 @@ package org.springframework.data.mongodb.config;
import java.beans.PropertyEditorSupport;
import java.net.UnknownHostException;
import java.util.HashSet;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.StringUtils;
import com.mongodb.ServerAddress;
@@ -34,8 +30,6 @@ import com.mongodb.ServerAddress;
*/
public class ServerAddressPropertyEditor extends PropertyEditorSupport {
private static final Logger LOG = LoggerFactory.getLogger(ServerAddressPropertyEditor.class);
/*
* (non-Javadoc)
* @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
@@ -44,49 +38,21 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
public void setAsText(String replicaSetString) {
String[] replicaSetStringArray = StringUtils.commaDelimitedListToStringArray(replicaSetString);
Set<ServerAddress> serverAddresses = new HashSet<ServerAddress>(replicaSetStringArray.length);
ServerAddress[] serverAddresses = new ServerAddress[replicaSetStringArray.length];
for (String element : replicaSetStringArray) {
for (int i = 0; i < replicaSetStringArray.length; i++) {
ServerAddress address = parseServerAddress(element);
String[] hostAndPort = StringUtils.delimitedListToStringArray(replicaSetStringArray[i], ":");
if (address != null) {
serverAddresses.add(address);
try {
serverAddresses[i] = new ServerAddress(hostAndPort[0], Integer.parseInt(hostAndPort[1]));
} catch (NumberFormatException e) {
throw new IllegalArgumentException("Could not parse port " + hostAndPort[1], e);
} catch (UnknownHostException e) {
throw new IllegalArgumentException("Could not parse host " + hostAndPort[0], e);
}
}
if (serverAddresses.isEmpty()) {
throw new IllegalArgumentException(
"Could not resolve at least one server of the replica set configuration! Validate your config!");
}
setValue(serverAddresses.toArray(new ServerAddress[serverAddresses.size()]));
}
/**
* Parses the given source into a {@link ServerAddress}.
*
* @param source
* @return the parsed {@link ServerAddress} or {@literal null} if the source could not be parsed
*/
private ServerAddress parseServerAddress(String source) {
String[] hostAndPort = StringUtils.delimitedListToStringArray(source.trim(), ":");
if (!StringUtils.hasText(source) || hostAndPort.length > 2) {
LOG.warn("Could not parse address source '{}'. Check your replica set configuration!", source);
return null;
}
try {
return hostAndPort.length == 1 ? new ServerAddress(hostAndPort[0]) : new ServerAddress(hostAndPort[0],
Integer.parseInt(hostAndPort[1]));
} catch (UnknownHostException e) {
LOG.warn("Could not parse host '{}'. Check your replica set configuration!", hostAndPort[0]);
} catch (NumberFormatException e) {
LOG.warn("Could not parse port '{}'. Check your replica set configuration!", hostAndPort[1]);
}
return null;
setValue(serverAddresses);
}
}
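
Both variants in the diff turn a comma-delimited replica-set string into ServerAddress values. A small usage sketch of the editor, assuming a host[:port] list as produced by the replica-set attribute:

import org.springframework.data.mongodb.config.ServerAddressPropertyEditor;
import com.mongodb.ServerAddress;

public class ReplicaSetParsingSketch {

  public static void main(String[] args) {
    ServerAddressPropertyEditor editor = new ServerAddressPropertyEditor();
    editor.setAsText("127.0.0.1:27017,127.0.0.1:27018");
    // Both sides of the diff end up setting a ServerAddress[] as the value.
    ServerAddress[] seeds = (ServerAddress[]) editor.getValue();
    for (ServerAddress seed : seeds) {
      System.out.println(seed.getHost() + ":" + seed.getPort());
    }
  }
}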

View File

@@ -1,38 +0,0 @@
/*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;
import org.springframework.core.convert.converter.Converter;
import com.mongodb.WriteConcern;
/**
* Converter to create {@link WriteConcern} instances from String representations.
*
* @author Oliver Gierke
*/
public class StringToWriteConcernConverter implements Converter<String, WriteConcern> {
/*
* (non-Javadoc)
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
*/
public WriteConcern convert(String source) {
WriteConcern writeConcern = WriteConcern.valueOf(source);
return writeConcern != null ? writeConcern : new WriteConcern(source);
}
}
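
The converter first consults WriteConcern.valueOf(...) for the well-known constants and only then falls back to the String-based constructor, e.g. for a replica-set tag or mode name. A minimal usage sketch ("myTag" is a made-up mode name used only to trigger the fallback):

import org.springframework.data.mongodb.config.StringToWriteConcernConverter;
import com.mongodb.WriteConcern;

public class WriteConcernConversionSketch {

  public static void main(String[] args) {
    StringToWriteConcernConverter converter = new StringToWriteConcernConverter();
    WriteConcern safe = converter.convert("SAFE");    // resolved via WriteConcern.valueOf(...)
    WriteConcern tagged = converter.convert("myTag"); // no such constant -> new WriteConcern("myTag")
    System.out.println(safe + " / " + tagged);
  }
}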

View File

@@ -16,11 +16,14 @@
package org.springframework.data.mongodb.core;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.springframework.dao.DataAccessException;
import org.springframework.data.mongodb.core.index.IndexDefinition;
import org.springframework.data.mongodb.core.index.IndexField;
import org.springframework.data.mongodb.core.index.IndexInfo;
import org.springframework.data.mongodb.core.query.Order;
import org.springframework.util.Assert;
@@ -120,27 +123,25 @@ public class DefaultIndexOperations implements IndexOperations {
return getIndexData(dbObjectList);
}
@SuppressWarnings("unchecked")
private List<IndexInfo> getIndexData(List<DBObject> dbObjectList) {
List<IndexInfo> indexInfoList = new ArrayList<IndexInfo>();
for (DBObject ix : dbObjectList) {
Map<String, Order> keyOrderMap = new LinkedHashMap<String, Order>();
DBObject keyDbObject = (DBObject) ix.get("key");
int numberOfElements = keyDbObject.keySet().size();
Iterator<?> entries = keyDbObject.toMap().entrySet().iterator();
List<IndexField> indexFields = new ArrayList<IndexField>(numberOfElements);
for (String key : keyDbObject.keySet()) {
Object value = keyDbObject.get(key);
if (Integer.valueOf(1).equals(value)) {
indexFields.add(IndexField.create(key, Order.ASCENDING));
} else if (Integer.valueOf(-1).equals(value)) {
indexFields.add(IndexField.create(key, Order.DESCENDING));
} else if ("2d".equals(value)) {
indexFields.add(IndexField.geo(key));
while (entries.hasNext()) {
Entry<Object, Integer> thisEntry = (Entry<Object, Integer>) entries.next();
String key = thisEntry.getKey().toString();
int value = thisEntry.getValue();
if (value == 1) {
keyOrderMap.put(key, Order.ASCENDING);
} else {
keyOrderMap.put(key, Order.DESCENDING);
}
}
@@ -150,11 +151,12 @@ public class DefaultIndexOperations implements IndexOperations {
boolean dropDuplicates = ix.containsField("dropDups") ? (Boolean) ix.get("dropDups") : false;
boolean sparse = ix.containsField("sparse") ? (Boolean) ix.get("sparse") : false;
indexInfoList.add(new IndexInfo(indexFields, name, unique, dropDuplicates, sparse));
indexInfoList.add(new IndexInfo(keyOrderMap, name, unique, dropDuplicates, sparse));
}
return indexInfoList;
}
});
}
}
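
Regardless of which side of this diff you are on, the index metadata ends up exposed through IndexOperations. A minimal sketch reading it back, assuming a MongoDB instance on localhost:27017 and an existing "person" collection:

import java.util.List;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.index.IndexInfo;
import com.mongodb.Mongo;

public class IndexInfoSketch {

  public static void main(String[] args) throws Exception {
    MongoTemplate template = new MongoTemplate(new SimpleMongoDbFactory(new Mongo(), "database"));
    List<IndexInfo> indexes = template.indexOps("person").getIndexInfo();
    for (IndexInfo index : indexes) {
      System.out.println(index); // name, field ordering, unique, sparse, dropDups
    }
  }
}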

View File

@@ -20,7 +20,6 @@ import java.util.List;
import org.springframework.data.mongodb.core.index.IndexDefinition;
import org.springframework.data.mongodb.core.index.IndexInfo;
/**
* Index operations on a collection.
*

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2011-2012 the original author or authors.
* Copyright 2002-2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,28 +15,32 @@
*/
package org.springframework.data.mongodb.core;
import org.springframework.data.authentication.UserCredentials;
import org.springframework.jmx.export.annotation.ManagedOperation;
import org.springframework.jmx.export.annotation.ManagedResource;
import org.springframework.util.Assert;
import com.mongodb.DB;
import com.mongodb.Mongo;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.jmx.export.annotation.ManagedOperation;
import org.springframework.jmx.export.annotation.ManagedResource;
/**
* Mongo server administration exposed via JMX annotations
*
* @author Mark Pollack
*/
@ManagedResource(description = "Mongo Admin Operations")
public class MongoAdmin implements MongoAdminOperations {
private final Mongo mongo;
/**
* Logger available to subclasses
*/
protected final Log logger = LogFactory.getLog(getClass());
private Mongo mongo;
private String username;
private String password;
public MongoAdmin(Mongo mongo) {
Assert.notNull(mongo);
this.mongo = mongo;
}
@@ -79,10 +83,11 @@ public class MongoAdmin implements MongoAdminOperations {
* @param password The password to use
*/
public void setPassword(String password) {
this.password = password;
}
DB getDB(String databaseName) {
return MongoDbUtils.getDB(mongo, databaseName, new UserCredentials(username, password));
return MongoDbUtils.getDB(mongo, databaseName, username, password == null ? null : password.toCharArray());
}
}
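
A short sketch of wiring the admin bean, assuming a setUsername(...) setter mirroring the setPassword(...) shown above; the @ManagedOperation methods become available once the bean is exported through Spring's JMX support:

import org.springframework.data.mongodb.core.MongoAdmin;
import com.mongodb.Mongo;

public class MongoAdminSketch {

  public static void main(String[] args) throws Exception {
    MongoAdmin admin = new MongoAdmin(new Mongo());
    admin.setUsername("admin");  // assumed setter, mirroring setPassword(...) above
    admin.setPassword("secret");
    // Export via Spring's JMX support (e.g. <context:mbean-export/>) so the
    // annotated operations show up in a JMX console.
  }
}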

View File

@@ -15,16 +15,14 @@
*/
package org.springframework.data.mongodb.core;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.authentication.UserCredentials;
import com.mongodb.DB;
import com.mongodb.Mongo;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.data.mongodb.CannotGetMongoDbConnectionException;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import org.springframework.util.Assert;
import com.mongodb.DB;
import com.mongodb.Mongo;
/**
* Helper class featuring utility methods for internal MongoDb classes.
* <p/>
@@ -38,7 +36,7 @@ import com.mongodb.Mongo;
*/
public abstract class MongoDbUtils {
private static final Logger LOGGER = LoggerFactory.getLogger(MongoDbUtils.class);
private static final Log LOGGER = LogFactory.getLog(MongoDbUtils.class);
/**
* Private constructor to prevent instantiation.
@@ -50,90 +48,74 @@ public abstract class MongoDbUtils {
/**
* Obtains a {@link DB} connection for the given {@link Mongo} instance and database name
*
* @param mongo the {@link Mongo} instance, must not be {@literal null}.
* @param databaseName the database name, must not be {@literal null} or empty.
* @return the {@link DB} connection
* @param mongo The {@link Mongo} instance
* @param databaseName The database name
* @return The {@link DB} connection
*/
public static DB getDB(Mongo mongo, String databaseName) {
return doGetDB(mongo, databaseName, UserCredentials.NO_CREDENTIALS, true);
return doGetDB(mongo, databaseName, null, null, true);
}
/**
* Obtains a {@link DB} connection for the given {@link Mongo} instance and database name
*
* @param mongo the {@link Mongo} instance, must not be {@literal null}.
* @param databaseName the database name, must not be {@literal null} or empty.
* @param credentials the credentials to use, must not be {@literal null}.
* @return the {@link DB} connection
* @param mongo The {@link Mongo} instance
* @param databaseName The database name
* @param username The username to authenticate with
* @param password The password to authenticate with
* @return The {@link DB} connection
*/
public static DB getDB(Mongo mongo, String databaseName, UserCredentials credentials) {
Assert.notNull(mongo, "No Mongo instance specified!");
Assert.hasText(databaseName, "Database name must be given!");
Assert.notNull(credentials, "Credentials must not be null, use UserCredentials.NO_CREDENTIALS!");
return doGetDB(mongo, databaseName, credentials, true);
public static DB getDB(Mongo mongo, String databaseName, String username, char[] password) {
return doGetDB(mongo, databaseName, username, password, true);
}
private static DB doGetDB(Mongo mongo, String databaseName, UserCredentials credentials, boolean allowCreate) {
public static DB doGetDB(Mongo mongo, String databaseName, String username, char[] password, boolean allowCreate) {
Assert.notNull(mongo, "No Mongo instance specified");
DbHolder dbHolder = (DbHolder) TransactionSynchronizationManager.getResource(mongo);
if (dbHolder != null && !dbHolder.isEmpty()) {
// pre-bound Mongo DB
DB db = null;
if (TransactionSynchronizationManager.isSynchronizationActive() && dbHolder.doesNotHoldNonDefaultDB()) {
db = dbHolder.getDB(databaseName);
// Spring transaction management is active ->
db = dbHolder.getDB();
if (db != null && !dbHolder.isSynchronizedWithTransaction()) {
LOGGER.debug("Registering Spring transaction synchronization for existing MongoDB {}.", databaseName);
LOGGER.debug("Registering Spring transaction synchronization for existing Mongo DB");
TransactionSynchronizationManager.registerSynchronization(new MongoSynchronization(dbHolder, mongo));
dbHolder.setSynchronizedWithTransaction(true);
}
}
if (db != null) {
return db;
}
}
LOGGER.debug("Getting Mongo Database name=[{}]", databaseName);
LOGGER.trace("Getting Mongo Database name=[" + databaseName + "]");
DB db = mongo.getDB(databaseName);
boolean credentialsGiven = credentials.hasUsername() && credentials.hasPassword();
boolean credentialsGiven = username != null && password != null;
if (credentialsGiven && !db.isAuthenticated()) {
String username = credentials.getUsername();
String password = credentials.hasPassword() ? credentials.getPassword() : null;
if (!db.authenticate(username, password == null ? null : password.toCharArray())) {
// Note, can only authenticate once against the same com.mongodb.DB object.
if (!db.authenticate(username, password)) {
throw new CannotGetMongoDbConnectionException("Failed to authenticate to database [" + databaseName
+ "], username = [" + username + "], password = [" + password + "]", databaseName, credentials);
+ "], username = [" + username + "], password = [" + new String(password) + "]", databaseName, username,
password);
}
}
// Use same Session for further Mongo actions within the transaction.
// Thread object will get removed by synchronization at transaction completion.
if (TransactionSynchronizationManager.isSynchronizationActive()) {
LOGGER.debug("Registering Spring transaction synchronization for MongoDB instance {}.", databaseName);
// We're within a Spring-managed transaction, possibly from JtaTransactionManager.
LOGGER.debug("Registering Spring transaction synchronization for new Hibernate Session");
DbHolder holderToUse = dbHolder;
if (holderToUse == null) {
holderToUse = new DbHolder(databaseName, db);
holderToUse = new DbHolder(db);
} else {
holderToUse.addDB(databaseName, db);
holderToUse.addDB(db);
}
TransactionSynchronizationManager.registerSynchronization(new MongoSynchronization(holderToUse, mongo));
holderToUse.setSynchronizedWithTransaction(true);
if (holderToUse != dbHolder) {
TransactionSynchronizationManager.bindResource(mongo, holderToUse);
}
@@ -157,7 +139,6 @@ public abstract class MongoDbUtils {
* @return whether the DB is transactional
*/
public static boolean isDBTransactional(DB db, Mongo mongo) {
if (mongo == null) {
return false;
}
@@ -171,7 +152,6 @@ public abstract class MongoDbUtils {
* @param db the DB to close (may be <code>null</code>)
*/
public static void closeDB(DB db) {
if (db != null) {
LOGGER.debug("Closing Mongo DB object");
try {
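
The diff shows two getDB(...) flavours, one taking UserCredentials and one taking username/char[] password. A sketch against the UserCredentials variant with made-up credentials; use whichever signature matches the version on your classpath:

import org.springframework.data.authentication.UserCredentials;
import org.springframework.data.mongodb.core.MongoDbUtils;
import com.mongodb.DB;
import com.mongodb.Mongo;

public class MongoDbUtilsSketch {

  public static void main(String[] args) throws Exception {
    Mongo mongo = new Mongo();
    DB db = MongoDbUtils.getDB(mongo, "database", new UserCredentials("user", "secret"));
    System.out.println(db.getName());
    MongoDbUtils.closeDB(db); // closes the DB object, see closeDB(...) above
  }
}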

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2010-2012 the original author or authors.
* Copyright 2010-2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -13,14 +13,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import java.util.Arrays;
import java.util.List;
import org.springframework.beans.factory.DisposableBean;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.CannotGetMongoDbConnectionException;
@@ -38,10 +39,12 @@ import com.mongodb.WriteConcern;
* @author Oliver Gierke
* @since 1.0
*/
public class MongoFactoryBean implements FactoryBean<Mongo>, InitializingBean, DisposableBean,
PersistenceExceptionTranslator {
public class MongoFactoryBean implements FactoryBean<Mongo>, PersistenceExceptionTranslator {
private Mongo mongo;
/**
* Logger, available to subclasses.
*/
protected final Log logger = LogFactory.getLog(getClass());
private MongoOptions mongoOptions;
private String host;
@@ -86,7 +89,34 @@ public class MongoFactoryBean implements FactoryBean<Mongo>, InitializingBean, D
}
public Mongo getObject() throws Exception {
Mongo mongo;
ServerAddress defaultOptions = new ServerAddress();
if (mongoOptions == null) {
mongoOptions = new MongoOptions();
}
if (replicaPair != null) {
if (replicaPair.size() < 2) {
throw new CannotGetMongoDbConnectionException("A replica pair must have two server entries");
}
mongo = new Mongo(replicaPair.get(0), replicaPair.get(1), mongoOptions);
} else if (replicaSetSeeds != null) {
mongo = new Mongo(replicaSetSeeds, mongoOptions);
} else {
String mongoHost = host != null ? host : defaultOptions.getHost();
mongo = port != null ? new Mongo(new ServerAddress(mongoHost, port), mongoOptions) : new Mongo(mongoHost,
mongoOptions);
}
if (writeConcern != null) {
mongo.setWriteConcern(writeConcern);
}
return mongo;
}
/*
@@ -112,45 +142,4 @@ public class MongoFactoryBean implements FactoryBean<Mongo>, InitializingBean, D
public DataAccessException translateExceptionIfPossible(RuntimeException ex) {
return exceptionTranslator.translateExceptionIfPossible(ex);
}
/*
* (non-Javadoc)
* @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
*/
public void afterPropertiesSet() throws Exception {
Mongo mongo;
ServerAddress defaultOptions = new ServerAddress();
if (mongoOptions == null) {
mongoOptions = new MongoOptions();
}
if (replicaPair != null) {
if (replicaPair.size() < 2) {
throw new CannotGetMongoDbConnectionException("A replica pair must have two server entries");
}
mongo = new Mongo(replicaPair.get(0), replicaPair.get(1), mongoOptions);
} else if (replicaSetSeeds != null) {
mongo = new Mongo(replicaSetSeeds, mongoOptions);
} else {
String mongoHost = host != null ? host : defaultOptions.getHost();
mongo = port != null ? new Mongo(new ServerAddress(mongoHost, port), mongoOptions) : new Mongo(mongoHost,
mongoOptions);
}
if (writeConcern != null) {
mongo.setWriteConcern(writeConcern);
}
this.mongo = mongo;
}
/*
* (non-Javadoc)
* @see org.springframework.beans.factory.DisposableBean#destroy()
*/
public void destroy() throws Exception {
this.mongo.close();
}
}
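
A programmatic sketch of the factory, assuming setters named after the host, port and writeConcern properties used by the XML namespace; inside a container the lifecycle difference between the two variants (creation in afterPropertiesSet() vs. directly in getObject()) is handled for you:

import org.springframework.data.mongodb.core.MongoFactoryBean;
import com.mongodb.Mongo;
import com.mongodb.WriteConcern;

public class MongoFactoryBeanSketch {

  public static void main(String[] args) throws Exception {
    MongoFactoryBean factory = new MongoFactoryBean();
    factory.setHost("localhost");        // assumed setter matching the "host" property
    factory.setPort(27017);              // assumed setter matching the "port" property
    factory.setWriteConcern(WriteConcern.SAFE);
    // One side of the diff builds the instance in afterPropertiesSet(), the
    // other lazily inside getObject(); a Spring container covers both cases.
    Mongo mongo = factory.getObject();
    System.out.println(mongo);
  }
}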

View File

@@ -1,11 +1,15 @@
/*
* Copyright 2010-2012 the original author or authors.
* Copyright 2010-2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* Licensed under the Apache License, Version 2.0 (the "License");
import org.springframework.util.ResourceUtils;
import org.springframework.data.convert.EntityReader;
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -29,9 +33,9 @@ import java.util.Map;
import java.util.Scanner;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.bson.types.ObjectId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
@@ -46,7 +50,6 @@ import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.data.authentication.UserCredentials;
import org.springframework.data.convert.EntityReader;
import org.springframework.data.mapping.PersistentEntity;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mapping.model.BeanWrapper;
import org.springframework.data.mapping.model.MappingException;
@@ -54,7 +57,6 @@ import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.convert.MongoWriter;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.geo.Distance;
import org.springframework.data.mongodb.core.geo.GeoResult;
import org.springframework.data.mongodb.core.geo.GeoResults;
@@ -106,11 +108,10 @@ import com.mongodb.util.JSON;
* @author Graeme Rocher
* @author Mark Pollack
* @author Oliver Gierke
* @author Amol Nayak
*/
public class MongoTemplate implements MongoOperations, ApplicationContextAware {
private static final Logger LOGGER = LoggerFactory.getLogger(MongoTemplate.class);
private static final Log LOGGER = LogFactory.getLog(MongoTemplate.class);
private static final String ID = "_id";
private static final WriteResultChecking DEFAULT_WRITE_RESULT_CHECKING = WriteResultChecking.NONE;
@SuppressWarnings("serial")
@@ -332,12 +333,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
Assert.notNull(query);
DBObject queryObject = query.getQueryObject();
DBObject sortObject = query.getSortObject();
DBObject fieldsObject = query.getFieldsObject();
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(String.format("Executing query: %s sort: %s fields: %s in collection: $s",
SerializationUtils.serializeToJsonSafely(queryObject), sortObject, fieldsObject, collectionName));
LOGGER.debug("find using query: " + queryObject + " fields: " + fieldsObject + " in collection: "
+ collectionName);
}
this.executeQueryInternal(new FindCallback(queryObject, fieldsObject), preparer, dch, collectionName);
@@ -732,13 +732,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT, collectionName,
entityClass, dbDoc, null);
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
WriteResult wr;
if (writeConcernToUse == null) {
wr = collection.insert(dbDoc);
collection.insert(dbDoc);
} else {
wr = collection.insert(dbDoc, writeConcernToUse);
collection.insert(dbDoc, writeConcernToUse);
}
handleAnyWriteResultErrors(wr, dbDoc, "insert");
return dbDoc.get(ID);
}
});
@@ -757,13 +755,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT_LIST, collectionName, null,
null, null);
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
WriteResult wr;
if (writeConcernToUse == null) {
wr = collection.insert(dbDocList);
collection.insert(dbDocList);
} else {
wr = collection.insert(dbDocList.toArray((DBObject[]) new BasicDBObject[dbDocList.size()]), writeConcernToUse);
collection.insert(dbDocList.toArray((DBObject[]) new BasicDBObject[dbDocList.size()]), writeConcernToUse);
}
handleAnyWriteResultErrors(wr, null, "insert_list");
return null;
}
});
@@ -790,13 +786,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.SAVE, collectionName, entityClass,
dbDoc, null);
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
WriteResult wr;
if (writeConcernToUse == null) {
wr = collection.save(dbDoc);
collection.save(dbDoc);
} else {
wr = collection.save(dbDoc, writeConcernToUse);
collection.save(dbDoc, writeConcernToUse);
}
handleAnyWriteResultErrors(wr, dbDoc, "save");
return dbDoc.get(ID);
}
});
@@ -836,8 +830,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
DBObject queryObj = query == null ? new BasicDBObject()
: mapper.getMappedObject(query.getQueryObject(), entity);
DBObject updateObj = update == null ? new BasicDBObject() : mapper.getMappedObject(update.getUpdateObject(),
entity);
DBObject updateObj = update.getUpdateObject();
for (String key : updateObj.keySet()) {
updateObj.put(key, mongoConverter.convertToMongoType(updateObj.get(key)));
}
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("calling update using query: " + queryObj + " and update: " + updateObj + " in collection: "
@@ -881,7 +878,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
/**
* Returns a {@link Query} for the given entity by its id.
*
*
* @param object must not be {@literal null}.
* @return
*/
@@ -941,7 +938,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
entityClass, null, queryObject);
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("remove using query: " + dboq + " in collection: " + collection.getName());
LOGGER.debug("remove using query: " + queryObject + " in collection: " + collection.getName());
}
if (writeConcernToUse == null) {
wr = collection.remove(dboq);
@@ -999,16 +996,26 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
LOGGER.debug("Executing MapReduce on collection [" + command.getInput() + "], mapFunction [" + mapFunc
+ "], reduceFunction [" + reduceFunc + "]");
}
CommandResult commandResult = command.getOutputType() == MapReduceCommand.OutputType.INLINE ? executeCommand(
commandObject, getDb().getOptions()) : executeCommand(commandObject);
handleCommandError(commandResult, commandObject);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(String.format("MapReduce command result = [%s]",
SerializationUtils.serializeToJsonSafely(commandObject)));
CommandResult commandResult = null;
try {
if (command.getOutputType() == MapReduceCommand.OutputType.INLINE) {
commandResult = executeCommand(commandObject, getDb().getOptions());
} else {
commandResult = executeCommand(commandObject);
}
commandResult.throwOnError();
} catch (RuntimeException ex) {
this.potentiallyConvertRuntimeException(ex);
}
String error = commandResult.getErrorMessage();
if (error != null) {
throw new InvalidDataAccessApiUsageException("Command execution failed: Error [" + error + "], Command = "
+ commandObject);
}
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("MapReduce command result = [" + commandResult + "]");
}
MapReduceOutput mapReduceOutput = new MapReduceOutput(inputCollection, commandObject, commandResult);
List<T> mappedResults = new ArrayList<T>();
DbObjectCallback<T> callback = new ReadDbObjectCallback<T>(mongoConverter, entityClass);
@@ -1033,7 +1040,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
if (criteria == null) {
dbo.put("cond", null);
} else {
dbo.put("cond", mapper.getMappedObject(criteria.getCriteriaObject(), null));
dbo.put("cond", criteria.getCriteriaObject());
}
// If initial document was a JavaScript string, potentially loaded by Spring's Resource abstraction, load it and
// convert to DBObject
@@ -1059,12 +1066,20 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
DBObject commandObject = new BasicDBObject("group", dbo);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(String.format("Executing Group with DBObject [%s]",
SerializationUtils.serializeToJsonSafely(commandObject)));
LOGGER.debug("Executing Group with DBObject [" + commandObject.toString() + "]");
}
CommandResult commandResult = null;
try {
commandResult = executeCommand(commandObject, getDb().getOptions());
commandResult.throwOnError();
} catch (RuntimeException ex) {
this.potentiallyConvertRuntimeException(ex);
}
String error = commandResult.getErrorMessage();
if (error != null) {
throw new InvalidDataAccessApiUsageException("Command execution failed: Error [" + error + "], Command = "
+ commandObject);
}
CommandResult commandResult = executeCommand(commandObject, getDb().getOptions());
handleCommandError(commandResult, commandObject);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Group command result = [" + commandResult + "]");
@@ -1172,7 +1187,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
/**
* Create the specified collection using the provided options
*
*
* @param collectionName
* @param collectionOptions
* @return the collection that was created
@@ -1194,7 +1209,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
* Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter
* <p/>
* The query document is specified as a standard DBObject and so is the fields specification.
*
*
* @param collectionName name of the collection to retrieve the objects from
* @param query the query document that specifies the criteria used to find a record
* @param fields the document that specifies the fields to be returned
@@ -1219,7 +1234,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
* The query document is specified as a standard DBObject and so is the fields specification.
* <p/>
* Can be overridden by subclasses.
*
*
* @param collectionName name of the collection to retrieve the objects from
* @param query the query document that specifies the criteria used to find a record
* @param fields the document that specifies the fields to be returned
@@ -1249,7 +1264,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
* Map the results of an ad-hoc query on the default MongoDB collection to a List using the template's converter.
* <p/>
* The query document is specified as a standard DBObject and so is the fields specification.
*
*
* @param collectionName name of the collection to retrieve the objects from
* @param query the query document that specifies the criteria used to find a record
* @param fields the document that specifies the fields to be returned
@@ -1288,7 +1303,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
* The first document that matches the query is returned and also removed from the collection in the database.
* <p/>
* The query document is specified as a standard DBObject and so is the fields specification.
*
*
* @param collectionName name of the collection to retrieve the objects from
* @param query the query document that specifies the criteria used to find a record
* @param entityClass the parameterized type of the returned list.
@@ -1322,20 +1337,18 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
updateObj.put(key, mongoConverter.convertToMongoType(updateObj.get(key)));
}
DBObject mappedQuery = mapper.getMappedObject(query, entity);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("findAndModify using query: " + mappedQuery + " fields: " + fields + " sort: " + sort
+ " for class: " + entityClass + " and update: " + updateObj + " in collection: " + collectionName);
LOGGER.debug("findAndModify using query: " + query + " fields: " + fields + " sort: " + sort + " for class: "
+ entityClass + " and update: " + updateObj + " in collection: " + collectionName);
}
return executeFindOneInternal(new FindAndModifyCallback(mappedQuery, fields, sort, updateObj, options),
new ReadDbObjectCallback<T>(readerToUse, entityClass), collectionName);
return executeFindOneInternal(new FindAndModifyCallback(mapper.getMappedObject(query, entity), fields, sort,
updateObj, options), new ReadDbObjectCallback<T>(readerToUse, entityClass), collectionName);
}
/**
* Populates the id property of the saved object, if it's not set already.
*
*
* @param savedObject
* @param id
*/
@@ -1351,19 +1364,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
return;
}
ConversionService conversionService = mongoConverter.getConversionService();
BeanWrapper<PersistentEntity<Object, ?>, Object> wrapper = BeanWrapper.create(savedObject, conversionService);
try {
Object idValue = wrapper.getProperty(idProp, idProp.getType(), true);
if (idValue != null) {
return;
}
wrapper.setProperty(idProp, id);
BeanWrapper.create(savedObject, mongoConverter.getConversionService()).setProperty(idProp, id);
return;
} catch (IllegalAccessException e) {
throw new MappingException(e.getMessage(), e);
} catch (InvocationTargetException e) {
@@ -1388,7 +1391,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
* <li>Execute the given {@link ConnectionCallback} for a {@link DBObject}.</li>
* <li>Apply the given {@link DbObjectCallback} to each of the {@link DBObject}s to obtain the result.</li>
* <ol>
*
*
* @param <T>
* @param collectionCallback the callback to retrieve the {@link DBObject} with
* @param objectCallback the {@link DbObjectCallback} to transform {@link DBObject}s into the actual domain type
@@ -1511,16 +1514,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
String error = wr.getError();
if (error != null) {
String message;
if (operation.equals("insert") || operation.equals("save")) {
// assuming the insert operations will begin with insert string
message = String.format("Insert/Save for %s failed: %s", query, error);
} else if (operation.equals("insert_list")) {
message = String.format("Insert list failed: %s", error);
} else {
message = String.format("Execution of %s%s failed: %s", operation,
query == null ? "" : "' using '" + query.toString() + "' query", error);
}
String message = String.format("Execution of %s%s failed: %s", operation, query == null ? "" : "' using '"
+ query.toString() + "' query", error);
if (WriteResultChecking.EXCEPTION == this.writeResultChecking) {
throw new DataIntegrityViolationException(message);
@@ -1543,27 +1539,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
return resolved == null ? ex : resolved;
}
/**
* Inspects the given {@link CommandResult} for errors and potentially throws an
* {@link InvalidDataAccessApiUsageException} for that error.
*
* @param result must not be {@literal null}.
* @param source must not be {@literal null}.
*/
private void handleCommandError(CommandResult result, DBObject source) {
try {
result.throwOnError();
} catch (MongoException ex) {
String error = result.getErrorMessage();
error = error == null ? "NO MESSAGE" : error;
throw new InvalidDataAccessApiUsageException("Command execution failed: Error [" + error + "], Command = "
+ source, ex);
}
}
private static final MongoConverter getDefaultMongoConverter(MongoDbFactory factory) {
MappingMongoConverter converter = new MappingMongoConverter(factory, new MongoMappingContext());
converter.afterPropertiesSet();
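
Several hunks above concern how query and update documents are mapped and converted before they reach the driver. A minimal sketch of that path through the public API, using a hypothetical Person document class and a local MongoDB instance:

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import com.mongodb.Mongo;

public class UpdateSketch {

  static class Person {
    String id;
    String name;
  }

  public static void main(String[] args) throws Exception {
    MongoTemplate template = new MongoTemplate(new SimpleMongoDbFactory(new Mongo(), "database"));
    // Both the Query and the Update document pass through mapping/conversion
    // before the driver's update(...) is invoked.
    template.updateFirst(new Query(Criteria.where("name").is("Oliver")),
        new Update().set("name", "Oliver August"), Person.class);
  }
}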

View File

@@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.convert;
package org.springframework.data.mongodb.core;
import java.util.ArrayList;
import java.util.Arrays;
@@ -25,11 +25,11 @@ import org.bson.types.ObjectId;
import org.springframework.core.convert.ConversionException;
import org.springframework.core.convert.ConversionService;
import org.springframework.data.mapping.PersistentEntity;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.util.Assert;
import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
@@ -41,9 +41,6 @@ import com.mongodb.DBObject;
*/
public class QueryMapper {
private static final List<String> DEFAULT_ID_NAMES = Arrays.asList("id", "_id");
private static final String N_OR_PATTERN = "\\$.*or";
private final ConversionService conversionService;
private final MongoConverter converter;
@@ -62,8 +59,8 @@ public class QueryMapper {
* Replaces the property keys used in the given {@link DBObject} with the appropriate keys by using the
* {@link PersistentEntity} metadata.
*
* @param query must not be {@literal null}.
* @param entity can be {@literal null}.
* @param query
* @param entity
* @return
*/
public DBObject getMappedObject(DBObject query, MongoPersistentEntity<?> entity) {
@@ -71,10 +68,8 @@ public class QueryMapper {
DBObject newDbo = new BasicDBObject();
for (String key : query.keySet()) {
String newKey = key;
Object value = query.get(key);
if (isIdKey(key, entity)) {
if (value instanceof DBObject) {
DBObject valueDbo = (DBObject) value;
@@ -86,51 +81,34 @@ public class QueryMapper {
}
valueDbo.put(inKey, ids.toArray(new Object[ids.size()]));
} else {
value = getMappedObject((DBObject) value, null);
value = getMappedObject((DBObject) value, entity);
}
} else {
value = convertId(value);
}
newKey = "_id";
} else if (key.matches(N_OR_PATTERN)) {
} else if (key.startsWith("$") && key.endsWith("or")) {
// $or/$nor
Iterable<?> conditions = (Iterable<?>) value;
BasicBSONList newConditions = new BasicBSONList();
Iterator<?> iter = conditions.iterator();
while (iter.hasNext()) {
newConditions.add(getMappedObject((DBObject) iter.next(), null));
newConditions.add(getMappedObject((DBObject) iter.next(), entity));
}
value = newConditions;
} else if (key.equals("$ne")) {
value = convertId(value);
} else if (value instanceof DBObject) {
newDbo.put(newKey, getMappedObject((DBObject) value, entity));
continue;
}
newDbo.put(newKey, convertSimpleOrDBObject(value, null));
newDbo.put(newKey, converter.convertToMongoType(value));
}
return newDbo;
}
/**
* Retriggers mapping if the given source is a {@link DBObject} or simply lets the converter turn the value into a Mongo type otherwise.
*
* @param source
* @param entity
* @return
*/
private Object convertSimpleOrDBObject(Object source, MongoPersistentEntity<?> entity) {
if (source instanceof BasicDBList) {
return converter.convertToMongoType(source);
}
if (source instanceof DBObject) {
return getMappedObject((DBObject) source, entity);
}
return converter.convertToMongoType(source);
}
/**
* Returns whether the given key will be considered an id key.
*
@@ -140,17 +118,12 @@ public class QueryMapper {
*/
private boolean isIdKey(String key, MongoPersistentEntity<?> entity) {
if (entity == null) {
return false;
}
MongoPersistentProperty idProperty = entity.getIdProperty();
if (idProperty != null) {
if (null != entity && entity.getIdProperty() != null) {
MongoPersistentProperty idProperty = entity.getIdProperty();
return idProperty.getName().equals(key) || idProperty.getFieldName().equals(key);
}
return DEFAULT_ID_NAMES.contains(key);
return Arrays.asList("id", "_id").contains(key);
}
/**
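
A hedged sketch of the key translation getMappedObject(...) performs, assuming the QueryMapper(MongoConverter) constructor and the org.springframework.data.mongodb.core.convert package used on one side of this diff; the id property of the hypothetical Person class is rewritten to _id:

import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.mongodb.Mongo;

public class QueryMapperSketch {

  static class Person {
    String id;
    String name;
  }

  public static void main(String[] args) throws Exception {
    MongoMappingContext context = new MongoMappingContext();
    MappingMongoConverter converter = new MappingMongoConverter(
        new SimpleMongoDbFactory(new Mongo(), "database"), context);
    converter.afterPropertiesSet();

    QueryMapper mapper = new QueryMapper(converter); // constructor assumed, see above
    MongoPersistentEntity<?> entity = context.getPersistentEntity(Person.class);

    DBObject mapped = mapper.getMappedObject(new BasicDBObject("id", "4711"), entity);
    System.out.println(mapped); // the "id" key is rewritten to "_id"
  }
}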

View File

@@ -1,110 +0,0 @@
/*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import org.springframework.core.convert.converter.Converter;
import com.mongodb.DBObject;
import com.mongodb.util.JSON;
/**
* Utility methods for JSON serialization.
*
* @author Oliver Gierke
*/
public abstract class SerializationUtils {
private SerializationUtils() {
}
/**
* Serializes the given object into pseudo-JSON, i.e. it tries to create a JSON representation as far as possible
* but falls back to the given object's {@link Object#toString()} method if a value is not serializable. Useful for
* printing raw {@link DBObject}s containing complex values before actually converting them into Mongo native types.
*
* @param value
* @return
*/
public static String serializeToJsonSafely(Object value) {
if (value == null) {
return null;
}
try {
return JSON.serialize(value);
} catch (Exception e) {
if (value instanceof Collection) {
return toString((Collection<?>) value);
} else if (value instanceof Map) {
return toString((Map<?, ?>) value);
} else if (value instanceof DBObject) {
return toString(((DBObject) value).toMap());
} else {
return String.format("{ $java : %s }", value.toString());
}
}
}
private static String toString(Map<?, ?> source) {
return iterableToDelimitedString(source.entrySet(), "{ ", " }", new Converter<Entry<?, ?>, Object>() {
public Object convert(Entry<?, ?> source) {
return String.format("\"%s\" : %s", source.getKey(), serializeToJsonSafely(source.getValue()));
}
});
}
private static String toString(Collection<?> source) {
return iterableToDelimitedString(source, "[ ", " ]", new Converter<Object, Object>() {
public Object convert(Object source) {
return serializeToJsonSafely(source);
}
});
}
/**
* Creates a string representation from the given {@link Iterable} prepending the prefix, applying the given
* {@link Converter} to each element before adding it to the result {@link String}, concatenating each element with
* {@literal ,} and applying the postfix.
*
* @param source
* @param prefix
* @param postfix
* @param transformer
* @return
*/
private static <T> String iterableToDelimitedString(Iterable<T> source, String prefix, String postfix,
Converter<? super T, Object> transformer) {
StringBuilder builder = new StringBuilder(prefix);
Iterator<T> iterator = source.iterator();
while (iterator.hasNext()) {
builder.append(transformer.convert(iterator.next()));
if (iterator.hasNext()) {
builder.append(", ");
}
}
return builder.append(postfix).toString();
}
}
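
A minimal sketch of the fallback behaviour: values the Mongo JSON serializer cannot handle are rendered through toString() inside a { $java : ... } placeholder instead of breaking the debug logging:

import org.springframework.data.mongodb.core.SerializationUtils;
import com.mongodb.BasicDBObject;

public class SerializationSketch {

  static class NotJsonSerializable {
    @Override
    public String toString() {
      return "NotJsonSerializable";
    }
  }

  public static void main(String[] args) {
    BasicDBObject dbo = new BasicDBObject("plain", 1).append("complex", new NotJsonSerializable());
    // "plain" serializes normally, "complex" falls back to the $java placeholder.
    System.out.println(SerializationUtils.serializeToJsonSafely(dbo));
  }
}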

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2011-2012 the original author or authors.
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,6 +17,8 @@ package org.springframework.data.mongodb.core;
import java.net.UnknownHostException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.dao.DataAccessException;
import org.springframework.data.authentication.UserCredentials;
@@ -37,10 +39,12 @@ import com.mongodb.WriteConcern;
*/
public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
protected final Log logger = LogFactory.getLog(getClass());
private final Mongo mongo;
private final String databaseName;
private final boolean mongoInstanceCreated;
private final UserCredentials credentials;
private String username;
private String password;
private WriteConcern writeConcern;
/**
@@ -50,7 +54,12 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
* @param databaseName database name, must not be {@literal null}.
*/
public SimpleMongoDbFactory(Mongo mongo, String databaseName) {
this(mongo, databaseName, UserCredentials.NO_CREDENTIALS, false);
Assert.notNull(mongo, "Mongo must not be null");
Assert.hasText(databaseName, "Database name must not be empty");
Assert.isTrue(databaseName.matches("[\\w-]+"),
"Database name must only contain letters, numbers, underscores and dashes!");
this.mongo = mongo;
this.databaseName = databaseName;
}
/**
@@ -58,10 +67,12 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
*
* @param mongo Mongo instance, must not be {@literal null}.
* @param databaseName Database name, must not be {@literal null}.
* @param credentials username and password.
* @param userCredentials username and password must not be {@literal null}.
*/
public SimpleMongoDbFactory(Mongo mongo, String databaseName, UserCredentials credentials) {
this(mongo, databaseName, credentials, false);
public SimpleMongoDbFactory(Mongo mongo, String databaseName, UserCredentials userCredentials) {
this(mongo, databaseName);
this.username = userCredentials.getUsername();
this.password = userCredentials.getPassword();
}
/**
@@ -73,21 +84,7 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
* @see MongoURI
*/
public SimpleMongoDbFactory(MongoURI uri) throws MongoException, UnknownHostException {
this(new Mongo(uri), uri.getDatabase(), new UserCredentials(uri.getUsername(), parseChars(uri.getPassword())), true);
}
private SimpleMongoDbFactory(Mongo mongo, String databaseName, UserCredentials credentials,
boolean mongoInstanceCreated) {
Assert.notNull(mongo, "Mongo must not be null");
Assert.hasText(databaseName, "Database name must not be empty");
Assert.isTrue(databaseName.matches("[\\w-]+"),
"Database name must only contain letters, numbers, underscores and dashes!");
this.mongo = mongo;
this.databaseName = databaseName;
this.mongoInstanceCreated = mongoInstanceCreated;
this.credentials = credentials == null ? UserCredentials.NO_CREDENTIALS : credentials;
this(new Mongo(uri), uri.getDatabase(), new UserCredentials(uri.getUsername(), parseChars(uri.getPassword())));
}
/**
@@ -99,6 +96,10 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
this.writeConcern = writeConcern;
}
public WriteConcern getWriteConcern() {
return writeConcern;
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.MongoDbFactory#getDb()
@@ -115,7 +116,7 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
Assert.hasText(dbName, "Database name must not be empty.");
DB db = MongoDbUtils.getDB(mongo, dbName, credentials);
DB db = MongoDbUtils.getDB(mongo, dbName, username, password == null ? null : password.toCharArray());
if (writeConcern != null) {
db.setWriteConcern(writeConcern);
@@ -125,17 +126,17 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
}
/**
* Clean up the Mongo instance if it was created by the factory itself.
*
* @see DisposableBean#destroy()
* Clean up the Mongo instance.
*/
public void destroy() throws Exception {
if (mongoInstanceCreated) {
mongo.close();
}
mongo.close();
}
private static String parseChars(char[] chars) {
return chars == null ? null : String.valueOf(chars);
public static String parseChars(char[] chars) {
if (chars == null) {
return null;
} else {
return String.valueOf(chars);
}
}
}
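
A short bootstrap sketch using the credentials constructor and setWriteConcern(...) shown above; it assumes a local MongoDB instance secured with these (made-up) credentials, everything else is the usual template setup:

import org.springframework.data.authentication.UserCredentials;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import com.mongodb.Mongo;
import com.mongodb.WriteConcern;

public class MongoDbFactorySketch {

  public static void main(String[] args) throws Exception {
    SimpleMongoDbFactory factory = new SimpleMongoDbFactory(new Mongo(), "database",
        new UserCredentials("user", "secret"));
    factory.setWriteConcern(WriteConcern.SAFE); // applied to every DB handed out
    MongoTemplate template = new MongoTemplate(factory);
    System.out.println(template.getCollectionNames());
  }
}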

View File

@@ -23,7 +23,6 @@ import org.springframework.beans.factory.InitializingBean;
import org.springframework.core.convert.ConversionService;
import org.springframework.core.convert.support.ConversionServiceFactory;
import org.springframework.core.convert.support.GenericConversionService;
import org.springframework.data.convert.EntityInstantiators;
import org.springframework.data.mongodb.core.convert.MongoConverters.BigIntegerToObjectIdConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.ObjectIdToBigIntegerConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.ObjectIdToStringConverter;
@@ -40,7 +39,6 @@ public abstract class AbstractMongoConverter implements MongoConverter, Initiali
protected final GenericConversionService conversionService;
protected CustomConversions conversions = new CustomConversions();
protected EntityInstantiators instantiators = new EntityInstantiators();
/**
* Creates a new {@link AbstractMongoConverter} using the given {@link GenericConversionService}.
@@ -62,15 +60,6 @@ public abstract class AbstractMongoConverter implements MongoConverter, Initiali
this.conversions = conversions;
}
/**
* Registers {@link EntityInstantiators} to customize entity instantiation.
*
* @param instantiators
*/
public void setInstantiators(EntityInstantiators instantiators) {
this.instantiators = instantiators == null ? new EntityInstantiators() : instantiators;
}
/**
* Registers additional converters that will be available when using the {@link ConversionService} directly (e.g. for
* id conversion). These converters are not custom conversions as they'd introduce unwanted conversions (e.g.

View File

@@ -22,8 +22,8 @@ import java.util.List;
import java.util.Locale;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.core.GenericTypeResolver;
import org.springframework.core.convert.TypeDescriptor;
import org.springframework.core.convert.converter.Converter;
@@ -36,10 +36,10 @@ import org.springframework.data.convert.WritingConverter;
import org.springframework.data.mapping.model.SimpleTypeHolder;
import org.springframework.data.mongodb.core.convert.MongoConverters.BigDecimalToStringConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.BigIntegerToStringConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.BinaryToUUIDConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigDecimalConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigIntegerConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToURLConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.URLToStringConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.UUIDToBinaryConverter;
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
import org.springframework.util.Assert;
@@ -54,7 +54,7 @@ import org.springframework.util.Assert;
*/
public class CustomConversions {
private static final Logger LOG = LoggerFactory.getLogger(CustomConversions.class);
private static final Log LOG = LogFactory.getLog(CustomConversions.class);
private static final String READ_CONVERTER_NOT_SIMPLE = "Registering converter from %s to %s as reading converter although it doesn't convert from a Mongo supported type! You might want to check your annotation setup at the converter implementation.";
private static final String WRITE_CONVERTER_NOT_SIMPLE = "Registering converter from %s to %s as writing converter although it doesn't convert to a Mongo supported type! You might want to check your annotation setup at the converter implementation.";
@@ -91,8 +91,8 @@ public class CustomConversions {
this.converters.add(StringToBigDecimalConverter.INSTANCE);
this.converters.add(BigIntegerToStringConverter.INSTANCE);
this.converters.add(StringToBigIntegerConverter.INSTANCE);
this.converters.add(URLToStringConverter.INSTANCE);
this.converters.add(StringToURLConverter.INSTANCE);
this.converters.add(UUIDToBinaryConverter.INSTANCE);
this.converters.add(BinaryToUUIDConverter.INSTANCE);
this.converters.addAll(converters);
for (Object c : this.converters) {
@@ -223,7 +223,7 @@ public class CustomConversions {
/**
* Returns the target type we can write an object of the given source type to. The returned type might be a subclass
* of the given expected type though. If {@code expectedTargetType} is {@literal null} we will simply return the
* of the given expected type though. If {@code expexctedTargetType} is {@literal null} we will simply return the
* first target type matching or {@literal null} if no conversion can be found.
*
* @param source must not be {@literal null}
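
A hedged sketch of registering a custom converter: the Email type and its converter are made up for illustration, and the hasCustomWriteTarget(...) accessor and setCustomConversions(...) are assumed to match this version of the API:

import java.util.Arrays;
import org.springframework.core.convert.converter.Converter;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.convert.CustomConversions;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import com.mongodb.Mongo;

public class CustomConversionsSketch {

  static class Email {
    final String value;
    Email(String value) {
      this.value = value;
    }
  }

  // Writes the hypothetical Email value object as a plain String.
  static enum EmailToStringConverter implements Converter<Email, String> {
    INSTANCE;
    public String convert(Email source) {
      return source.value;
    }
  }

  public static void main(String[] args) throws Exception {
    CustomConversions conversions = new CustomConversions(Arrays.asList(EmailToStringConverter.INSTANCE));
    System.out.println(conversions.hasCustomWriteTarget(Email.class)); // true

    MappingMongoConverter converter = new MappingMongoConverter(
        new SimpleMongoDbFactory(new Mongo(), "database"), new MongoMappingContext());
    converter.setCustomConversions(conversions);
    converter.afterPropertiesSet();
  }
}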

View File

@@ -0,0 +1,168 @@
/*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.convert;
import java.util.HashSet;
import java.util.Set;
import org.springframework.data.mapping.PersistentEntity;
import org.springframework.data.mapping.PersistentProperty;
import org.springframework.data.mapping.PreferredConstructor;
import org.springframework.data.mapping.PreferredConstructor.Parameter;
import org.springframework.data.mapping.PropertyPath;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mapping.context.PersistentPropertyPath;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.util.TypeInformation;
import org.springframework.util.Assert;
/**
* Abstraction for a {@link PreferredConstructor} alongside mapping information.
*
* @author Oliver Gierke
*/
class MappedConstructor {
private final Set<MappedConstructor.MappedParameter> parameters;
/**
* Creates a new {@link MappedConstructor} from the given {@link MongoPersistentEntity} and {@link MappingContext}.
*
* @param entity must not be {@literal null}.
* @param context must not be {@literal null}.
*/
public MappedConstructor(MongoPersistentEntity<?> entity,
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context) {
Assert.notNull(entity);
Assert.notNull(context);
this.parameters = new HashSet<MappedConstructor.MappedParameter>();
for (Parameter<?> parameter : entity.getPreferredConstructor().getParameters()) {
parameters.add(new MappedParameter(parameter, entity, context));
}
}
/**
* Returns whether the given {@link PersistentProperty} is referenced in a constructor argument of the
* {@link PersistentEntity} backing this {@link MappedConstructor}.
*
* @param property must not be {@literal null}.
* @return
*/
public boolean isConstructorParameter(PersistentProperty<?> property) {
Assert.notNull(property);
for (MappedConstructor.MappedParameter parameter : parameters) {
if (parameter.maps(property)) {
return true;
}
}
return false;
}
/**
* Returns the {@link MappedParameter} for the given {@link Parameter}.
*
* @param parameter must not be {@literal null}.
* @return
*/
public MappedParameter getFor(Parameter<?> parameter) {
for (MappedParameter mappedParameter : parameters) {
if (mappedParameter.parameter.equals(parameter)) {
return mappedParameter;
}
}
throw new IllegalStateException(String.format("Didn't find a MappedParameter for %s!", parameter.toString()));
}
/**
* Abstraction of a {@link Parameter} alongside mapping information.
*
* @author Oliver Gierke
*/
static class MappedParameter {
private final MongoPersistentProperty property;
private final Parameter<?> parameter;
/**
* Creates a new {@link MappedParameter} for the given {@link Parameter}, {@link MongoPersistentProperty} and
* {@link MappingContext}.
*
* @param parameter must not be {@literal null}.
* @param entity must not be {@literal null}.
* @param context must not be {@literal null}.
*/
public MappedParameter(Parameter<?> parameter, MongoPersistentEntity<?> entity,
MappingContext<? extends MongoPersistentEntity<?>, ? extends MongoPersistentProperty> context) {
Assert.notNull(parameter);
Assert.notNull(entity);
Assert.notNull(context);
this.parameter = parameter;
PropertyPath propertyPath = PropertyPath.from(parameter.getName(), entity.getType());
PersistentPropertyPath<? extends MongoPersistentProperty> path = context.getPersistentPropertyPath(propertyPath);
this.property = path == null ? null : path.getLeafProperty();
}
/**
* Returns whether there is a SpEL expression configured for this parameter.
*
* @return
*/
public boolean hasSpELExpression() {
return parameter.getKey() != null;
}
/**
* Returns the field name to be used to lookup the value which in turn shall be converted into the constructor
* parameter.
*
* @return
*/
public String getFieldName() {
return property.getFieldName();
}
/**
* Returns the type of the property backing the {@link Parameter}.
*
* @return
*/
public TypeInformation<?> getPropertyTypeInformation() {
return property.getTypeInformation();
}
/**
* Returns whether the given {@link PersistentProperty} is mapped by the parameter.
*
* @param property
* @return
*/
public boolean maps(PersistentProperty<?> property) {
return this.property.equals(property);
}
}
}
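As an aside, a rough sketch of what this package-private helper computes (a MongoMappingContext and a hypothetical Person class with a matching constructor are assumed; since MappedConstructor is not public, the snippet would have to live in the same package):

import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;

class MappedConstructorSketch {

    static boolean populatesNameViaConstructor() {
        MongoMappingContext context = new MongoMappingContext();
        MongoPersistentEntity<?> entity = context.getPersistentEntity(Person.class);
        MappedConstructor constructor = new MappedConstructor(entity, context);
        // true if the 'name' property is filled in through a constructor argument rather than field/setter access
        return constructor.isConstructorParameter(entity.getPersistentProperty("name"));
    }
}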

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2011-2012 by the original author(s).
* Copyright 2011 by the original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,44 +15,44 @@
*/
package org.springframework.data.mongodb.core.convert;
import java.lang.reflect.Array;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.expression.BeanFactoryResolver;
import org.springframework.core.CollectionFactory;
import org.springframework.core.convert.ConversionException;
import org.springframework.core.convert.support.ConversionServiceFactory;
import org.springframework.data.convert.EntityInstantiator;
import org.springframework.data.convert.TypeMapper;
import org.springframework.data.mapping.Association;
import org.springframework.data.mapping.AssociationHandler;
import org.springframework.data.mapping.PreferredConstructor.Parameter;
import org.springframework.data.mapping.PropertyHandler;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mapping.model.BeanWrapper;
import org.springframework.data.mapping.model.DefaultSpELExpressionEvaluator;
import org.springframework.data.mapping.model.MappingException;
import org.springframework.data.mapping.model.ParameterValueProvider;
import org.springframework.data.mapping.model.PersistentEntityParameterValueProvider;
import org.springframework.data.mapping.model.PropertyValueProvider;
import org.springframework.data.mapping.model.SpELContext;
import org.springframework.data.mapping.model.SpELExpressionEvaluator;
import org.springframework.data.mapping.model.SpELAwareParameterValueProvider;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.QueryMapper;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.util.ClassTypeInformation;
import org.springframework.data.util.TypeInformation;
import org.springframework.expression.Expression;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.expression.spel.support.StandardEvaluationContext;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
@@ -72,7 +72,7 @@ import com.mongodb.DBRef;
*/
public class MappingMongoConverter extends AbstractMongoConverter implements ApplicationContextAware {
protected static final Logger log = LoggerFactory.getLogger(MappingMongoConverter.class);
protected static final Log log = LogFactory.getLog(MappingMongoConverter.class);
protected final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
protected final SpelExpressionParser spelExpressionParser = new SpelExpressionParser();
@@ -83,8 +83,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
protected MongoTypeMapper typeMapper;
protected String mapKeyDotReplacement = null;
private SpELContext spELContext;
/**
* Creates a new {@link MappingMongoConverter} given the new {@link MongoDbFactory} and {@link MappingContext}.
*
@@ -104,8 +102,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
this.mappingContext = mappingContext;
this.typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext);
this.idMapper = new QueryMapper(this);
this.spELContext = new SpELContext(DBObjectPropertyAccessor.INSTANCE);
}
/**
@@ -157,9 +153,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
* @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext)
*/
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
this.applicationContext = applicationContext;
this.spELContext = new SpELContext(this.spELContext, applicationContext);
}
/*
@@ -170,12 +164,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return read(ClassTypeInformation.from(clazz), dbo);
}
protected <S extends Object> S read(TypeInformation<S> type, DBObject dbo) {
return read(type, dbo, null);
}
@SuppressWarnings("unchecked")
protected <S extends Object> S read(TypeInformation<S> type, DBObject dbo, Object parent) {
protected <S extends Object> S read(TypeInformation<S> type, DBObject dbo) {
if (null == dbo) {
return null;
@@ -193,11 +183,11 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
}
if (typeToUse.isCollectionLike() && dbo instanceof BasicDBList) {
return (S) readCollectionOrArray(typeToUse, (BasicDBList) dbo, parent);
return (S) readCollectionOrArray(typeToUse, (BasicDBList) dbo);
}
if (typeToUse.isMap()) {
return (S) readMap(typeToUse, dbo, parent);
return (S) readMap(typeToUse, dbo);
}
// Retrieve persistent entity info
@@ -207,43 +197,37 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
throw new MappingException("No mapping metadata found for " + rawType.getName());
}
return read(persistentEntity, dbo, parent);
return read(persistentEntity, dbo);
}
private ParameterValueProvider<MongoPersistentProperty> getParameterProvider(MongoPersistentEntity<?> entity,
DBObject source, DefaultSpELExpressionEvaluator evaluator, Object parent) {
private <S extends Object> S read(final MongoPersistentEntity<S> entity, final DBObject dbo) {
MongoDbPropertyValueProvider provider = new MongoDbPropertyValueProvider(source, evaluator, parent);
PersistentEntityParameterValueProvider<MongoPersistentProperty> parameterProvider = new PersistentEntityParameterValueProvider<MongoPersistentProperty>(
entity, provider, parent);
parameterProvider.setSpELEvaluator(evaluator);
final StandardEvaluationContext spelCtx = new StandardEvaluationContext(dbo);
spelCtx.addPropertyAccessor(DBObjectPropertyAccessor.INSTANCE);
return parameterProvider;
}
if (applicationContext != null) {
spelCtx.setBeanResolver(new BeanFactoryResolver(applicationContext));
}
private <S extends Object> S read(final MongoPersistentEntity<S> entity, final DBObject dbo, Object parent) {
final MappedConstructor constructor = new MappedConstructor(entity, mappingContext);
final DefaultSpELExpressionEvaluator evaluator = new DefaultSpELExpressionEvaluator(dbo, spELContext);
SpELAwareParameterValueProvider delegate = new SpELAwareParameterValueProvider(spelExpressionParser, spelCtx);
ParameterValueProvider provider = new DelegatingParameterValueProvider(constructor, dbo, delegate);
ParameterValueProvider<MongoPersistentProperty> provider = getParameterProvider(entity, dbo, evaluator, parent);
EntityInstantiator instantiator = instantiators.getInstantiatorFor(entity);
S instance = instantiator.createInstance(entity, provider);
final BeanWrapper<MongoPersistentEntity<S>, S> wrapper = BeanWrapper.create(instance, conversionService);
final S result = wrapper.getBean();
final BeanWrapper<MongoPersistentEntity<S>, S> wrapper = BeanWrapper.create(entity, provider, conversionService);
// Set properties not already set in the constructor
entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
public void doWithPersistentProperty(MongoPersistentProperty prop) {
boolean isConstructorProperty = entity.isConstructorArgument(prop);
boolean isConstructorProperty = constructor.isConstructorParameter(prop);
boolean hasValueForProperty = dbo.containsField(prop.getFieldName());
if (!hasValueForProperty || isConstructorProperty) {
return;
}
Object obj = getValueInternal(prop, dbo, evaluator, result);
Object obj = getValueInternal(prop, dbo, spelCtx, prop.getSpelExpression());
wrapper.setProperty(prop, obj, useFieldAccessOnly);
}
});
@@ -252,7 +236,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
entity.doWithAssociations(new AssociationHandler<MongoPersistentProperty>() {
public void doWithAssociation(Association<MongoPersistentProperty> association) {
MongoPersistentProperty inverseProp = association.getInverse();
Object obj = getValueInternal(inverseProp, dbo, evaluator, result);
Object obj = getValueInternal(inverseProp, dbo, spelCtx, inverseProp.getSpelExpression());
try {
wrapper.setProperty(inverseProp, obj);
} catch (IllegalAccessException e) {
@@ -263,23 +247,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
}
});
return result;
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.convert.MongoWriter#toDBRef(java.lang.Object, org.springframework.data.mongodb.core.mapping.MongoPersistentProperty)
*/
public DBRef toDBRef(Object object, MongoPersistentProperty referingProperty) {
org.springframework.data.mongodb.core.mapping.DBRef annotation = null;
if (referingProperty != null) {
annotation = referingProperty.getDBRef();
Assert.isTrue(annotation != null, "The referenced property has to be mapped with @DBRef!");
}
return createDBRef(object, annotation);
return wrapper.getBean();
}
/**
@@ -471,6 +439,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
*
* @param collection must not be {@literal null}.
* @param property must not be {@literal null}.
*
* @return
*/
protected DBObject createCollection(Collection<?> collection, MongoPersistentProperty property) {
@@ -674,20 +643,12 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
protected DBRef createDBRef(Object target, org.springframework.data.mongodb.core.mapping.DBRef dbref) {
Assert.notNull(target);
MongoPersistentEntity<?> targetEntity = mappingContext.getPersistentEntity(target.getClass());
if (null == targetEntity) {
throw new MappingException("No mapping metadata found for " + target.getClass());
if (null == targetEntity || null == targetEntity.getIdProperty()) {
return null;
}
MongoPersistentProperty idProperty = targetEntity.getIdProperty();
if (idProperty == null) {
throw new MappingException("No id property found on class " + targetEntity.getType());
}
BeanWrapper<MongoPersistentEntity<Object>, Object> wrapper = BeanWrapper.create(target, conversionService);
Object id = wrapper.getProperty(idProperty, Object.class, useFieldAccessOnly);
@@ -695,17 +656,69 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
throw new MappingException("Cannot create a reference to an object with a NULL id.");
}
DB db = mongoDbFactory.getDb();
db = dbref != null && StringUtils.hasText(dbref.db()) ? mongoDbFactory.getDb(dbref.db()) : db;
String collection = dbref.collection();
if ("".equals(collection)) {
collection = targetEntity.getCollection();
}
return new DBRef(db, targetEntity.getCollection(), idMapper.convertId(id));
String dbname = dbref.db();
DB db = StringUtils.hasText(dbname) ? mongoDbFactory.getDb(dbname) : mongoDbFactory.getDb();
return new DBRef(db, collection, idMapper.convertId(id));
}
protected Object getValueInternal(MongoPersistentProperty prop, DBObject dbo, SpELExpressionEvaluator eval,
Object parent) {
@SuppressWarnings("unchecked")
protected Object getValueInternal(MongoPersistentProperty prop, DBObject dbo, StandardEvaluationContext ctx,
String spelExpr) {
MongoDbPropertyValueProvider provider = new MongoDbPropertyValueProvider(dbo, spELContext, parent);
return provider.getPropertyValue(prop);
Object o;
if (null != spelExpr) {
Expression x = spelExpressionParser.parseExpression(spelExpr);
o = x.getValue(ctx);
} else {
Object sourceValue = dbo.get(prop.getFieldName());
if (sourceValue == null) {
return null;
}
Class<?> propertyType = prop.getType();
if (conversions.hasCustomReadTarget(sourceValue.getClass(), propertyType)) {
return conversionService.convert(sourceValue, propertyType);
}
if (sourceValue instanceof DBRef) {
sourceValue = ((DBRef) sourceValue).fetch();
}
if (sourceValue instanceof DBObject) {
if (prop.isMap()) {
return readMap(prop.getTypeInformation(), (DBObject) sourceValue);
} else if (prop.isArray() && sourceValue instanceof BasicDBObject
&& ((DBObject) sourceValue).keySet().size() == 0) {
// It's empty
return Array.newInstance(prop.getComponentType(), 0);
} else if (prop.isCollectionLike() && sourceValue instanceof BasicDBList) {
return readCollectionOrArray((TypeInformation<? extends Collection<?>>) prop.getTypeInformation(),
(BasicDBList) sourceValue);
}
TypeInformation<?> toType = typeMapper.readType((DBObject) sourceValue, prop.getTypeInformation());
// It's a complex object, have to read it in
if (toType != null) {
// TODO: why do we remove the type?
// dbo.removeField(CUSTOM_TYPE_KEY);
o = read(toType, (DBObject) sourceValue);
} else {
o = read(mappingContext.getPersistentEntity(prop.getTypeInformation()), (DBObject) sourceValue);
}
} else {
o = sourceValue;
}
}
return o;
}
/**
@@ -713,40 +726,32 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
*
* @param targetType must not be {@literal null}.
* @param sourceValue must not be {@literal null}.
* @return the converted {@link Collection} or array, will never be {@literal null}.
* @return the converted {@link Collection}, will never be {@literal null}.
*/
@SuppressWarnings("unchecked")
private Object readCollectionOrArray(TypeInformation<?> targetType, BasicDBList sourceValue, Object parent) {
private Collection<?> readCollectionOrArray(TypeInformation<?> targetType, BasicDBList sourceValue) {
Assert.notNull(targetType);
if (sourceValue.isEmpty()) {
return new HashSet<Object>();
}
Class<?> collectionType = targetType.getType();
collectionType = Collection.class.isAssignableFrom(collectionType) ? collectionType : List.class;
Collection<Object> items = targetType.getType().isArray() ? new ArrayList<Object>() : CollectionFactory
.createCollection(collectionType, sourceValue.size());
TypeInformation<?> componentType = targetType.getComponentType();
Class<?> rawComponentType = componentType == null ? null : componentType.getType();
for (int i = 0; i < sourceValue.size(); i++) {
Object dbObjItem = sourceValue.get(i);
if (dbObjItem instanceof DBRef) {
items.add(DBRef.class.equals(rawComponentType) ? dbObjItem : read(componentType, ((DBRef) dbObjItem).fetch(),
parent));
items.add(read(targetType.getComponentType(), ((DBRef) dbObjItem).fetch()));
} else if (dbObjItem instanceof DBObject) {
items.add(read(componentType, (DBObject) dbObjItem, parent));
items.add(read(targetType.getComponentType(), (DBObject) dbObjItem));
} else {
items.add(getPotentiallyConvertedSimpleRead(dbObjItem, rawComponentType));
TypeInformation<?> componentType = targetType.getComponentType();
items.add(getPotentiallyConvertedSimpleRead(dbObjItem, componentType == null ? null : componentType.getType()));
}
}
return getPotentiallyConvertedSimpleRead(items, targetType.getType());
return items;
}
/**
@@ -757,7 +762,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
* @return
*/
@SuppressWarnings("unchecked")
protected Map<Object, Object> readMap(TypeInformation<?> type, DBObject dbObject, Object parent) {
protected Map<Object, Object> readMap(TypeInformation<?> type, DBObject dbObject) {
Assert.notNull(dbObject);
@@ -780,12 +785,9 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
Object value = entry.getValue();
TypeInformation<?> valueType = type.getMapValueType();
Class<?> rawValueType = valueType == null ? null : valueType.getType();
if (value instanceof DBObject) {
map.put(key, read(valueType, (DBObject) value, parent));
} else if (value instanceof DBRef) {
map.put(key, DBRef.class.equals(rawValueType) ? value : read(valueType, ((DBRef) value).fetch()));
map.put(key, read(valueType, (DBObject) value));
} else {
Class<?> valueClass = valueType == null ? null : valueType.getType();
map.put(key, getPotentiallyConvertedSimpleRead(value, valueClass));
@@ -817,7 +819,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return null;
}
Class<?> target = conversions.getCustomWriteTarget(obj.getClass());
Class<?> target = conversions.getCustomWriteTarget(getClass());
if (target != null) {
return conversionService.convert(obj, target);
}
@@ -848,12 +850,12 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return result;
}
if (obj.getClass().isArray()) {
return maybeConvertList(Arrays.asList((Object[]) obj));
if (obj instanceof List) {
return maybeConvertList((List<?>) obj);
}
if (obj instanceof Collection) {
return maybeConvertList((Collection<?>) obj);
if (obj.getClass().isArray()) {
return maybeConvertList(Arrays.asList((Object[]) obj));
}
DBObject newDbo = new BasicDBObject();
@@ -906,53 +908,53 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return dbObject;
}
private class MongoDbPropertyValueProvider implements PropertyValueProvider<MongoPersistentProperty> {
private class DelegatingParameterValueProvider implements ParameterValueProvider {
private final DBObject source;
private final SpELExpressionEvaluator evaluator;
private final Object parent;
private final ParameterValueProvider delegate;
private final MappedConstructor constructor;
public MongoDbPropertyValueProvider(DBObject source, SpELContext factory, Object parent) {
this(source, new DefaultSpELExpressionEvaluator(source, factory), parent);
}
public MongoDbPropertyValueProvider(DBObject source, DefaultSpELExpressionEvaluator evaluator, Object parent) {
/**
* {@link ParameterValueProvider} to delegate source object lookup to a {@link SpELAwareParameterValueProvider} in
* case a {@link MappedConstructor.MappedParameter} has a SpEL expression configured for it.
*
* @param constructor must not be {@literal null}.
* @param source must not be {@literal null}.
* @param delegate must not be {@literal null}.
*/
public DelegatingParameterValueProvider(MappedConstructor constructor, DBObject source,
SpELAwareParameterValueProvider delegate) {
Assert.notNull(constructor);
Assert.notNull(source);
Assert.notNull(evaluator);
Assert.notNull(delegate);
this.constructor = constructor;
this.source = source;
this.evaluator = evaluator;
this.parent = parent;
this.delegate = delegate;
}
/*
* (non-Javadoc)
* @see org.springframework.data.convert.PropertyValueProvider#getPropertyValue(org.springframework.data.mapping.PersistentProperty)
* @see org.springframework.data.mapping.model.ParameterValueProvider#getParameterValue(org.springframework.data.mapping.PreferredConstructor.Parameter)
*/
@SuppressWarnings("unchecked")
public <T> T getPropertyValue(MongoPersistentProperty property) {
public <T> T getParameterValue(Parameter<T> parameter) {
String expression = property.getSpelExpression();
Object value = expression != null ? evaluator.evaluate(expression) : source.get(property.getFieldName());
MappedConstructor.MappedParameter mappedParameter = constructor.getFor(parameter);
Object value = mappedParameter.hasSpELExpression() ? delegate.getParameterValue(parameter) : source
.get(mappedParameter.getFieldName());
if (value == null) {
return null;
}
TypeInformation<?> type = mappedParameter.getPropertyTypeInformation();
TypeInformation<?> type = property.getTypeInformation();
Class<?> rawType = type.getType();
if (conversions.hasCustomReadTarget(value.getClass(), rawType)) {
return (T) conversionService.convert(value, rawType);
} else if (value instanceof DBRef) {
return (T) (rawType.equals(DBRef.class) ? value : read(type, ((DBRef) value).fetch(), parent));
if (value instanceof DBRef) {
return (T) read(type, ((DBRef) value).fetch());
} else if (value instanceof BasicDBList) {
return (T) readCollectionOrArray(type, (BasicDBList) value, parent);
return (T) getPotentiallyConvertedSimpleRead(readCollectionOrArray(type, (BasicDBList) value), type.getType());
} else if (value instanceof DBObject) {
return (T) read(type, (DBObject) value, parent);
return (T) read(type, (DBObject) value);
} else {
return (T) getPotentiallyConvertedSimpleRead(value, rawType);
return (T) getPotentiallyConvertedSimpleRead(value, type.getType());
}
}
}
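For orientation, a minimal sketch of the public read(...) entry point the refactoring above runs through (the converter wiring and the Person class are assumed to exist elsewhere; the document values are made up):

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

import org.springframework.data.mongodb.core.convert.MappingMongoConverter;

class ConverterReadSketch {

    static Person readPerson(MappingMongoConverter converter) {
        // a document as it would come back from the driver
        DBObject source = new BasicDBObject("_id", 4711L).append("name", "Carter");
        return converter.read(Person.class, source);
    }
}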

View File

@@ -15,15 +15,18 @@
*/
package org.springframework.data.mongodb.core.convert;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.UUID;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.bson.BSON;
import org.bson.types.Binary;
import org.bson.types.ObjectId;
import org.springframework.core.convert.ConversionFailedException;
import org.springframework.core.convert.TypeDescriptor;
import org.springframework.core.convert.converter.Converter;
import org.springframework.data.mapping.model.MappingException;
import org.springframework.util.StringUtils;
/**
@@ -124,26 +127,50 @@ abstract class MongoConverters {
}
}
public static enum URLToStringConverter implements Converter<URL, String> {
/**
* Custom {@link Converter} to convert {@link UUID}s into {@link Binary}s.
*
* @author Oliver Gierke
*/
public static enum UUIDToBinaryConverter implements Converter<UUID, Binary> {
INSTANCE;
public String convert(URL source) {
return source == null ? null : source.toString();
/*
* (non-Javadoc)
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
*/
public Binary convert(UUID source) {
try {
return source == null ? null : new Binary(BSON.B_UUID, source.toString().getBytes("UTF-8"));
} catch (UnsupportedEncodingException e) {
throw new MappingException(String.format("Could not convert UUID %s into Binary!", source.toString()), e);
}
}
}
public static enum StringToURLConverter implements Converter<String, URL> {
public static enum BinaryToUUIDConverter implements Converter<Binary, UUID> {
INSTANCE;
private static final TypeDescriptor SOURCE = TypeDescriptor.valueOf(String.class);
private static final TypeDescriptor TARGET = TypeDescriptor.valueOf(URL.class);
private static final Log LOG = LogFactory.getLog(BinaryToUUIDConverter.class);
public URL convert(String source) {
/*
* (non-Javadoc)
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
*/
public UUID convert(Binary source) {
if (BSON.B_UUID != source.getType()) {
LOG.warn(String.format("Source binary %s is actually not a UUID! Trying to read it nevertheless...",
source.toString()));
}
try {
return source == null ? null : new URL(source);
} catch (MalformedURLException e) {
throw new ConversionFailedException(SOURCE, TARGET, source, e);
return source == null ? null : UUID.fromString(new String(source.getData(), "UTF-8"));
} catch (UnsupportedEncodingException e) {
throw new MappingException(String.format("Could not convert Binary %s into UUID!", source.toString()), e);
}
}
}
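A small round-trip sketch for the two UUID converters shown above (enum and method names as in the diff; because MongoConverters is package-visible the snippet is assumed to sit in the same package, and registration through CustomConversions is assumed to happen elsewhere):

import java.util.UUID;

import org.bson.types.Binary;

class UuidConverterRoundTrip {

    public static void main(String[] args) {
        UUID original = UUID.randomUUID();
        Binary binary = MongoConverters.UUIDToBinaryConverter.INSTANCE.convert(original);
        UUID restored = MongoConverters.BinaryToUUIDConverter.INSTANCE.convert(binary);
        System.out.println(original.equals(restored)); // prints true
    }
}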

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2010-2012 the original author or authors.
* Copyright 2010-2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,10 +16,8 @@
package org.springframework.data.mongodb.core.convert;
import org.springframework.data.convert.EntityWriter;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import com.mongodb.DBObject;
import com.mongodb.DBRef;
/**
* A MongoWriter is responsible for converting an object of type T to the native MongoDB representation DBObject.
@@ -39,14 +37,4 @@ public interface MongoWriter<T> extends EntityWriter<T, DBObject> {
* @return
*/
Object convertToMongoType(Object obj);
/**
* Creates a {@link DBRef} to refer to the given object.
*
* @param object the object to create a {@link DBRef} to link to. The object's type has to carry an id attribute.
* @param referingProperty the client-side property referring to the object which might carry additional metadata for
* the {@link DBRef} object to create. Can be {@literal null}.
* @return will never be {@literal null}.
*/
DBRef toDBRef(Object object, MongoPersistentProperty referingProperty);
}

View File

@@ -1,11 +1,11 @@
/*
* Copyright 2011-2012 the original author or authors.
* Copyright (c) 2011 by the original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -13,9 +13,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.index;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
@@ -24,29 +24,14 @@ import java.lang.annotation.Target;
/**
* Mark a class to use compound indexes.
*
* @author Jon Brisbin
* @author Oliver Gierke
* @author Jon Brisbin <jbrisbin@vmware.com>
*/
@Target({ ElementType.TYPE })
@Documented
@Retention(RetentionPolicy.RUNTIME)
public @interface CompoundIndex {
/**
* The actual index definition in JSON format. The keys of the JSON document are the fields to be indexed, the values
* define the index direction (1 for ascending, -1 for descending).
*
* @return
*/
String def();
/**
* It does not make sense to use this attribute, as the direction has to be defined in the {@link #def()}
* attribute anyway.
*
* @return
*/
@Deprecated
IndexDirection direction() default IndexDirection.ASCENDING;
boolean unique() default false;
@@ -55,18 +40,8 @@ public @interface CompoundIndex {
boolean dropDups() default false;
/**
* The name of the index to be created.
*
* @return
*/
String name() default "";
/**
* The collection the index will be created in. Will default to the collection the annotated domain class will be
* stored in.
*
* @return
*/
String collection() default "";
}
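A minimal usage sketch for the annotation above (the Person class and its fields are made up; @CompoundIndexes is the container annotation that the index creator further down in this compare scans for):

import org.springframework.data.mongodb.core.index.CompoundIndex;
import org.springframework.data.mongodb.core.index.CompoundIndexes;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
@CompoundIndexes({
    @CompoundIndex(name = "age_last_name_idx", def = "{ 'age' : 1, 'lastName' : -1 }") })
class Person {

    String lastName;
    int age;
}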

View File

@@ -1,132 +0,0 @@
/*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.index;
import org.springframework.data.mongodb.core.query.Order;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;
/**
* Value object for an index field.
*
* @author Oliver Gierke
*/
public final class IndexField {
private final String key;
private final Order order;
private final boolean isGeo;
private IndexField(String key, Order order, boolean isGeo) {
Assert.hasText(key);
Assert.isTrue(order != null ^ isGeo);
this.key = key;
this.order = order;
this.isGeo = isGeo;
}
/**
* Creates a default {@link IndexField} with the given key and {@link Order}.
*
* @param key must not be {@literal null} or empty.
* @param order must not be {@literal null}.
* @return
*/
public static IndexField create(String key, Order order) {
Assert.notNull(order);
return new IndexField(key, order, false);
}
/**
* Creates a geo {@link IndexField} for the given key.
*
* @param key must not be {@literal null} or empty.
* @return
*/
public static IndexField geo(String key) {
return new IndexField(key, null, true);
}
/**
* @return the key
*/
public String getKey() {
return key;
}
/**
* Returns the order of the {@link IndexField} or {@literal null} in case we have a geo index field.
*
* @return the order
*/
public Order getOrder() {
return order;
}
/**
* Returns whether the {@link IndexField} is a geo index field.
*
* @return the isGeo
*/
public boolean isGeo() {
return isGeo;
}
/*
* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof IndexField)) {
return false;
}
IndexField that = (IndexField) obj;
return this.key.equals(that.key) && ObjectUtils.nullSafeEquals(this.order, that.order) && this.isGeo == that.isGeo;
}
/*
* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
int result = 17;
result += 31 * ObjectUtils.nullSafeHashCode(key);
result += 31 * ObjectUtils.nullSafeHashCode(order);
result += 31 * ObjectUtils.nullSafeHashCode(isGeo);
return result;
}
/*
* (non-Javadoc)
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
return String.format("IndexField [ key: %s, order: %s, isGeo: %s]", key, order, isGeo);
}
}
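A short sketch of the two factory methods above (the key names are made up):

import org.springframework.data.mongodb.core.index.IndexField;
import org.springframework.data.mongodb.core.query.Order;

class IndexFieldSketch {

    public static void main(String[] args) {
        IndexField age = IndexField.create("age", Order.DESCENDING);
        IndexField location = IndexField.geo("location");
        System.out.println(age.getOrder());   // DESCENDING
        System.out.println(location.isGeo()); // true
    }
}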

View File

@@ -15,57 +15,33 @@
*/
package org.springframework.data.mongodb.core.index;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;
import org.springframework.data.mongodb.core.query.Order;
public class IndexInfo {
private final List<IndexField> indexFields;
private final Map<String, Order> fieldSpec;
private final String name;
private final boolean unique;
private final boolean dropDuplicates;
private final boolean sparse;
private String name;
public IndexInfo(List<IndexField> indexFields, String name, boolean unique, boolean dropDuplicates, boolean sparse) {
private boolean unique = false;
this.indexFields = Collections.unmodifiableList(indexFields);
private boolean dropDuplicates = false;
private boolean sparse = false;
public IndexInfo(Map<String, Order> fieldSpec, String name, boolean unique, boolean dropDuplicates, boolean sparse) {
super();
this.fieldSpec = fieldSpec;
this.name = name;
this.unique = unique;
this.dropDuplicates = dropDuplicates;
this.sparse = sparse;
}
/**
* Returns the individual index fields of the index.
*
* @return
*/
public List<IndexField> getIndexFields() {
return this.indexFields;
}
/**
* Returns whether the index is covering exactly the fields given independently of the order.
*
* @param keys must not be {@literal null}.
* @return
*/
public boolean isIndexForFields(Collection<String> keys) {
Assert.notNull(keys);
List<String> indexKeys = new ArrayList<String>(indexFields.size());
for (IndexField field : indexFields) {
indexKeys.add(field.getKey());
}
return indexKeys.containsAll(keys);
public Map<String, Order> getFieldSpec() {
return fieldSpec;
}
public String getName() {
@@ -86,7 +62,7 @@ public class IndexInfo {
@Override
public String toString() {
return "IndexInfo [indexFields=" + indexFields + ", name=" + name + ", unique=" + unique + ", dropDuplicates="
return "IndexInfo [fieldSpec=" + fieldSpec + ", name=" + name + ", unique=" + unique + ", dropDuplicates="
+ dropDuplicates + ", sparse=" + sparse + "]";
}
@@ -95,7 +71,7 @@ public class IndexInfo {
final int prime = 31;
int result = 1;
result = prime * result + (dropDuplicates ? 1231 : 1237);
result = prime * result + ObjectUtils.nullSafeHashCode(indexFields);
result = prime * result + ((fieldSpec == null) ? 0 : fieldSpec.hashCode());
result = prime * result + ((name == null) ? 0 : name.hashCode());
result = prime * result + (sparse ? 1231 : 1237);
result = prime * result + (unique ? 1231 : 1237);
@@ -104,39 +80,34 @@ public class IndexInfo {
@Override
public boolean equals(Object obj) {
if (this == obj) {
if (this == obj)
return true;
}
if (obj == null) {
if (obj == null)
return false;
}
if (getClass() != obj.getClass()) {
if (getClass() != obj.getClass())
return false;
}
IndexInfo other = (IndexInfo) obj;
if (dropDuplicates != other.dropDuplicates) {
if (dropDuplicates != other.dropDuplicates)
return false;
}
if (indexFields == null) {
if (other.indexFields != null) {
if (fieldSpec == null) {
if (other.fieldSpec != null)
return false;
}
} else if (!indexFields.equals(other.indexFields)) {
} else if (!fieldSpec.equals(other.fieldSpec))
return false;
}
if (name == null) {
if (other.name != null) {
if (other.name != null)
return false;
}
} else if (!name.equals(other.name)) {
} else if (!name.equals(other.name))
return false;
}
if (sparse != other.sparse) {
if (sparse != other.sparse)
return false;
}
if (unique != other.unique) {
if (unique != other.unique)
return false;
}
return true;
}
/**
* [{ "v" : 1 , "key" : { "_id" : 1} , "ns" : "database.person" , "name" : "_id_"}, { "v" : 1 , "key" : { "age" : -1}
* , "ns" : "database.person" , "name" : "age_-1" , "unique" : true , "dropDups" : true}]
*/
}
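And a sketch of the IndexField-based variant of IndexInfo shown above (values are made up; note that isIndexForFields(...) ignores the index direction):

import java.util.Arrays;

import org.springframework.data.mongodb.core.index.IndexField;
import org.springframework.data.mongodb.core.index.IndexInfo;
import org.springframework.data.mongodb.core.query.Order;

class IndexInfoSketch {

    public static void main(String[] args) {
        IndexInfo info = new IndexInfo(Arrays.asList(IndexField.create("age", Order.DESCENDING)), "age_-1", true,
            true, false);
        System.out.println(info.isIndexForFields(Arrays.asList("age"))); // prints true
    }
}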

View File

@@ -1,11 +1,11 @@
/*
* Copyright 2011-2012 the original author or authors.
* Copyright (c) 2011 by the original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -13,19 +13,20 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.index;
import java.lang.reflect.Field;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.context.ApplicationListener;
import org.springframework.data.mapping.PersistentEntity;
import org.springframework.data.mapping.PropertyHandler;
import org.springframework.data.mapping.event.MappingContextEvent;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
@@ -37,16 +38,16 @@ import com.mongodb.DBObject;
import com.mongodb.util.JSON;
/**
* Component that inspects {@link MongoPersistentEntity} instances contained in the given {@link MongoMappingContext}
* for indexing metadata and ensures that the indexes are available.
* Component that inspects {@link BasicMongoPersistentEntity} instances contained in the given
* {@link MongoMappingContext} for indexing metadata and ensures that the indexes are available.
*
* @author Jon Brisbin
* @author Jon Brisbin <jbrisbin@vmware.com>
* @author Oliver Gierke
*/
public class MongoPersistentEntityIndexCreator implements
ApplicationListener<MappingContextEvent<MongoPersistentEntity<MongoPersistentProperty>, MongoPersistentProperty>> {
private static final Logger log = LoggerFactory.getLogger(MongoPersistentEntityIndexCreator.class);
private static final Log log = LogFactory.getLog(MongoPersistentEntityIndexCreator.class);
private final Map<Class<?>, Boolean> classesSeen = new ConcurrentHashMap<Class<?>, Boolean>();
private final MongoDbFactory mongoDbFactory;
@@ -75,13 +76,7 @@ public class MongoPersistentEntityIndexCreator implements
*/
public void onApplicationEvent(
MappingContextEvent<MongoPersistentEntity<MongoPersistentProperty>, MongoPersistentProperty> event) {
PersistentEntity<?, ?> entity = event.getPersistentEntity();
// Double check type as Spring infrastructure does not consider nested generics
if (entity instanceof MongoPersistentEntity) {
checkForIndexes(event.getPersistentEntity());
}
checkForIndexes(event.getPersistentEntity());
}
protected void checkForIndexes(final MongoPersistentEntity<?> entity) {
@@ -95,12 +90,12 @@ public class MongoPersistentEntityIndexCreator implements
if (type.isAnnotationPresent(CompoundIndexes.class)) {
CompoundIndexes indexes = type.getAnnotation(CompoundIndexes.class);
for (CompoundIndex index : indexes.value()) {
String indexColl = StringUtils.hasText(index.collection()) ? index.collection() : entity.getCollection();
DBObject definition = (DBObject) JSON.parse(index.def());
ensureIndex(indexColl, index.name(), definition, index.unique(), index.dropDups(), index.sparse());
String indexColl = index.collection();
if ("".equals(indexColl)) {
indexColl = entity.getCollection();
}
ensureIndex(indexColl, index.name(), index.def(), index.direction(), index.unique(), index.dropDups(),
index.sparse());
if (log.isDebugEnabled()) {
log.debug("Created compound index " + index);
}
@@ -109,14 +104,10 @@ public class MongoPersistentEntityIndexCreator implements
entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) {
Field field = persistentProperty.getField();
if (field.isAnnotationPresent(Indexed.class)) {
Indexed index = field.getAnnotation(Indexed.class);
String name = index.name();
if (!StringUtils.hasText(name)) {
name = persistentProperty.getFieldName();
} else {
@@ -128,17 +119,11 @@ public class MongoPersistentEntityIndexCreator implements
}
}
}
String collection = StringUtils.hasText(index.collection()) ? index.collection() : entity.getCollection();
int direction = index.direction() == IndexDirection.ASCENDING ? 1 : -1;
DBObject definition = new BasicDBObject(persistentProperty.getFieldName(), direction);
ensureIndex(collection, name, definition, index.unique(), index.dropDups(), index.sparse());
ensureIndex(collection, name, null, index.direction(), index.unique(), index.dropDups(), index.sparse());
if (log.isDebugEnabled()) {
log.debug("Created property index " + index);
}
} else if (field.isAnnotationPresent(GeoSpatialIndexed.class)) {
GeoSpatialIndexed index = field.getAnnotation(GeoSpatialIndexed.class);
@@ -163,15 +148,21 @@ public class MongoPersistentEntityIndexCreator implements
}
}
protected void ensureIndex(String collection, String name, DBObject indexDefinition, boolean unique,
boolean dropDups, boolean sparse) {
protected void ensureIndex(String collection, final String name, final String def, final IndexDirection direction,
final boolean unique, final boolean dropDups, final boolean sparse) {
DBObject defObj;
if (null != def) {
defObj = (DBObject) JSON.parse(def);
} else {
defObj = new BasicDBObject();
defObj.put(name, (direction == IndexDirection.ASCENDING ? 1 : -1));
}
DBObject opts = new BasicDBObject();
opts.put("name", name);
opts.put("dropDups", dropDups);
opts.put("sparse", sparse);
opts.put("unique", unique);
mongoDbFactory.getDb().getCollection(collection).ensureIndex(indexDefinition, opts);
mongoDbFactory.getDb().getCollection(collection).ensureIndex(defObj, opts);
}
}
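For reference, roughly the driver call that ensureIndex(...) above boils down to for a single ascending property index (collection, field and option values are made up):

import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBObject;

class EnsureIndexSketch {

    static void ensureLastNameIndex(DB db) {
        DBObject keys = new BasicDBObject("lastName", 1); // 1 = ascending, -1 = descending
        DBObject opts = new BasicDBObject();
        opts.put("name", "lastName");
        opts.put("unique", false);
        opts.put("dropDups", false);
        opts.put("sparse", false);
        db.getCollection("person").ensureIndex(keys, opts);
    }
}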

View File

@@ -21,9 +21,9 @@ import java.math.BigInteger;
import java.util.HashSet;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.bson.types.ObjectId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.mapping.Association;
import org.springframework.data.mapping.model.AnnotationBasedPersistentProperty;
import org.springframework.data.mapping.model.SimpleTypeHolder;
@@ -39,7 +39,7 @@ import com.mongodb.DBObject;
public class BasicMongoPersistentProperty extends AnnotationBasedPersistentProperty<MongoPersistentProperty> implements
MongoPersistentProperty {
private static final Logger LOG = LoggerFactory.getLogger(BasicMongoPersistentProperty.class);
private static final Log LOG = LogFactory.getLog(BasicMongoPersistentProperty.class);
private static final String ID_FIELD_NAME = "_id";
private static final Set<Class<?>> SUPPORTED_ID_TYPES = new HashSet<Class<?>>();
@@ -87,7 +87,6 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
*/
@Override
public boolean isIdProperty() {
if (super.isIdProperty()) {
return true;
}

View File

@@ -28,7 +28,6 @@ import org.springframework.data.mapping.model.SimpleTypeHolder;
public class CachingMongoPersistentProperty extends BasicMongoPersistentProperty {
private Boolean isIdProperty;
private Boolean isAssociation;
private String fieldName;
/**
@@ -58,18 +57,6 @@ public class CachingMongoPersistentProperty extends BasicMongoPersistentProperty
return this.isIdProperty;
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty#isAssociation()
*/
@Override
public boolean isAssociation() {
if (this.isAssociation == null) {
this.isAssociation = super.isAssociation();
}
return this.isAssociation;
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty#getFieldName()

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2011-2012 by the original author(s).
* Copyright (c) 2011 by the original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -13,9 +13,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.mapping;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
@@ -24,21 +24,19 @@ import java.lang.annotation.Target;
import org.springframework.data.annotation.Reference;
/**
* An annotation that indicates the annotated field is to be stored using a {@link com.mongodb.DBRef}.
* An annotation that indicates the annotated field is to be stored using a com.mongodb.DBRef
*
* @author Jon Brisbin
* @author Oliver Gierke
* @author Jon Brisbin <jbrisbin@vmware.com>
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.FIELD })
@Reference
public @interface DBRef {
/**
* The database the referred entity resides in.
*
* @return
*/
String collection() default "";
String id() default "";
String db() default "";
}
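A minimal usage sketch for the annotation above (domain classes and the database name are made up):

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.DBRef;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
class Account {
    @Id String id;
}

@Document
class Person {

    @Id String id;

    @DBRef(db = "accounts") // stored as a com.mongodb.DBRef pointing into the 'accounts' database
    Account account;
}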

View File

@@ -19,7 +19,6 @@ import java.math.BigInteger;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.UUID;
import java.util.regex.Pattern;
import org.bson.types.Binary;
@@ -53,7 +52,6 @@ public abstract class MongoSimpleTypes {
simpleTypes.add(DBObject.class);
simpleTypes.add(Pattern.class);
simpleTypes.add(Binary.class);
simpleTypes.add(UUID.class);
MONGO_SIMPLE_TYPES = Collections.unmodifiableSet(simpleTypes);
}

View File

@@ -15,13 +15,12 @@
*/
package org.springframework.data.mongodb.core.mapping.event;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.mongodb.DBObject;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.context.ApplicationListener;
import org.springframework.core.GenericTypeResolver;
import com.mongodb.DBObject;
/**
* Base class to implement domain class specific {@link ApplicationListener}s.
*
@@ -30,7 +29,7 @@ import com.mongodb.DBObject;
*/
public abstract class AbstractMongoEventListener<E> implements ApplicationListener<MongoMappingEvent<?>> {
private static final Logger LOG = LoggerFactory.getLogger(AbstractMongoEventListener.class);
protected final Log LOG = LogFactory.getLog(getClass());
private final Class<?> domainClass;
/**

View File

@@ -1,11 +1,11 @@
/*
* Copyright 2011-2012 the original author or authors.
* Copyright (c) 2011 by the original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,31 +15,22 @@
*/
package org.springframework.data.mongodb.core.mapping.event;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.mongodb.DBObject;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* @author Jon Brisbin <jbrisbin@vmware.com>
*/
public class LoggingEventListener extends AbstractMongoEventListener<Object> {
private static final Logger log = LoggerFactory.getLogger(LoggingEventListener.class);
private Log log = LogFactory.getLog(getClass());
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onBeforeConvert(java.lang.Object)
*/
@Override
public void onBeforeConvert(Object source) {
log.info("onBeforeConvert: " + source);
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onBeforeSave(java.lang.Object, com.mongodb.DBObject)
*/
@Override
public void onBeforeSave(Object source, DBObject dbo) {
try {
@@ -48,30 +39,19 @@ public class LoggingEventListener extends AbstractMongoEventListener<Object> {
}
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onAfterSave(java.lang.Object, com.mongodb.DBObject)
*/
@Override
public void onAfterSave(Object source, DBObject dbo) {
log.info("onAfterSave: " + source + ", " + dbo);
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onAfterLoad(com.mongodb.DBObject)
*/
@Override
public void onAfterLoad(DBObject dbo) {
log.info("onAfterLoad: " + dbo);
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onAfterConvert(com.mongodb.DBObject, java.lang.Object)
*/
@Override
public void onAfterConvert(DBObject dbo, Object source) {
log.info("onAfterConvert: " + dbo + ", " + source);
}
}
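A sketch of a custom listener following the same pattern as the logging listener above (the callback signature mirrors the onBeforeSave override shown; the audit field name is made up):

import com.mongodb.DBObject;

import org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener;

class AuditingMongoEventListener extends AbstractMongoEventListener<Object> {

    @Override
    public void onBeforeSave(Object source, DBObject dbo) {
        // stamp every document with a marker right before it is written
        dbo.put("_audited", Boolean.TRUE);
    }
}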

View File

@@ -1,68 +0,0 @@
/*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.mapping.event;
import java.util.Set;
import javax.validation.ConstraintViolationException;
import javax.validation.Validator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.Assert;
import com.mongodb.DBObject;
/**
* javax.validation dependent entity validator. When registered as a Spring component it is automatically invoked
* before entities are saved to the database.
*
* @author Maciej Walkowiak
*/
public class ValidatingMongoEventListener extends AbstractMongoEventListener<Object> {
private static final Logger LOG = LoggerFactory.getLogger(ValidatingMongoEventListener.class);
private final Validator validator;
/**
* Creates a new {@link ValidatingMongoEventListener} using the given {@link Validator}.
*
* @param validator must not be {@literal null}.
*/
public ValidatingMongoEventListener(Validator validator) {
Assert.notNull(validator);
this.validator = validator;
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onBeforeSave(java.lang.Object, com.mongodb.DBObject)
*/
@Override
@SuppressWarnings({ "rawtypes", "unchecked" })
public void onBeforeSave(Object source, DBObject dbo) {
LOG.debug("Validating object: {}", source);
Set violations = validator.validate(source);
if (!violations.isEmpty()) {
LOG.info("During object: {} validation violations found: {}", source, violations);
throw new ConstraintViolationException(violations);
}
}
}
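A sketch of wiring the listener above into an application context via Java config (LocalValidatorFactoryBean is the usual javax.validation bridge; bean and class names are arbitrary):

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.mapping.event.ValidatingMongoEventListener;
import org.springframework.validation.beanvalidation.LocalValidatorFactoryBean;

@Configuration
class ValidationConfig {

    @Bean
    public LocalValidatorFactoryBean validator() {
        return new LocalValidatorFactoryBean();
    }

    @Bean
    public ValidatingMongoEventListener validatingMongoEventListener() {
        // LocalValidatorFactoryBean implements javax.validation.Validator itself
        return new ValidatingMongoEventListener(validator());
    }
}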

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2011-2012 the original author or authors.
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -26,39 +26,33 @@ import com.mongodb.DBObject;
* Collects the results of performing a MapReduce operations.
*
* @author Mark Pollack
* @author Oliver Gierke
* @param <T> The class onto which the results are mapped, accessible via an iterator.
*
* @param <T> The class onto which the results are mapped, accessible via an iterator.
*/
public class MapReduceResults<T> implements Iterable<T> {
private final List<T> mappedResults;
private final DBObject rawResults;
private final String outputCollection;
private final MapReduceTiming mapReduceTiming;
private final MapReduceCounts mapReduceCounts;
/**
* Creates a new {@link MapReduceResults} from the given mapped results and the raw one.
*
* @param mappedResults must not be {@literal null}.
* @param rawResults must not be {@literal null}.
*/
private DBObject rawResults;
private MapReduceTiming mapReduceTiming;
private MapReduceCounts mapReduceCounts;
private String outputCollection;
public MapReduceResults(List<T> mappedResults, DBObject rawResults) {
Assert.notNull(mappedResults);
Assert.notNull(rawResults);
this.mappedResults = mappedResults;
this.rawResults = rawResults;
this.mapReduceTiming = parseTiming(rawResults);
this.mapReduceCounts = parseCounts(rawResults);
this.outputCollection = parseOutputCollection(rawResults);
parseTiming(rawResults);
parseCounts(rawResults);
if (rawResults.get("result") != null) {
this.outputCollection = (String) rawResults.get("result");
}
}
/*
* (non-Javadoc)
* @see java.lang.Iterable#iterator()
*/
public Iterator<T> iterator() {
return mappedResults.iterator();
}
@@ -79,71 +73,28 @@ public class MapReduceResults<T> implements Iterable<T> {
return rawResults;
}
private MapReduceTiming parseTiming(DBObject rawResults) {
protected void parseTiming(DBObject rawResults) {
DBObject timing = (DBObject) rawResults.get("timing");
if (timing == null) {
return new MapReduceTiming(-1, -1, -1);
if (timing != null) {
if (timing.get("mapTime") != null && timing.get("emitLoop") != null && timing.get("total") != null) {
mapReduceTiming = new MapReduceTiming((Long) timing.get("mapTime"), (Integer) timing.get("emitLoop"),
(Integer) timing.get("total"));
}
} else {
mapReduceTiming = new MapReduceTiming(-1, -1, -1);
}
if (timing.get("mapTime") != null && timing.get("emitLoop") != null && timing.get("total") != null) {
return new MapReduceTiming(getAsLong(timing, "mapTime"), getAsLong(timing, "emitLoop"),
getAsLong(timing, "total"));
}
return new MapReduceTiming(-1, -1, -1);
}
/**
* Returns the value of the source's field with the given key as {@link Long}.
*
* @param source
* @param key
* @return
*/
private Long getAsLong(DBObject source, String key) {
Object raw = source.get(key);
return raw instanceof Long ? (Long) raw : (Integer) raw;
}
/**
* Parses the raw {@link DBObject} result into a {@link MapReduceCounts} value object.
*
* @param rawResults
* @return
*/
private MapReduceCounts parseCounts(DBObject rawResults) {
protected void parseCounts(DBObject rawResults) {
DBObject counts = (DBObject) rawResults.get("counts");
if (counts == null) {
return new MapReduceCounts(-1, -1, -1);
if (counts != null) {
if (counts.get("input") != null && counts.get("emit") != null && counts.get("output") != null) {
mapReduceCounts = new MapReduceCounts((Integer) counts.get("input"), (Integer) counts.get("emit"),
(Integer) counts.get("output"));
}
} else {
mapReduceCounts = new MapReduceCounts(-1, -1, -1);
}
if (counts.get("input") != null && counts.get("emit") != null && counts.get("output") != null) {
return new MapReduceCounts((Integer) counts.get("input"), (Integer) counts.get("emit"),
(Integer) counts.get("output"));
}
return new MapReduceCounts(-1, -1, -1);
}
/**
* Parses the output collection from the raw {@link DBObject} result.
*
* @param rawResults
* @return
*/
private String parseOutputCollection(DBObject rawResults) {
Object resultField = rawResults.get("result");
if (resultField == null) {
return null;
}
return resultField instanceof DBObject ? ((DBObject) resultField).get("collection").toString() : resultField
.toString();
}
}
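For orientation, a sketch of the raw map-reduce result document both parsing variants above expect (all numbers and the output collection name are made up):

import java.util.Collections;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

import org.springframework.data.mongodb.core.mapreduce.MapReduceResults;

class MapReduceResultsSketch {

    public static void main(String[] args) {
        DBObject timing = new BasicDBObject("mapTime", 10L).append("emitLoop", 20).append("total", 35);
        DBObject counts = new BasicDBObject("input", 100).append("emit", 100).append("output", 42);
        DBObject raw = new BasicDBObject("timing", timing).append("counts", counts).append("result", "person_mr");

        MapReduceResults<String> results = new MapReduceResults<String>(Collections.<String> emptyList(), raw);
        for (String mapped : results) { // Iterable, as shown above
            System.out.println(mapped);
        }
    }
}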

View File

@@ -15,23 +15,19 @@
*/
package org.springframework.data.mongodb.core.query;
import static org.springframework.util.ObjectUtils.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.regex.Pattern;
import org.bson.BSON;
import org.bson.types.BasicBSONList;
import org.springframework.data.mongodb.InvalidMongoDbApiUsageException;
import org.springframework.data.mongodb.core.geo.Circle;
import org.springframework.data.mongodb.core.geo.Point;
import org.springframework.data.mongodb.core.geo.Shape;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
@@ -101,17 +97,13 @@ public class Criteria implements CriteriaDefinition {
throw new InvalidMongoDbApiUsageException(
"Multiple 'is' values declared. You need to use 'and' with multiple criteria");
}
if (lastOperatorWasNot()) {
if (this.criteria.size() > 0 && "$not".equals(this.criteria.keySet().toArray()[this.criteria.size() - 1])) {
throw new InvalidMongoDbApiUsageException("Invalid query: 'not' can't be used with 'is' - use 'ne' instead.");
}
this.isValue = o;
return this;
}
private boolean lastOperatorWasNot() {
return this.criteria.size() > 0 && "$not".equals(this.criteria.keySet().toArray()[this.criteria.size() - 1]);
}
/**
* Creates a criterion using the $ne operator
*
@@ -277,11 +269,7 @@ public class Criteria implements CriteriaDefinition {
* @return
*/
public Criteria not() {
return not(null);
}
private Criteria not(Object value) {
criteria.put("$not", value);
criteria.put("$not", null);
return this;
}
@@ -292,7 +280,8 @@ public class Criteria implements CriteriaDefinition {
* @return
*/
public Criteria regex(String re) {
return regex(re, null);
criteria.put("$regex", re);
return this;
}
/**
@@ -303,32 +292,13 @@ public class Criteria implements CriteriaDefinition {
* @return
*/
public Criteria regex(String re, String options) {
return regex(toPattern(re, options));
}
/**
* Syntactic sugar for {@link #is(Object)}, making it obvious that we create a regex predicate.
*
* @param pattern
* @return
*/
public Criteria regex(Pattern pattern) {
Assert.notNull(pattern);
if (lastOperatorWasNot()) {
return not(pattern);
criteria.put("$regex", re);
if (StringUtils.hasText(options)) {
criteria.put("$options", options);
}
this.isValue = pattern;
return this;
}
private Pattern toPattern(String regex, String options) {
Assert.notNull(regex);
return Pattern.compile(regex, options == null ? 0 : BSON.regexFlags(options));
}
/**
* Creates a geospatial criterion using a $within $center operation. This is only available for Mongo 1.7 and higher.
*
@@ -432,9 +402,11 @@ public class Criteria implements CriteriaDefinition {
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.query.CriteriaDefinition#getCriteriaObject()
*/
* (non-Javadoc)
*
* @see org.springframework.datastore.document.mongodb.query.Criteria#
* getCriteriaObject(java.lang.String)
*/
public DBObject getCriteriaObject() {
if (this.criteriaChain.size() == 1) {
return criteriaChain.get(0).getSingleCriteriaObject();
@@ -454,17 +426,16 @@ public class Criteria implements CriteriaDefinition {
DBObject dbo = new BasicDBObject();
boolean not = false;
for (String k : this.criteria.keySet()) {
Object value = this.criteria.get(k);
if (not) {
DBObject notDbo = new BasicDBObject();
notDbo.put(k, value);
notDbo.put(k, this.criteria.get(k));
dbo.put("$not", notDbo);
not = false;
} else {
if ("$not".equals(k) && value == null) {
if ("$not".equals(k)) {
not = true;
} else {
dbo.put(k, value);
dbo.put(k, this.criteria.get(k));
}
}
}
@@ -497,63 +468,4 @@ public class Criteria implements CriteriaDefinition {
}
}
/*
* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || !getClass().equals(obj.getClass())) {
return false;
}
Criteria that = (Criteria) obj;
boolean keyEqual = this.key == null ? that.key == null : this.key.equals(that.key);
boolean criteriaEqual = this.criteria.equals(that.criteria);
boolean valueEqual = isEqual(this.isValue, that.isValue);
return keyEqual && criteriaEqual && valueEqual;
}
/**
* Checks the given objects for equality. Handles {@link Pattern} and arrays correctly.
*
* @param left
* @param right
* @return
*/
private boolean isEqual(Object left, Object right) {
if (left == null) {
return right == null;
}
if (left instanceof Pattern) {
return right instanceof Pattern ? ((Pattern) left).pattern().equals(((Pattern) right).pattern()) : false;
}
return ObjectUtils.nullSafeEquals(left, right);
}
/*
* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
int result = 17;
result += nullSafeHashCode(key);
result += criteria.hashCode();
result += nullSafeHashCode(isValue);
return result;
}
}
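Both variants of Criteria shown in this diff keep the same public entry points. A minimal usage sketch, assuming documents with lastname and age fields; the exact rendering of the regex part differs slightly between the two variants above:

import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

public class CriteriaUsageExample {

    public static void main(String[] args) {

        // Case-insensitive regex on "lastname" combined with a further criterion on "age".
        Criteria criteria = where("lastname").regex("^gier.*", "i").and("age").gte(21);

        Query query = new Query(criteria);
        System.out.println(query.getQueryObject());

        // Note: not() must not be followed by is(...) - the guard above throws an
        // InvalidMongoDbApiUsageException and suggests ne(...) instead.
    }
}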

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2012 the original author or authors.
* Copyright 2010-2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -13,22 +13,23 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.cdi;
package org.springframework.data.mongodb.core.query;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.List;
/**
* @author Oliver Gierke
*/
class RepositoryClient {
public class OrQuery extends Query {
@Inject
CdiPersonRepository repository;
/**
* @return the repository
*/
public CdiPersonRepository getRepository() {
return repository;
public OrQuery(Query... q) {
super(getOrCriteria(q));
}
private static Criteria getOrCriteria(Query[] queries) {
List<Criteria> criteriaList = new ArrayList<Criteria>();
for (Query q : queries) {
criteriaList.addAll(q.getCriteria());
}
return new Criteria(criteriaList, "$or");
}
}
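OrQuery simply collects the criteria of the given queries under a single $or key. The same document can be built directly with Criteria.orOperator(...), which is what the reworked query creation further down in this diff relies on. A sketch with placeholder field values:

import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

public class OrCriteriaExample {

    public static void main(String[] args) {

        // Combine two criteria under a single $or, equivalent to new OrQuery(query1, query2).
        Criteria or = new Criteria().orOperator(where("lastname").is("Gierke"), where("age").gte(40));

        // Renders something like { "$or" : [ { "lastname" : "Gierke" }, { "age" : { "$gte" : 40 } } ] }.
        System.out.println(new Query(or).getQueryObject());
    }
}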

View File

@@ -15,9 +15,6 @@
*/
package org.springframework.data.mongodb.core.query;
import static org.springframework.data.mongodb.core.SerializationUtils.*;
import static org.springframework.util.ObjectUtils.*;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
@@ -143,60 +140,4 @@ public class Query {
protected List<Criteria> getCriteria() {
return new ArrayList<Criteria>(this.criteria.values());
}
/*
* (non-Javadoc)
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
return String.format("Query: %s, Fields: %s, Sort: %s", serializeToJsonSafely(getQueryObject()),
serializeToJsonSafely(getFieldsObject()), serializeToJsonSafely(getSortObject()));
}
/*
* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || !getClass().equals(obj.getClass())) {
return false;
}
Query that = (Query) obj;
boolean criteriaEqual = this.criteria.equals(that.criteria);
boolean fieldsEqual = this.fieldSpec == null ? that.fieldSpec == null : this.fieldSpec.equals(that.fieldSpec);
boolean sortEqual = this.sort == null ? that.sort == null : this.sort.equals(that.sort);
boolean hintEqual = this.hint == null ? that.hint == null : this.hint.equals(that.hint);
boolean skipEqual = this.skip == that.skip;
boolean limitEqual = this.limit == that.limit;
return criteriaEqual && fieldsEqual && sortEqual && hintEqual && skipEqual && limitEqual;
}
/*
* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
int result = 17;
result += 31 * criteria.hashCode();
result += 31 * nullSafeHashCode(fieldSpec);
result += 31 * nullSafeHashCode(sort);
result += 31 * nullSafeHashCode(hint);
result += 31 * skip;
result += 31 * limit;
return result;
}
}
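The toString(), equals() and hashCode() methods shown on one side of this diff make Query instances easy to log and to compare in tests. A small sketch, assuming a version that includes these additions:

import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.query.Query;

public class QueryToStringExample {

    public static void main(String[] args) {

        Query query = new Query(where("age").gte(18)).skip(20).limit(10);

        // toString() renders the query, fields and sort documents in a JSON-safe way.
        System.out.println(query);

        // equals() compares criteria, fields, sort, hint, skip and limit.
        System.out.println(query.equals(new Query(where("age").gte(18)).skip(20).limit(10))); // true
    }
}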

View File

@@ -1,106 +0,0 @@
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.gridfs;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.springframework.util.Assert;
/**
* Value object to abstract Ant paths.
*
* @author Oliver Gierke
*/
class AntPath {
private static final String PREFIX_DELIMITER = ":";
private static final Pattern WILDCARD_PATTERN = Pattern.compile("\\?|\\*\\*|\\*");
private final String path;
/**
* Creates a new {@link AntPath} from the given path.
*
* @param path must not be {@literal null}.
*/
public AntPath(String path) {
Assert.notNull(path);
this.path = path;
}
/**
* Returns whether the path is a pattern.
*
* @return
*/
public boolean isPattern() {
String path = stripPrefix(this.path);
return (path.indexOf('*') != -1 || path.indexOf('?') != -1);
}
private static String stripPrefix(String path) {
int index = path.indexOf(PREFIX_DELIMITER);
return (index > -1 ? path.substring(index + 1) : path);
}
/**
* Returns the regular expression equivalent of this Ant path.
*
* @return
*/
public String toRegex() {
StringBuilder patternBuilder = new StringBuilder();
Matcher m = WILDCARD_PATTERN.matcher(path);
int end = 0;
while (m.find()) {
patternBuilder.append(quote(path, end, m.start()));
String match = m.group();
if ("?".equals(match)) {
patternBuilder.append('.');
} else if ("**".equals(match)) {
patternBuilder.append(".*");
} else if ("*".equals(match)) {
patternBuilder.append("[^/]*");
}
end = m.end();
}
patternBuilder.append(quote(path, end, path.length()));
return patternBuilder.toString();
}
private static String quote(String s, int start, int end) {
if (start == end) {
return "";
}
return Pattern.quote(s.substring(start, end));
}
/*
* (non-Javadoc)
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
return path;
}
}
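AntPath turns an Ant-style pattern into a regular expression by quoting the literal parts and translating ? to ., ** to .* and * to [^/]*. Since the class is package-private, this sketch assumes it is run from the same package; the pattern is a placeholder:

package org.springframework.data.mongodb.gridfs;

public class AntPathExample {

    public static void main(String[] args) {

        AntPath path = new AntPath("products/**/*.png");

        System.out.println(path.isPattern()); // true
        System.out.println(path.toRegex());   // \Qproducts/\E.*\Q/\E[^/]*\Q.png\E
    }
}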

View File

@@ -1,73 +0,0 @@
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.gridfs;
import org.springframework.data.mongodb.core.query.Criteria;
/**
* GridFs-specific helper class to define {@link Criteria}s.
*
* @author Oliver Gierke
*/
public class GridFsCriteria extends Criteria {
/**
* Creates a new {@link GridFsCriteria} for the given key.
*
* @param key
*/
public GridFsCriteria(String key) {
super(key);
}
/**
* Creates a {@link GridFsCriteria} for restrictions on the file's metadata.
*
* @return
*/
public static GridFsCriteria whereMetaData() {
return new GridFsCriteria("metadata");
}
/**
* Creates a {@link GridFsCriteria} for restrictions on a single file's metadata item.
*
* @param metadataKey
* @return
*/
public static GridFsCriteria whereMetaData(String metadataKey) {
String extension = metadataKey == null ? "" : "." + metadataKey;
return new GridFsCriteria(String.format("metadata%s", extension));
}
/**
* Creates a {@link GridFsCriteria} for restrictions on the file's name.
*
* @return
*/
public static GridFsCriteria whereFilename() {
return new GridFsCriteria("filename");
}
/**
* Creates a {@link GridFsCriteria} for restrictions on the file's content type.
*
* @return
*/
public static GridFsCriteria whereContentType() {
return new GridFsCriteria("contentType");
}
}
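GridFsCriteria only prefixes the GridFS file properties, so it composes with the regular Query API. A short sketch of the three entry points; the file name, metadata key and content type are placeholders:

import static org.springframework.data.mongodb.core.query.Query.query;
import static org.springframework.data.mongodb.gridfs.GridFsCriteria.*;

import org.springframework.data.mongodb.core.query.Query;

public class GridFsCriteriaExample {

    public static void main(String[] args) {

        Query byName = query(whereFilename().is("avatar.png"));
        Query byMetadata = query(whereMetaData("owner").is("alice"));     // targets "metadata.owner"
        Query byContentType = query(whereContentType().is("image/png"));  // targets "contentType"

        System.out.println(byMetadata.getQueryObject()); // { "metadata.owner" : "alice" }
    }
}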

View File

@@ -1,105 +0,0 @@
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.gridfs;
import java.io.InputStream;
import java.util.List;
import org.springframework.core.io.support.ResourcePatternResolver;
import org.springframework.data.mongodb.core.query.Query;
import com.mongodb.DBObject;
import com.mongodb.gridfs.GridFSDBFile;
import com.mongodb.gridfs.GridFSFile;
/**
* Collection of operations to store and read files from MongoDB GridFS.
*
* @author Oliver Gierke
*/
public interface GridFsOperations extends ResourcePatternResolver {
/**
* Stores the given content into a file with the given name.
*
* @param content must not be {@literal null}.
* @param filename must not be {@literal null} or empty.
* @return the {@link GridFSFile} just created
*/
GridFSFile store(InputStream content, String filename);
/**
* Stores the given content into a file with the given name using the given metadata. The metadata object will be
* marshalled before writing.
*
* @param content must not be {@literal null}.
* @param filename must not be {@literal null} or empty.
* @param metadata
* @return the {@link GridFSFile} just created
*/
GridFSFile store(InputStream content, String filename, Object metadata);
/**
* Stores the given content into a file with the given name using the given metadata.
*
* @param content must not be {@literal null}.
* @param filename must not be {@literal null} or empty.
* @param metadata must not be {@literal null}.
* @return the {@link GridFSFile} just created
*/
GridFSFile store(InputStream content, String filename, DBObject metadata);
/**
* Returns all files matching the given query.
*
* @param query
* @return
*/
List<GridFSDBFile> find(Query query);
/**
* Returns a single file matching the given query or {@literal null} in case no file matches.
*
* @param query
* @return
*/
GridFSDBFile findOne(Query query);
/**
* Deletes all files matching the given {@link Query}.
*
* @param query
*/
void delete(Query query);
/**
* Returns the {@link GridFsResource} with the given file name.
*
* @param filename
* @return
* @see ResourcePatternResolver#getResource(String)
*/
GridFsResource getResource(String filename);
/**
* Returns all {@link GridFsResource}s matching the given file name pattern.
*
* @param filenamePattern
* @return
* @see ResourcePatternResolver#getResources(String)
*/
GridFsResource[] getResources(String filenamePattern);
}
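A typical round trip against the interface: store a file with some metadata, look it up by name and delete it again. The GridFsOperations instance is assumed to be provided elsewhere (for example a configured GridFsTemplate); file name and metadata values are placeholders.

import static org.springframework.data.mongodb.core.query.Query.query;
import static org.springframework.data.mongodb.gridfs.GridFsCriteria.whereFilename;

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.util.List;

import org.springframework.data.mongodb.gridfs.GridFsOperations;

import com.mongodb.BasicDBObject;
import com.mongodb.gridfs.GridFSDBFile;

public class GridFsUsageExample {

    public static void storeAndFind(GridFsOperations operations) {

        InputStream content = new ByteArrayInputStream("hello".getBytes());
        operations.store(content, "greeting.txt", new BasicDBObject("owner", "alice"));

        List<GridFSDBFile> files = operations.find(query(whereFilename().is("greeting.txt")));
        System.out.println(files.size());

        operations.delete(query(whereFilename().is("greeting.txt")));
    }
}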

View File

@@ -1,88 +0,0 @@
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.gridfs;
import java.io.IOException;
import org.springframework.core.io.InputStreamResource;
import org.springframework.core.io.Resource;
import com.mongodb.gridfs.GridFSDBFile;
/**
* {@link GridFSDBFile} based {@link Resource} implementation.
*
* @author Oliver Gierke
*/
public class GridFsResource extends InputStreamResource {
private final GridFSDBFile file;
/**
* Creates a new {@link GridFsResource} from the given {@link GridFSDBFile}.
*
* @param file must not be {@literal null}.
*/
public GridFsResource(GridFSDBFile file) {
super(file.getInputStream());
this.file = file;
}
/*
* (non-Javadoc)
* @see org.springframework.core.io.AbstractResource#contentLength()
*/
@Override
public long contentLength() throws IOException {
return file.getLength();
}
/*
* (non-Javadoc)
* @see org.springframework.core.io.AbstractResource#getFilename()
*/
@Override
public String getFilename() throws IllegalStateException {
return file.getFilename();
}
/*
* (non-Javadoc)
* @see org.springframework.core.io.AbstractResource#lastModified()
*/
@Override
public long lastModified() throws IOException {
return file.getUploadDate().getTime();
}
/**
* Returns the {@link Resource}'s id.
*
* @return
*/
public Object getId() {
return file.getId();
}
/**
* Returns the {@link Resource}'s content type.
*
* @return
*/
public String getContentType() {
return file.getContentType();
}
}
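Because GridFsResource is a plain Spring Resource, file content can be consumed with the usual resource utilities. A sketch, again assuming a configured GridFsOperations instance and a hypothetical file name:

import org.springframework.data.mongodb.gridfs.GridFsOperations;
import org.springframework.data.mongodb.gridfs.GridFsResource;
import org.springframework.util.FileCopyUtils;

public class GridFsResourceExample {

    public static byte[] read(GridFsOperations operations) throws Exception {

        GridFsResource resource = operations.getResource("greeting.txt");
        System.out.println(resource.getFilename() + " / " + resource.getContentType());

        return FileCopyUtils.copyToByteArray(resource.getInputStream());
    }
}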

View File

@@ -1,194 +0,0 @@
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.gridfs;
import static org.springframework.data.mongodb.core.query.Query.*;
import static org.springframework.data.mongodb.gridfs.GridFsCriteria.*;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import org.springframework.core.io.support.ResourcePatternResolver;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBObject;
import com.mongodb.gridfs.GridFS;
import com.mongodb.gridfs.GridFSDBFile;
import com.mongodb.gridfs.GridFSFile;
import com.mongodb.gridfs.GridFSInputFile;
/**
* {@link GridFsOperations} implementation to store content into MongoDB GridFS.
*
* @author Oliver Gierke
*/
public class GridFsTemplate implements GridFsOperations, ResourcePatternResolver {
private final MongoDbFactory dbFactory;
private final String bucket;
private final MongoConverter converter;
private final QueryMapper queryMapper;
/**
* Creates a new {@link GridFsTemplate} using the given {@link MongoDbFactory} and {@link MongoConverter}.
*
* @param dbFactory must not be {@literal null}.
* @param converter must not be {@literal null}.
*/
public GridFsTemplate(MongoDbFactory dbFactory, MongoConverter converter) {
this(dbFactory, converter, null);
}
/**
* Creates a new {@link GridFsTemplate} using the given {@link MongoDbFactory} and {@link MongoConverter}.
*
* @param dbFactory must not be {@literal null}.
* @param converter must not be {@literal null}.
* @param bucket
*/
public GridFsTemplate(MongoDbFactory dbFactory, MongoConverter converter, String bucket) {
Assert.notNull(dbFactory);
Assert.notNull(converter);
this.dbFactory = dbFactory;
this.converter = converter;
this.bucket = bucket;
this.queryMapper = new QueryMapper(converter);
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.String)
*/
public GridFSFile store(InputStream content, String filename) {
return store(content, filename, (Object) null);
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.String, java.lang.Object)
*/
public GridFSFile store(InputStream content, String filename, Object metadata) {
DBObject dbObject = new BasicDBObject();
converter.write(metadata, dbObject);
return store(content, filename, dbObject);
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.String, com.mongodb.DBObject)
*/
public GridFSFile store(InputStream content, String filename, DBObject metadata) {
Assert.notNull(content);
Assert.hasText(filename);
Assert.notNull(metadata);
GridFSInputFile file = getGridFs().createFile(content);
file.setFilename(filename);
file.setMetaData(metadata);
file.save();
return file;
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.gridfs.GridFsOperations#find(com.mongodb.DBObject)
*/
public List<GridFSDBFile> find(Query query) {
return getGridFs().find(getMappedQuery(query));
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.gridfs.GridFsOperations#findOne(com.mongodb.DBObject)
*/
public GridFSDBFile findOne(Query query) {
return getGridFs().findOne(getMappedQuery(query));
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.gridfs.GridFsOperations#delete(org.springframework.data.mongodb.core.query.Query)
*/
public void delete(Query query) {
getGridFs().remove(getMappedQuery(query));
}
/*
* (non-Javadoc)
* @see org.springframework.core.io.ResourceLoader#getClassLoader()
*/
public ClassLoader getClassLoader() {
return dbFactory.getClass().getClassLoader();
}
/*
* (non-Javadoc)
* @see org.springframework.core.io.ResourceLoader#getResource(java.lang.String)
*/
public GridFsResource getResource(String location) {
return new GridFsResource(findOne(query(whereFilename().is(location))));
}
/*
* (non-Javadoc)
* @see org.springframework.core.io.support.ResourcePatternResolver#getResources(java.lang.String)
*/
public GridFsResource[] getResources(String locationPattern) {
if (!StringUtils.hasText(locationPattern)) {
return new GridFsResource[0];
}
AntPath path = new AntPath(locationPattern);
if (path.isPattern()) {
List<GridFSDBFile> files = find(query(whereFilename().regex(path.toRegex())));
List<GridFsResource> resources = new ArrayList<GridFsResource>(files.size());
for (GridFSDBFile file : files) {
resources.add(new GridFsResource(file));
}
return resources.toArray(new GridFsResource[resources.size()]);
}
return new GridFsResource[] { getResource(locationPattern) };
}
private DBObject getMappedQuery(Query query) {
return query == null ? null : queryMapper.getMappedObject(query.getQueryObject(), null);
}
private GridFS getGridFs() {
DB db = dbFactory.getDb();
return bucket == null ? new GridFS(db) : new GridFS(db, bucket);
}
}
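Wiring a GridFsTemplate by hand only requires a MongoDbFactory and a MongoConverter; the optional bucket selects the GridFS collection prefix. A sketch with placeholder host, database and bucket names:

import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.gridfs.GridFsTemplate;

import com.mongodb.Mongo;

public class GridFsTemplateSetupExample {

    public static GridFsTemplate gridFsTemplate() throws Exception {

        MongoDbFactory factory = new SimpleMongoDbFactory(new Mongo("localhost"), "database");

        MappingMongoConverter converter = new MappingMongoConverter(factory, new MongoMappingContext());
        converter.afterPropertiesSet(); // registers the default converters when used outside a Spring container

        // Files end up in the "photos.files" / "photos.chunks" collections.
        return new GridFsTemplate(factory, converter, "photos");
    }
}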

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2002-2012 the original author or authors.
* Copyright 2002-2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,15 +15,13 @@
*/
package org.springframework.data.mongodb.monitor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.authentication.UserCredentials;
import org.springframework.data.mongodb.core.MongoDbUtils;
import com.mongodb.CommandResult;
import com.mongodb.DB;
import com.mongodb.Mongo;
import com.mongodb.MongoException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.data.mongodb.core.MongoDbUtils;
/**
* Base class to encapsulate common configuration settings when connecting to a database
@@ -32,7 +30,7 @@ import com.mongodb.MongoException;
*/
public abstract class AbstractMonitor {
private final Logger logger = LoggerFactory.getLogger(getClass());
private final Log logger = LogFactory.getLog(getClass());
protected Mongo mongo;
private String username;
@@ -53,6 +51,7 @@ public abstract class AbstractMonitor {
* @param password The password to use
*/
public void setPassword(String password) {
this.password = password;
}
@@ -66,6 +65,6 @@ public abstract class AbstractMonitor {
}
public DB getDb(String databaseName) {
return MongoDbUtils.getDB(mongo, databaseName, new UserCredentials(username, password));
return MongoDbUtils.getDB(mongo, databaseName, username, password == null ? null : password.toCharArray());
}
}
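Subclasses only need the protected mongo field and the getDb(...) helper, which resolves the database using the configured credentials. A hypothetical monitor that reads the server status; the class name and command usage are illustrative, not part of the module:

import org.springframework.data.mongodb.monitor.AbstractMonitor;

import com.mongodb.CommandResult;
import com.mongodb.Mongo;

public class ServerStatusMonitor extends AbstractMonitor {

    public ServerStatusMonitor(Mongo mongo) {
        this.mongo = mongo; // protected field inherited from AbstractMonitor
    }

    public CommandResult serverStatus() {
        return getDb("admin").command("serverStatus");
    }
}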

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2010-2012 the original author or authors.
* Copyright 2002-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -32,19 +32,22 @@ public interface MongoRepository<T, ID extends Serializable> extends PagingAndSo
/*
* (non-Javadoc)
* @see org.springframework.data.repository.CrudRepository#save(java.lang.Iterable)
*
* @see org.springframework.data.repository.Repository#save(java.lang.Iterable)
*/
<S extends T> List<S> save(Iterable<S> entities);
List<T> save(Iterable<? extends T> entities);
/*
* (non-Javadoc)
* @see org.springframework.data.repository.CrudRepository#findAll()
*/
* (non-Javadoc)
*
* @see org.springframework.data.repository.Repository#findAll()
*/
List<T> findAll();
/*
* (non-Javadoc)
* @see org.springframework.data.repository.PagingAndSortingRepository#findAll(org.springframework.data.domain.Sort)
*/
* (non-Javadoc)
*
* @see org.springframework.data.repository.PagingAndSortingRepository#findAll(org.springframework.data.domain.Sort)
*/
List<T> findAll(Sort sort);
}
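Application code extends this interface rather than implementing it; query methods are derived from their names. A hypothetical domain class and repository:

import java.util.List;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.repository.MongoRepository;

class Person {

    @Id String id;
    String lastname;
}

interface PersonRepository extends MongoRepository<Person, String> {

    // Derived query: translates into a find on the "lastname" field.
    List<Person> findByLastname(String lastname);
}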

View File

@@ -1,76 +0,0 @@
/*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.cdi;
import java.lang.annotation.Annotation;
import java.util.Set;
import javax.enterprise.context.spi.CreationalContext;
import javax.enterprise.inject.spi.Bean;
import javax.enterprise.inject.spi.BeanManager;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.repository.support.MongoRepositoryFactory;
import org.springframework.data.repository.cdi.CdiRepositoryBean;
import org.springframework.util.Assert;
/**
* {@link CdiRepositoryBean} to create Mongo repository instances.
*
* @author Oliver Gierke
*/
public class MongoRepositoryBean<T> extends CdiRepositoryBean<T> {
private final Bean<MongoOperations> operations;
/**
* Creates a new {@link MongoRepositoryBean}.
*
* @param operations must not be {@literal null}.
* @param qualifiers must not be {@literal null}.
* @param repositoryType must not be {@literal null}.
* @param beanManager must not be {@literal null}.
*/
public MongoRepositoryBean(Bean<MongoOperations> operations, Set<Annotation> qualifiers, Class<T> repositoryType,
BeanManager beanManager) {
super(qualifiers, repositoryType, beanManager);
Assert.notNull(operations);
this.operations = operations;
}
/*
* (non-Javadoc)
* @see javax.enterprise.inject.spi.Bean#getScope()
*/
public Class<? extends Annotation> getScope() {
return operations.getScope();
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.cdi.CdiRepositoryBean#create(javax.enterprise.context.spi.CreationalContext, java.lang.Class)
*/
@Override
protected T create(CreationalContext<T> creationalContext, Class<T> repositoryType) {
MongoOperations mongoOperations = getDependencyInstance(operations, MongoOperations.class);
MongoRepositoryFactory factory = new MongoRepositoryFactory(mongoOperations);
return factory.getRepository(repositoryType);
}
}

View File

@@ -1,111 +0,0 @@
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.cdi;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.enterprise.event.Observes;
import javax.enterprise.inject.UnsatisfiedResolutionException;
import javax.enterprise.inject.spi.AfterBeanDiscovery;
import javax.enterprise.inject.spi.Bean;
import javax.enterprise.inject.spi.BeanManager;
import javax.enterprise.inject.spi.ProcessBean;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.repository.cdi.CdiRepositoryExtensionSupport;
/**
* CDI extension to export Mongo repositories.
*
* @author Oliver Gierke
*/
public class MongoRepositoryExtension extends CdiRepositoryExtensionSupport {
private static final Logger LOG = LoggerFactory.getLogger(MongoRepositoryExtension.class);
private final Map<Set<Annotation>, Bean<MongoOperations>> mongoOperations = new HashMap<Set<Annotation>, Bean<MongoOperations>>();
public MongoRepositoryExtension() {
LOG.info("Activating CDI extension for Spring Data MongoDB repositories.");
}
@SuppressWarnings("unchecked")
<X> void processBean(@Observes ProcessBean<X> processBean) {
Bean<X> bean = processBean.getBean();
for (Type type : bean.getTypes()) {
if (type instanceof Class<?> && MongoOperations.class.isAssignableFrom((Class<?>) type)) {
if (LOG.isDebugEnabled()) {
LOG.debug(String.format("Discovered %s with qualifiers %s.", MongoOperations.class.getName(),
bean.getQualifiers()));
}
// Store the MongoOperations bean using its qualifiers.
mongoOperations.put(new HashSet<Annotation>(bean.getQualifiers()), (Bean<MongoOperations>) bean);
}
}
}
void afterBeanDiscovery(@Observes AfterBeanDiscovery afterBeanDiscovery, BeanManager beanManager) {
for (Entry<Class<?>, Set<Annotation>> entry : getRepositoryTypes()) {
Class<?> repositoryType = entry.getKey();
Set<Annotation> qualifiers = entry.getValue();
// Create the bean representing the repository.
Bean<?> repositoryBean = createRepositoryBean(repositoryType, qualifiers, beanManager);
if (LOG.isInfoEnabled()) {
LOG.info(String.format("Registering bean for %s with qualifiers %s.", repositoryType.getName(), qualifiers));
}
// Register the bean to the container.
afterBeanDiscovery.addBean(repositoryBean);
}
}
/**
* Creates a {@link Bean}.
*
* @param <T> The type of the repository.
* @param repositoryType The class representing the repository.
* @param beanManager The BeanManager instance.
* @return The bean.
*/
private <T> Bean<T> createRepositoryBean(Class<T> repositoryType, Set<Annotation> qualifiers, BeanManager beanManager) {
// Determine the MongoOperations bean which matches the qualifiers of the repository.
Bean<MongoOperations> mongoOperations = this.mongoOperations.get(qualifiers);
if (mongoOperations == null) {
throw new UnsatisfiedResolutionException(String.format("Unable to resolve a bean for '%s' with qualifiers %s.",
MongoOperations.class.getName(), qualifiers));
}
// Construct and return the repository bean.
return new MongoRepositoryBean<T>(mongoOperations, qualifiers, repositoryType, beanManager);
}
}
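In a CDI container the extension picks up any MongoOperations bean and registers a repository bean for every detected repository interface, so clients can simply @Inject them. A hypothetical producer and client, mirroring the RepositoryClient shown earlier in this diff; the Person type and connection details are made up:

import java.net.UnknownHostException;

import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.inject.Produces;
import javax.inject.Inject;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.repository.MongoRepository;

import com.mongodb.Mongo;

class MongoOperationsProducer {

    @Produces @ApplicationScoped
    MongoOperations mongoOperations() throws UnknownHostException {
        return new MongoTemplate(new Mongo("localhost"), "database");
    }
}

class Person {
    String id;
}

interface CdiPersonRepository extends MongoRepository<Person, String> {}

class RepositoryClient {

    @Inject CdiPersonRepository repository;
}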

View File

@@ -1,120 +0,0 @@
/*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.config;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.context.annotation.ComponentScan.Filter;
import org.springframework.context.annotation.Import;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.repository.support.MongoRepositoryFactoryBean;
import org.springframework.data.repository.query.QueryLookupStrategy;
import org.springframework.data.repository.query.QueryLookupStrategy.Key;
/**
* Annotation to activate MongoDB repositories. If no base package is configured through either {@link #value()},
{@link #basePackageClasses()} or {@link #basePackages()} it will trigger scanning of the package of the annotated class.
*
* @author Oliver Gierke
*/
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Documented
@Import(MongoRepositoriesRegistrar.class)
public @interface EnableMongoRepositories {
/**
* Alias for the {@link #basePackages()} attribute. Allows for more concise annotation declarations e.g.:
{@code @EnableMongoRepositories("org.my.pkg")} instead of {@code @EnableMongoRepositories(basePackages="org.my.pkg")}.
*/
String[] value() default {};
/**
* Base packages to scan for annotated components. {@link #value()} is an alias for (and mutually exclusive with) this
* attribute. Use {@link #basePackageClasses()} for a type-safe alternative to String-based package names.
*/
String[] basePackages() default {};
/**
* Type-safe alternative to {@link #basePackages()} for specifying the packages to scan for annotated components. The
* package of each class specified will be scanned. Consider creating a special no-op marker class or interface in
* each package that serves no purpose other than being referenced by this attribute.
*/
Class<?>[] basePackageClasses() default {};
/**
* Specifies which types are eligible for component scanning. Further narrows the set of candidate components from
* everything in {@link #basePackages()} to everything in the base packages that matches the given filter or filters.
*/
Filter[] includeFilters() default {};
/**
* Specifies which types are not eligible for component scanning.
*/
Filter[] excludeFilters() default {};
/**
* Returns the postfix to be used when looking up custom repository implementations. Defaults to {@literal Impl}. So
* for a repository named {@code PersonRepository} the corresponding implementation class will be looked up scanning
* for {@code PersonRepositoryImpl}.
*
* @return
*/
String repositoryImplementationPostfix() default "";
/**
* Configures the location of where to find the Spring Data named queries properties file. Will default to
{@code META-INF/mongo-named-queries.properties}.
*
* @return
*/
String namedQueriesLocation() default "";
/**
* Returns the key of the {@link QueryLookupStrategy} to be used for lookup queries for query methods. Defaults to
* {@link Key#CREATE_IF_NOT_FOUND}.
*
* @return
*/
Key queryLookupStrategy() default Key.CREATE_IF_NOT_FOUND;
/**
* Returns the {@link FactoryBean} class to be used for each repository instance. Defaults to
* {@link MongoRepositoryFactoryBean}.
*
* @return
*/
Class<?> repositoryFactoryBeanClass() default MongoRepositoryFactoryBean.class;
/**
* Configures the name of the {@link MongoTemplate} bean to be used with the repositories detected.
*
* @return
*/
String mongoTemplateRef() default "mongoTemplate";
/**
* Whether to automatically create indexes for query methods defined in the repository interface.
*
* @return
*/
boolean createIndexesForQueryMethods() default false;
}
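A JavaConfig sketch of the annotation in use, assuming a version of Spring Data MongoDB that ships it; the base package, host and database names are placeholders. The repositories pick up the MongoTemplate bean via the default mongoTemplateRef of "mongoTemplate".

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;

import com.mongodb.Mongo;

@Configuration
@EnableMongoRepositories(basePackages = "com.example.repositories")
class MongoRepositoryConfig {

    @Bean
    public MongoTemplate mongoTemplate() throws Exception {
        return new MongoTemplate(new Mongo("localhost"), "database");
    }
}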

View File

@@ -1,48 +0,0 @@
/*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.config;
import java.lang.annotation.Annotation;
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
import org.springframework.data.repository.config.RepositoryBeanDefinitionRegistrarSupport;
import org.springframework.data.repository.config.RepositoryConfigurationExtension;
/**
* Mongo-specific {@link ImportBeanDefinitionRegistrar}.
*
* @author Oliver Gierke
*/
class MongoRepositoriesRegistrar extends RepositoryBeanDefinitionRegistrarSupport {
/*
* (non-Javadoc)
* @see org.springframework.data.repository.config.RepositoryBeanDefinitionRegistrarSupport#getAnnotation()
*/
@Override
protected Class<? extends Annotation> getAnnotation() {
return EnableMongoRepositories.class;
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.config.RepositoryBeanDefinitionRegistrarSupport#getExtension()
*/
@Override
protected RepositoryConfigurationExtension getExtension() {
return new MongoRepositoryConfigurationExtension();
}
}

View File

@@ -0,0 +1,54 @@
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.config;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.data.mongodb.repository.config.SimpleMongoRepositoryConfiguration.MongoRepositoryConfiguration;
import org.springframework.data.repository.config.AbstractRepositoryConfigDefinitionParser;
import org.w3c.dom.Element;
/**
{@link org.springframework.beans.factory.xml.BeanDefinitionParser} to create MongoDB repositories from classpath
* scanning or manual definition.
*
* @author Oliver Gierke
*/
public class MongoRepositoryConfigParser extends
AbstractRepositoryConfigDefinitionParser<SimpleMongoRepositoryConfiguration, MongoRepositoryConfiguration> {
/*
* (non-Javadoc)
* @see org.springframework.data.repository.config.AbstractRepositoryConfigDefinitionParser#getGlobalRepositoryConfigInformation(org.w3c.dom.Element)
*/
@Override
protected SimpleMongoRepositoryConfiguration getGlobalRepositoryConfigInformation(Element element) {
return new SimpleMongoRepositoryConfiguration(element);
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.config.AbstractRepositoryConfigDefinitionParser#postProcessBeanDefinition(org.springframework.data.repository.config.SingleRepositoryConfigInformation, org.springframework.beans.factory.support.BeanDefinitionBuilder, org.springframework.beans.factory.support.BeanDefinitionRegistry, java.lang.Object)
*/
@Override
protected void postProcessBeanDefinition(MongoRepositoryConfiguration context, BeanDefinitionBuilder builder,
BeanDefinitionRegistry registry, Object beanSource) {
builder.addPropertyReference("mongoOperations", context.getMongoTemplateRef());
builder.addPropertyValue("createIndexesForQueryMethods", context.getCreateQueryIndexes());
}
}

View File

@@ -1,80 +0,0 @@
/*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.config;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.core.annotation.AnnotationAttributes;
import org.springframework.data.config.ParsingUtils;
import org.springframework.data.mongodb.repository.support.MongoRepositoryFactoryBean;
import org.springframework.data.repository.config.AnnotationRepositoryConfigurationSource;
import org.springframework.data.repository.config.RepositoryConfigurationExtension;
import org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport;
import org.springframework.data.repository.config.XmlRepositoryConfigurationSource;
import org.w3c.dom.Element;
/**
* {@link RepositoryConfigurationExtension} for MongoDB.
*
* @author Oliver Gierke
*/
public class MongoRepositoryConfigurationExtension extends RepositoryConfigurationExtensionSupport {
private static final String MONGO_TEMPLATE_REF = "mongo-template-ref";
private static final String CREATE_QUERY_INDEXES = "create-query-indexes";
/*
* (non-Javadoc)
* @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#getModulePrefix()
*/
@Override
protected String getModulePrefix() {
return "mongo";
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.config.RepositoryConfigurationExtension#getRepositoryFactoryClassName()
*/
public String getRepositoryFactoryClassName() {
return MongoRepositoryFactoryBean.class.getName();
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#postProcess(org.springframework.beans.factory.support.BeanDefinitionBuilder, org.springframework.data.repository.config.XmlRepositoryConfigurationSource)
*/
@Override
public void postProcess(BeanDefinitionBuilder builder, XmlRepositoryConfigurationSource config) {
Element element = config.getElement();
ParsingUtils.setPropertyReference(builder, element, MONGO_TEMPLATE_REF, "mongoOperations");
ParsingUtils.setPropertyValue(builder, element, CREATE_QUERY_INDEXES, "createIndexesForQueryMethods");
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#postProcess(org.springframework.beans.factory.support.BeanDefinitionBuilder, org.springframework.data.repository.config.AnnotationRepositoryConfigurationSource)
*/
@Override
public void postProcess(BeanDefinitionBuilder builder, AnnotationRepositoryConfigurationSource config) {
AnnotationAttributes attributes = config.getAttributes();
builder.addPropertyReference("mongoOperations", attributes.getString("mongoTemplateRef"));
builder.addPropertyValue("createIndexesForQueryMethods", attributes.getBoolean("createIndexesForQueryMethods"));
}
}

View File

@@ -0,0 +1,196 @@
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.config;
import org.springframework.data.mongodb.repository.support.MongoRepositoryFactoryBean;
import org.springframework.data.repository.config.AutomaticRepositoryConfigInformation;
import org.springframework.data.repository.config.ManualRepositoryConfigInformation;
import org.springframework.data.repository.config.RepositoryConfig;
import org.springframework.data.repository.config.SingleRepositoryConfigInformation;
import org.springframework.util.StringUtils;
import org.w3c.dom.Element;
/**
* {@link RepositoryConfig} implementation to create {@link MongoRepositoryConfiguration} instances for both automatic
* and manual configuration.
*
* @author Oliver Gierke
*/
public class SimpleMongoRepositoryConfiguration
extends
RepositoryConfig<SimpleMongoRepositoryConfiguration.MongoRepositoryConfiguration, SimpleMongoRepositoryConfiguration> {
private static final String MONGO_TEMPLATE_REF = "mongo-template-ref";
private static final String CREATE_QUERY_INDEXES = "create-query-indexes";
private static final String DEFAULT_MONGO_TEMPLATE_REF = "mongoTemplate";
/**
* Creates a new {@link SimpleMongoRepositoryConfiguration} for the given {@link Element}.
*
* @param repositoriesElement
*/
protected SimpleMongoRepositoryConfiguration(Element repositoriesElement) {
super(repositoriesElement, MongoRepositoryFactoryBean.class.getName());
}
/**
Returns the bean name of the {@link org.springframework.data.mongodb.core.MongoTemplate} to be referenced.
*
* @return
*/
public String getMongoTemplateRef() {
String templateRef = getSource().getAttribute(MONGO_TEMPLATE_REF);
return StringUtils.hasText(templateRef) ? templateRef : DEFAULT_MONGO_TEMPLATE_REF;
}
/**
* Returns whether to create indexes for query methods.
*
* @return
*/
public boolean getCreateQueryIndexes() {
String createQueryIndexes = getSource().getAttribute(CREATE_QUERY_INDEXES);
return StringUtils.hasText(createQueryIndexes) ? Boolean.parseBoolean(createQueryIndexes) : false;
}
/*
* (non-Javadoc)
*
* @see
* org.springframework.data.repository.config.GlobalRepositoryConfigInformation
* #getAutoconfigRepositoryInformation(java.lang.String)
*/
public MongoRepositoryConfiguration getAutoconfigRepositoryInformation(String interfaceName) {
return new AutomaticMongoRepositoryConfiguration(interfaceName, this);
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.config.RepositoryConfig#getNamedQueriesLocation()
*/
public String getNamedQueriesLocation() {
return "classpath*:META-INF/mongo-named-queries.properties";
}
/*
* (non-Javadoc)
*
* @see org.springframework.data.repository.config.RepositoryConfig#
* createSingleRepositoryConfigInformationFor(org.w3c.dom.Element)
*/
@Override
protected MongoRepositoryConfiguration createSingleRepositoryConfigInformationFor(Element element) {
return new ManualMongoRepositoryConfiguration(element, this);
}
/**
* Simple interface for configuration values specific to Mongo repositories.
*
* @author Oliver Gierke
*/
public interface MongoRepositoryConfiguration extends
SingleRepositoryConfigInformation<SimpleMongoRepositoryConfiguration> {
String getMongoTemplateRef();
boolean getCreateQueryIndexes();
}
/**
* Implements manual lookup of the additional attributes.
*
* @author Oliver Gierke
*/
private static class ManualMongoRepositoryConfiguration extends
ManualRepositoryConfigInformation<SimpleMongoRepositoryConfiguration> implements MongoRepositoryConfiguration {
/**
* Creates a new {@link ManualMongoRepositoryConfiguration} for the given {@link Element} and parent.
*
* @param element
* @param parent
*/
public ManualMongoRepositoryConfiguration(Element element, SimpleMongoRepositoryConfiguration parent) {
super(element, parent);
}
/*
* (non-Javadoc)
*
* @see org.springframework.data.mongodb.repository.config.
* SimpleMongoRepositoryConfiguration
* .MongoRepositoryConfiguration#getMongoTemplateRef()
*/
public String getMongoTemplateRef() {
return getAttribute(MONGO_TEMPLATE_REF);
}
/* (non-Javadoc)
* @see org.springframework.data.mongodb.config.SimpleMongoRepositoryConfiguration.MongoRepositoryConfiguration#getCreateQueryIndexes()
*/
public boolean getCreateQueryIndexes() {
String attribute = getAttribute(CREATE_QUERY_INDEXES);
return attribute == null ? false : Boolean.parseBoolean(attribute);
}
}
/**
* Implements the lookup of the additional attributes during automatic configuration.
*
* @author Oliver Gierke
*/
private static class AutomaticMongoRepositoryConfiguration extends
AutomaticRepositoryConfigInformation<SimpleMongoRepositoryConfiguration> implements MongoRepositoryConfiguration {
/**
* Creates a new {@link AutomaticMongoRepositoryConfiguration} for the given interface and parent.
*
* @param interfaceName
* @param parent
*/
public AutomaticMongoRepositoryConfiguration(String interfaceName, SimpleMongoRepositoryConfiguration parent) {
super(interfaceName, parent);
}
/*
* (non-Javadoc)
*
* @see org.springframework.data.mongodb.repository.config.
* SimpleMongoRepositoryConfiguration
* .MongoRepositoryConfiguration#getMongoTemplateRef()
*/
public String getMongoTemplateRef() {
return getParent().getMongoTemplateRef();
}
/* (non-Javadoc)
* @see org.springframework.data.mongodb.config.SimpleMongoRepositoryConfiguration.MongoRepositoryConfiguration#getCreateQueryIndexes()
*/
public boolean getCreateQueryIndexes() {
return getParent().getCreateQueryIndexes();
}
}
}

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2010-2012 the original author or authors.
* Copyright 2002-2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -85,7 +85,7 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
} else if (method.isGeoNearQuery()) {
return new GeoNearExecution(accessor).execute(query);
} else if (method.isCollectionQuery()) {
return new CollectionExecution(accessor.getPageable()).execute(query);
return new CollectionExecution().execute(query);
} else if (method.isPageQuery()) {
return new PagedExecution(accessor.getPageable()).execute(query);
} else {
@@ -133,23 +133,12 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
*/
class CollectionExecution extends Execution {
private final Pageable pageable;
CollectionExecution(Pageable pageable) {
this.pageable = pageable;
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query)
*/
@Override
public Object execute(Query query) {
if (pageable != null) {
query = applyPagination(query, pageable);
}
return readCollection(query);
}
}

View File

@@ -22,7 +22,6 @@ import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.convert.MongoWriter;
import org.springframework.data.mongodb.core.geo.Distance;
import org.springframework.data.mongodb.core.geo.Point;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.repository.query.ParameterAccessor;
import org.springframework.util.Assert;
@@ -111,14 +110,6 @@ public class ConvertingParameterAccessor implements MongoParameterAccessor {
return writer.convertToMongoType(value);
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.query.ParameterAccessor#hasBindableNullValue()
*/
public boolean hasBindableNullValue() {
return delegate.hasBindableNullValue();
}
/**
* Custom {@link Iterator} to convert items before returning them.
*
@@ -138,33 +129,37 @@ public class ConvertingParameterAccessor implements MongoParameterAccessor {
}
/*
* (non-Javadoc)
* @see java.util.Iterator#hasNext()
*/
* (non-Javadoc)
*
* @see java.util.Iterator#hasNext()
*/
public boolean hasNext() {
return delegate.hasNext();
}
/*
* (non-Javadoc)
* @see java.util.Iterator#next()
*/
* (non-Javadoc)
*
* @see java.util.Iterator#next()
*/
public Object next() {
return delegate.next();
}
/*
* (non-Javadoc)
/* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.ConvertingParameterAccessor.PotentiallyConvertingIterator#nextConverted()
*/
public Object nextConverted(MongoPersistentProperty property) {
return property.isAssociation() ? writer.toDBRef(next(), property) : getConvertedValue(next());
public Object nextConverted() {
return getConvertedValue(next());
}
/*
* (non-Javadoc)
* @see java.util.Iterator#remove()
*/
* (non-Javadoc)
*
* @see java.util.Iterator#remove()
*/
public void remove() {
delegate.remove();
}
@@ -182,6 +177,6 @@ public class ConvertingParameterAccessor implements MongoParameterAccessor {
*
* @return
*/
Object nextConverted(MongoPersistentProperty property);
Object nextConverted();
}
}

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2010-2012 the original author or authors.
* Copyright 2002-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -20,8 +20,8 @@ import static org.springframework.data.mongodb.core.query.Criteria.*;
import java.util.Collection;
import java.util.Iterator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.data.domain.Sort;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mapping.context.PersistentPropertyPath;
@@ -31,6 +31,7 @@ import org.springframework.data.mongodb.core.geo.Shape;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.OrQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor.PotentiallyConvertingIterator;
import org.springframework.data.repository.query.parser.AbstractQueryCreator;
@@ -44,9 +45,9 @@ import org.springframework.util.Assert;
*
* @author Oliver Gierke
*/
class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
class MongoQueryCreator extends AbstractQueryCreator<Query, Query> {
private static final Logger LOG = LoggerFactory.getLogger(MongoQueryCreator.class);
private static final Log LOG = LogFactory.getLog(MongoQueryCreator.class);
private final MongoParameterAccessor accessor;
private final boolean isGeoNearQuery;
@@ -87,115 +88,110 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.query.parser.AbstractQueryCreator#create(org.springframework.data.repository.query.parser.Part, java.util.Iterator)
*/
* (non-Javadoc)
* @see org.springframework.data.repository.query.parser.AbstractQueryCreator#create(org.springframework.data.repository.query.parser.Part, java.util.Iterator)
*/
@Override
protected Criteria create(Part part, Iterator<Object> iterator) {
protected Query create(Part part, Iterator<Object> iterator) {
if (isGeoNearQuery && part.getType().equals(Type.NEAR)) {
return null;
}
PersistentPropertyPath<MongoPersistentProperty> path = context.getPersistentPropertyPath(part.getProperty());
MongoPersistentProperty property = path.getLeafProperty();
Criteria criteria = from(part.getType(), property,
Criteria criteria = from(part.getType(),
where(path.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE)),
(PotentiallyConvertingIterator) iterator);
return criteria;
return new Query(criteria);
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.query.parser.AbstractQueryCreator#and(org.springframework.data.repository.query.parser.Part, java.lang.Object, java.util.Iterator)
*/
* (non-Javadoc)
* @see org.springframework.data.repository.query.parser.AbstractQueryCreator#and(org.springframework.data.repository.query.parser.Part, java.lang.Object, java.util.Iterator)
*/
@Override
protected Criteria and(Part part, Criteria base, Iterator<Object> iterator) {
protected Query and(Part part, Query base, Iterator<Object> iterator) {
if (base == null) {
return create(part, iterator);
}
PersistentPropertyPath<MongoPersistentProperty> path = context.getPersistentPropertyPath(part.getProperty());
MongoPersistentProperty property = path.getLeafProperty();
PersistentPropertyPath<MongoPersistentProperty> path2 = context.getPersistentPropertyPath(part.getProperty());
return from(part.getType(), property,
base.and(path.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE)),
Criteria criteria = from(part.getType(),
where(path2.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE)),
(PotentiallyConvertingIterator) iterator);
return base.addCriteria(criteria);
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.query.parser.AbstractQueryCreator#or(java.lang.Object, java.lang.Object)
*/
* (non-Javadoc)
*
* @see
* org.springframework.data.repository.query.parser.AbstractQueryCreator
* #or(java.lang.Object, java.lang.Object)
*/
@Override
protected Criteria or(Criteria base, Criteria criteria) {
Criteria result = new Criteria();
return result.orOperator(base, criteria);
protected Query or(Query base, Query query) {
return new OrQuery(new Query[] { base, query });
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.query.parser.AbstractQueryCreator#complete(java.lang.Object, org.springframework.data.domain.Sort)
*/
* (non-Javadoc)
*
* @see
* org.springframework.data.repository.query.parser.AbstractQueryCreator
* #complete(java.lang.Object, org.springframework.data.domain.Sort)
*/
@Override
protected Query complete(Criteria criteria, Sort sort) {
protected Query complete(Query query, Sort sort) {
if (criteria == null) {
if (query == null) {
return null;
}
Query query = new Query(criteria);
QueryUtils.applySorting(query, sort);
if (LOG.isDebugEnabled()) {
LOG.debug("Created query " + query);
LOG.debug("Created query " + query.getQueryObject());
}
return query;
}
/**
* Populates the given {@link CriteriaDefinition} depending on the {@link Part} given.
* Populates the given {@link CriteriaDefinition} depending on the {@link Type} given.
*
* @param part
* @param property
* @param type
* @param criteria
* @param parameters
* @return
*/
private Criteria from(Type type, MongoPersistentProperty property, Criteria criteria,
PotentiallyConvertingIterator parameters) {
private Criteria from(Type type, Criteria criteria, PotentiallyConvertingIterator parameters) {
switch (type) {
case AFTER:
case GREATER_THAN:
return criteria.gt(parameters.nextConverted(property));
return criteria.gt(parameters.nextConverted());
case GREATER_THAN_EQUAL:
return criteria.gte(parameters.nextConverted(property));
case BEFORE:
return criteria.gte(parameters.nextConverted());
case LESS_THAN:
return criteria.lt(parameters.nextConverted(property));
return criteria.lt(parameters.nextConverted());
case LESS_THAN_EQUAL:
return criteria.lte(parameters.nextConverted(property));
return criteria.lte(parameters.nextConverted());
case BETWEEN:
return criteria.gt(parameters.nextConverted(property)).lt(parameters.nextConverted(property));
return criteria.gt(parameters.nextConverted()).lt(parameters.nextConverted());
case IS_NOT_NULL:
return criteria.ne(null);
case IS_NULL:
return criteria.is(null);
case NOT_IN:
return criteria.nin(nextAsArray(parameters, property));
return criteria.nin(nextAsArray(parameters));
case IN:
return criteria.in(nextAsArray(parameters, property));
return criteria.in(nextAsArray(parameters));
case LIKE:
case STARTING_WITH:
case ENDING_WITH:
case CONTAINING:
String value = parameters.next().toString();
return criteria.regex(toLikeRegex(value, type));
return criteria.regex(toLikeRegex(value));
case REGEX:
return criteria.regex(parameters.next().toString());
case EXISTS:
@@ -226,9 +222,9 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
Object parameter = parameters.next();
return criteria.within((Shape) parameter);
case SIMPLE_PROPERTY:
return criteria.is(parameters.nextConverted(property));
return criteria.is(parameters.nextConverted());
case NEGATING_SIMPLE_PROPERTY:
return criteria.not().is(parameters.nextConverted(property));
return criteria.not().is(parameters.nextConverted());
}
throw new IllegalArgumentException("Unsupported keyword!");
@@ -254,8 +250,8 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
parameter.getClass()));
}
private Object[] nextAsArray(PotentiallyConvertingIterator iterator, MongoPersistentProperty property) {
Object next = iterator.nextConverted(property);
private Object[] nextAsArray(PotentiallyConvertingIterator iterator) {
Object next = iterator.nextConverted();
if (next instanceof Collection) {
return ((Collection<?>) next).toArray();
@@ -266,20 +262,8 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
return new Object[] { next };
}
private String toLikeRegex(String source, Type type) {
switch (type) {
case STARTING_WITH:
source = source + "*";
break;
case ENDING_WITH:
source = "*" + source;
break;
case CONTAINING:
source = "*" + source + "*";
break;
}
private String toLikeRegex(String source) {
return source.replaceAll("\\*", ".*");
}
}
}
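The keyword handling in from(…) together with the wildcard handling in toLikeRegex(…) is what turns derived finder method names into Mongo criteria. A minimal sketch of that translation, assuming a hypothetical Person domain class and PersonRepository (none of these names are part of the diff):

import java.util.List;

import org.springframework.data.mongodb.repository.MongoRepository;

// Hypothetical domain type used only for illustration.
class Person {
    String id;
    String lastname;
    int age;
}

// Hypothetical repository; comments show roughly the query each keyword produces.
interface PersonRepository extends MongoRepository<Person, String> {

    // SIMPLE_PROPERTY -> { "lastname" : lastname }
    List<Person> findByLastname(String lastname);

    // STARTING_WITH -> toLikeRegex appends "*", e.g. { "lastname" : { "$regex" : "Gie.*" } }
    List<Person> findByLastnameStartingWith(String prefix);

    // BETWEEN -> { "age" : { "$gt" : from, "$lt" : to } }
    List<Person> findByAgeBetween(int from, int to);

    // IN -> { "age" : { "$in" : [ ... ] } }; nextAsArray(...) unwraps the collection
    List<Person> findByAgeIn(List<Integer> ages);
}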


@@ -18,14 +18,13 @@ package org.springframework.data.mongodb.repository.query;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.bson.types.ObjectId;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Query;
import com.mongodb.util.JSON;
/**
* Query to use a plain JSON String to create the {@link Query} to actually execute.
*
@@ -34,7 +33,7 @@ import com.mongodb.util.JSON;
public class StringBasedMongoQuery extends AbstractMongoQuery {
private static final Pattern PLACEHOLDER = Pattern.compile("\\?(\\d+)");
private static final Logger LOG = LoggerFactory.getLogger(StringBasedMongoQuery.class);
private static final Log LOG = LogFactory.getLog(StringBasedMongoQuery.class);
private final String query;
private final String fieldSpec;
@@ -56,9 +55,12 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#createQuery(org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor)
*/
* (non-Javadoc)
*
* @see
* org.springframework.data.mongodb.repository.AbstractMongoQuery#createQuery(org.springframework.data.
* repository.query.SimpleParameterAccessor, org.springframework.data.mongodb.core.core.support.convert.MongoConverter)
*/
@Override
protected Query createQuery(ConvertingParameterAccessor accessor) {
@@ -97,6 +99,17 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
}
private String getParameterWithIndex(ConvertingParameterAccessor accessor, int index) {
return JSON.serialize(accessor.getBindableValue(index));
Object parameter = accessor.getBindableValue(index);
if (parameter == null) {
return "null";
} else if (parameter instanceof String || parameter.getClass().isEnum()) {
return String.format("\"%s\"", parameter);
} else if (parameter instanceof ObjectId) {
return String.format("{ '$oid' : '%s' }", parameter);
}
return parameter.toString();
}
}
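getParameterWithIndex(…) is what replaces the ?0-style placeholders matched by the PLACEHOLDER pattern before the query string is parsed. A minimal sketch of an annotated query method that relies on this binding (the repository, domain type and field names are assumptions, not part of the diff):

import java.util.List;

import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.data.mongodb.repository.Query;

// Hypothetical domain type.
class Person {
    String id;
    String lastname;
}

interface PersonRepository extends MongoRepository<Person, String> {

    // ?0 is replaced with the serialized first argument, so
    // findByLastname("Gierke") executes { 'lastname' : "Gierke" }.
    @Query("{ 'lastname' : ?0 }")
    List<Person> findByLastname(String lastname);
}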


@@ -19,8 +19,8 @@ import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.index.Index;
@@ -43,7 +43,7 @@ import org.springframework.util.Assert;
class IndexEnsuringQueryCreationListener implements QueryCreationListener<PartTreeMongoQuery> {
private static final Set<Type> GEOSPATIAL_TYPES = new HashSet<Type>(Arrays.asList(Type.NEAR, Type.WITHIN));
private static final Logger LOG = LoggerFactory.getLogger(IndexEnsuringQueryCreationListener.class);
private static final Log LOG = LogFactory.getLog(IndexEnsuringQueryCreationListener.class);
private final MongoOperations operations;


@@ -81,7 +81,7 @@ public class MongoRepositoryFactory extends RepositoryFactorySupport {
protected Object getTargetRepository(RepositoryMetadata metadata) {
Class<?> repositoryInterface = metadata.getRepositoryInterface();
MongoEntityInformation<?, Serializable> entityInformation = getEntityInformation(metadata.getDomainType());
MongoEntityInformation<?, Serializable> entityInformation = getEntityInformation(metadata.getDomainClass());
if (isQueryDslRepository(repositoryInterface)) {
return new QueryDslMongoRepository(entityInformation, mongoOperations);


@@ -1,5 +1,5 @@
/*
* Copyright 2011-2012 the original author or authors.
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,14 +17,20 @@ package org.springframework.data.mongodb.repository.support;
import java.io.Serializable;
import java.util.List;
import java.util.regex.Pattern;
import org.apache.commons.collections15.Transformer;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Order;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.repository.query.MongoEntityInformation;
import org.springframework.data.querydsl.EntityPathResolver;
import org.springframework.data.querydsl.QueryDslPredicateExecutor;
@@ -32,10 +38,15 @@ import org.springframework.data.querydsl.SimpleEntityPathResolver;
import org.springframework.data.repository.core.EntityMetadata;
import org.springframework.util.Assert;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mysema.query.mongodb.MongodbQuery;
import com.mysema.query.mongodb.MongodbSerializer;
import com.mysema.query.types.EntityPath;
import com.mysema.query.types.Expression;
import com.mysema.query.types.OrderSpecifier;
import com.mysema.query.types.Path;
import com.mysema.query.types.PathMetadata;
import com.mysema.query.types.Predicate;
import com.mysema.query.types.path.PathBuilder;
@@ -47,6 +58,7 @@ import com.mysema.query.types.path.PathBuilder;
public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleMongoRepository<T, ID> implements
QueryDslPredicateExecutor<T> {
private final MongodbSerializer serializer;
private final PathBuilder<T> builder;
/**
@@ -57,6 +69,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
* @param template
*/
public QueryDslMongoRepository(MongoEntityInformation<T, ID> entityInformation, MongoOperations mongoOperations) {
this(entityInformation, mongoOperations, SimpleEntityPathResolver.INSTANCE);
}
@@ -75,27 +88,40 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
Assert.notNull(resolver);
EntityPath<T> path = resolver.createPath(entityInformation.getJavaType());
this.builder = new PathBuilder<T>(path.getType(), path.getMetadata());
this.serializer = new SpringDataMongodbSerializer(mongoOperations.getConverter());
}
/*
* (non-Javadoc)
* @see org.springframework.data.querydsl.QueryDslPredicateExecutor#findOne(com.mysema.query.types.Predicate)
*
* @see
* org.springframework.data.mongodb.repository.QueryDslExecutor
* #findOne(com.mysema.query.types.Predicate)
*/
public T findOne(Predicate predicate) {
return createQueryFor(predicate).uniqueResult();
}
/*
* (non-Javadoc)
* @see org.springframework.data.querydsl.QueryDslPredicateExecutor#findAll(com.mysema.query.types.Predicate)
*
* @see
* org.springframework.data.mongodb.repository.QueryDslExecutor
* #findAll(com.mysema.query.types.Predicate)
*/
public List<T> findAll(Predicate predicate) {
return createQueryFor(predicate).list();
}
/*
* (non-Javadoc)
* @see org.springframework.data.querydsl.QueryDslPredicateExecutor#findAll(com.mysema.query.types.Predicate, com.mysema.query.types.OrderSpecifier<?>[])
*
* @see
* org.springframework.data.mongodb.repository.QueryDslExecutor
* #findAll(com.mysema.query.types.Predicate,
* com.mysema.query.types.OrderSpecifier<?>[])
*/
public List<T> findAll(Predicate predicate, OrderSpecifier<?>... orders) {
@@ -104,7 +130,11 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
/*
* (non-Javadoc)
* @see org.springframework.data.querydsl.QueryDslPredicateExecutor#findAll(com.mysema.query.types.Predicate, org.springframework.data.domain.Pageable)
*
* @see
* org.springframework.data.mongodb.repository.QueryDslExecutor
* #findAll(com.mysema.query.types.Predicate,
* org.springframework.data.domain.Pageable)
*/
public Page<T> findAll(Predicate predicate, Pageable pageable) {
@@ -116,9 +146,13 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
/*
* (non-Javadoc)
* @see org.springframework.data.querydsl.QueryDslPredicateExecutor#count(com.mysema.query.types.Predicate)
*
* @see
* org.springframework.data.mongodb.repository.QueryDslExecutor
* #count(com.mysema.query.types.Predicate)
*/
public long count(Predicate predicate) {
return createQueryFor(predicate).count();
}
@@ -130,9 +164,13 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
*/
private MongodbQuery<T> createQueryFor(Predicate predicate) {
Class<T> domainType = getEntityInformation().getJavaType();
MongodbQuery<T> query = new SpringDataMongodbQuery<T>(getMongoOperations(), domainType);
DBCollection collection = getMongoOperations().getCollection(getEntityInformation().getCollectionName());
MongodbQuery<T> query = new MongodbQuery<T>(collection, new Transformer<DBObject, T>() {
public T transform(DBObject input) {
Class<T> type = getEntityInformation().getJavaType();
return getMongoOperations().getConverter().read(type, input);
}
}, serializer);
return query.where(predicate);
}
@@ -187,4 +225,40 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
return new OrderSpecifier(order.isAscending() ? com.mysema.query.types.Order.ASC
: com.mysema.query.types.Order.DESC, property);
}
/**
* Custom {@link MongodbSerializer} to take mapping information into account when building keys for constraints.
*
* @author Oliver Gierke
*/
static class SpringDataMongodbSerializer extends MongodbSerializer {
private final MongoConverter converter;
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
/**
* Creates a new {@link SpringDataMongodbSerializer} for the given {@link MappingContext}.
*
* @param mappingContext
*/
public SpringDataMongodbSerializer(MongoConverter converter) {
this.mappingContext = converter.getMappingContext();
this.converter = converter;
}
@Override
protected String getKeyForPath(Path<?> expr, PathMetadata<?> metadata) {
Path<?> parent = metadata.getParent();
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(parent.getType());
MongoPersistentProperty property = entity.getPersistentProperty(metadata.getExpression().toString());
return property == null ? super.getKeyForPath(expr, metadata) : property.getFieldName();
}
@Override
protected DBObject asDBObject(String key, Object value) {
return super.asDBObject(key, value instanceof Pattern ? value : converter.convertToMongoType(value));
}
}
}
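The point of overriding getKeyForPath(…) is that Querydsl predicates get serialized against the mapped field names rather than the Java property names, while asDBObject(…) runs values through the MongoConverter. A small sketch of the effect, assuming a Person entity with a custom @Field mapping and a Querydsl-generated QPerson type (both are illustrative, not part of the diff):

import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.Field;

// Hypothetical entity; the property "lastname" is stored as "ln".
@Document
class Person {
    String id;

    @Field("ln")
    String lastname;
}

// With the serializer above, a predicate such as
//
//     QPerson.person.lastname.eq("Gierke")
//
// is serialized to { "ln" : "Gierke" } instead of { "lastname" : "Gierke" },
// because getKeyForPath(...) resolves the property's mapped field name.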


@@ -1,5 +1,5 @@
/*
* Copyright 2011-2012 the original author or authors.
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,12 +15,16 @@
*/
package org.springframework.data.mongodb.repository.support;
import org.apache.commons.collections15.Transformer;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.util.Assert;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mysema.query.mongodb.MongodbQuery;
import com.mysema.query.mongodb.MongodbSerializer;
import com.mysema.query.types.EntityPath;
/**
@@ -32,6 +36,7 @@ public abstract class QuerydslRepositorySupport {
private final MongoOperations template;
private final MappingContext<? extends MongoPersistentEntity<?>, ?> context;
private final MongodbSerializer serializer;
/**
* Creates a new {@link QuerydslRepositorySupport} for the given {@link MongoOperations}.
@@ -39,11 +44,10 @@ public abstract class QuerydslRepositorySupport {
* @param operations must not be {@literal null}
*/
public QuerydslRepositorySupport(MongoOperations operations) {
Assert.notNull(operations);
this.template = operations;
this.context = operations.getConverter().getMappingContext();
this.serializer = new MongodbSerializer();
}
/**
@@ -71,6 +75,11 @@ public abstract class QuerydslRepositorySupport {
Assert.notNull(path);
Assert.hasText(collection);
return new SpringDataMongodbQuery<T>(template, path.getType(), collection);
DBCollection dbCollection = template.getCollection(collection);
return new MongodbQuery<T>(dbCollection, new Transformer<DBObject, T>() {
public T transform(DBObject input) {
return template.getConverter().read(path.getType(), input);
}
}, serializer);
}
}
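Since from(…) simply wraps the given collection in a query whose results are read back through the template's converter, a custom repository implementation can build Querydsl queries directly against it. A minimal sketch, assuming a Person entity, a Querydsl-generated QPerson type and a "person" collection (all of these are assumptions):

import java.util.List;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.repository.support.QuerydslRepositorySupport;

// Hypothetical custom repository implementation.
class PersonRepositoryImpl extends QuerydslRepositorySupport {

    public PersonRepositoryImpl(MongoOperations operations) {
        super(operations);
    }

    public List<Person> findByLastname(String lastname) {
        QPerson person = QPerson.person;
        // from(...) creates a query bound to the "person" collection.
        return from(person, "person").where(person.lastname.eq(lastname)).list();
    }
}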


@@ -20,9 +20,7 @@ import static org.springframework.data.mongodb.core.query.Criteria.*;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
@@ -32,9 +30,9 @@ import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.data.mongodb.repository.query.MongoEntityInformation;
import org.springframework.data.mongodb.repository.query.QueryUtils;
import org.springframework.data.repository.PagingAndSortingRepository;
import org.springframework.util.Assert;
/**
@@ -42,7 +40,7 @@ import org.springframework.util.Assert;
*
* @author Oliver Gierke
*/
public class SimpleMongoRepository<T, ID extends Serializable> implements MongoRepository<T, ID> {
public class SimpleMongoRepository<T, ID extends Serializable> implements PagingAndSortingRepository<T, ID> {
private final MongoOperations mongoOperations;
private final MongoEntityInformation<T, ID> entityInformation;
@@ -65,7 +63,7 @@ public class SimpleMongoRepository<T, ID extends Serializable> implements MongoR
* (non-Javadoc)
* @see org.springframework.data.repository.CrudRepository#save(java.lang.Object)
*/
public <S extends T> S save(S entity) {
public T save(T entity) {
Assert.notNull(entity, "Entity must not be null!");
@@ -77,13 +75,13 @@ public class SimpleMongoRepository<T, ID extends Serializable> implements MongoR
* (non-Javadoc)
* @see org.springframework.data.repository.CrudRepository#save(java.lang.Iterable)
*/
public <S extends T> List<S> save(Iterable<S> entities) {
public List<T> save(Iterable<? extends T> entities) {
Assert.notNull(entities, "The given Iterable of entities must not be null!");
List<S> result = new ArrayList<S>();
List<T> result = new ArrayList<T>();
for (S entity : entities) {
for (T entity : entities) {
save(entity);
result.add(entity);
}
@@ -173,23 +171,10 @@ public class SimpleMongoRepository<T, ID extends Serializable> implements MongoR
* @see org.springframework.data.repository.CrudRepository#findAll()
*/
public List<T> findAll() {
return findAll(new Query());
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.CrudRepository#findAll(java.lang.Iterable)
*/
public Iterable<T> findAll(Iterable<ID> ids) {
Set<ID> parameters = new HashSet<ID>();
for (ID id : ids) {
parameters.add(id);
}
return findAll(new Query(new Criteria(entityInformation.getIdAttribute()).in(parameters)));
}
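The findAll(Iterable<ID>) variant shown here collects the given ids and issues a single query with an $in criterion on the entity's id attribute. An equivalent query built by hand (the field name and id values are illustrative):

import java.util.Arrays;

import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

class FindAllByIdSketch {

    // Roughly what findAll(Arrays.asList("1", "2")) executes:
    // { "_id" : { "$in" : [ "1", "2" ] } }
    Query queryForIds() {
        return new Query(new Criteria("_id").in(Arrays.asList("1", "2")));
    }
}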
/*
* (non-Javadoc)
* @see org.springframework.data.repository.PagingAndSortingRepository#findAll(org.springframework.data.domain.Pageable)


@@ -1,70 +0,0 @@
/*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.support;
import org.springframework.data.mongodb.core.MongoOperations;
import com.google.common.base.Function;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mysema.query.mongodb.MongodbQuery;
/**
* Spring Data specific {@link MongodbQuery} implementation.
*
* @author Oliver Gierke
*/
class SpringDataMongodbQuery<T> extends MongodbQuery<T> {
private final MongoOperations operations;
/**
* Creates a new {@link SpringDataMongodbQuery}.
*
* @param operations must not be {@literal null}.
* @param type must not be {@literal null}.
*/
public SpringDataMongodbQuery(final MongoOperations operations, final Class<? extends T> type) {
this(operations, type, operations.getCollectionName(type));
}
/**
* Creates a new {@link SpringDataMongodbQuery} to query the given collection.
*
* @param operations must not be {@literal null}.
* @param type must not be {@literal null}.
* @param collectionName must not be {@literal null} or empty.
*/
public SpringDataMongodbQuery(final MongoOperations operations, final Class<? extends T> type, String collectionName) {
super(operations.getCollection(collectionName), new Function<DBObject, T>() {
public T apply(DBObject input) {
return operations.getConverter().read(type, input);
}
}, new SpringDataMongodbSerializer(operations.getConverter()));
this.operations = operations;
}
/*
* (non-Javadoc)
* @see com.mysema.query.mongodb.MongodbQuery#getCollection(java.lang.Class)
*/
@Override
protected DBCollection getCollection(Class<?> type) {
return operations.getCollection(operations.getCollectionName(type));
}
}
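SpringDataMongodbQuery plugs the converter-backed Function and the SpringDataMongodbSerializer into MongodbQuery, which is how QueryDslMongoRepository creates its queries. A minimal usage sketch; because the class is package-private, the sketch assumes it lives in the same package, and Person/QPerson are an assumed domain type and its Querydsl-generated query type:

package org.springframework.data.mongodb.repository.support;

import java.util.List;

import org.springframework.data.mongodb.core.MongoOperations;

class SpringDataMongodbQuerySketch {

    List<Person> findByLastname(MongoOperations operations, String lastname) {
        QPerson person = QPerson.person;
        // Documents are read back through operations.getConverter(), see apply(...) above.
        return new SpringDataMongodbQuery<Person>(operations, Person.class)
                .where(person.lastname.eq(lastname))
                .list();
    }
}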


@@ -1,83 +0,0 @@
/*
* Copyright 2011-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.support;
import java.util.regex.Pattern;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.util.Assert;
import com.mongodb.DBObject;
import com.mysema.query.mongodb.MongodbSerializer;
import com.mysema.query.types.Path;
import com.mysema.query.types.PathMetadata;
/**
* Custom {@link MongodbSerializer} to take mapping information into account when building keys for constraints.
*
* @author Oliver Gierke
*/
class SpringDataMongodbSerializer extends MongodbSerializer {
private final MongoConverter converter;
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
private final QueryMapper mapper;
/**
* Creates a new {@link SpringDataMongodbSerializer} for the given {@link MappingContext}.
*
* @param mappingContext
*/
public SpringDataMongodbSerializer(MongoConverter converter) {
Assert.notNull(converter, "MongoConverter must not be null!");
this.mappingContext = converter.getMappingContext();
this.converter = converter;
this.mapper = new QueryMapper(converter);
}
/*
* (non-Javadoc)
* @see com.mysema.query.mongodb.MongodbSerializer#getKeyForPath(com.mysema.query.types.Path, com.mysema.query.types.PathMetadata)
*/
@Override
protected String getKeyForPath(Path<?> expr, PathMetadata<?> metadata) {
Path<?> parent = metadata.getParent();
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(parent.getType());
MongoPersistentProperty property = entity.getPersistentProperty(metadata.getExpression().toString());
return property == null ? super.getKeyForPath(expr, metadata) : property.getFieldName();
}
/*
* (non-Javadoc)
* @see com.mysema.query.mongodb.MongodbSerializer#asDBObject(java.lang.String, java.lang.Object)
*/
@Override
protected DBObject asDBObject(String key, Object value) {
if ("_id".equals(key)) {
return super.asDBObject(key, mapper.convertId(value));
}
return super.asDBObject(key, value instanceof Pattern ? value : converter.convertToMongoType(value));
}
}
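The dedicated "_id" branch runs the value through QueryMapper.convertId(…), so that, for example, a String id used in a Querydsl predicate ends up as an ObjectId in the actual query. A minimal sketch of that conversion in isolation (the helper class and the hex id are illustrative):

import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.convert.QueryMapper;

class ConvertIdSketch {

    // A 24-character hex String is converted to an ObjectId, which is what the
    // "_id".equals(key) branch above relies on when serializing id predicates.
    Object convertId(MongoConverter converter) {
        return new QueryMapper(converter).convertId("4f1d6b8c3a2e4f0001abcdef");
    }
}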


@@ -1 +0,0 @@
org.springframework.data.mongodb.repository.cdi.MongoRepositoryExtension


@@ -1,3 +1,2 @@
http\://www.springframework.org/schema/data/mongo/spring-mongo-1.1.xsd=org/springframework/data/mongodb/config/spring-mongo-1.1.xsd
http\://www.springframework.org/schema/data/mongo/spring-mongo-1.0.xsd=org/springframework/data/mongodb/config/spring-mongo-1.0.xsd
http\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-1.1.xsd
http\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-1.0.xsd


@@ -1,467 +0,0 @@
<?xml version="1.0" encoding="UTF-8" ?>
<xsd:schema xmlns="http://www.springframework.org/schema/data/mongo"
xmlns:xsd="http://www.w3.org/2001/XMLSchema"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:beans="http://www.springframework.org/schema/beans"
xmlns:tool="http://www.springframework.org/schema/tool"
xmlns:context="http://www.springframework.org/schema/context"
xmlns:repository="http://www.springframework.org/schema/data/repository"
targetNamespace="http://www.springframework.org/schema/data/mongo"
elementFormDefault="qualified" attributeFormDefault="unqualified">
<xsd:import namespace="http://www.springframework.org/schema/beans" />
<xsd:import namespace="http://www.springframework.org/schema/tool" />
<xsd:import namespace="http://www.springframework.org/schema/context"
schemaLocation="http://www.springframework.org/schema/context/spring-context.xsd" />
<xsd:import namespace="http://www.springframework.org/schema/data/repository"
schemaLocation="http://www.springframework.org/schema/data/repository/spring-repository.xsd" />
<xsd:element name="mongo" type="mongoType">
<xsd:annotation>
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoFactoryBean"><![CDATA[
Defines a Mongo instance used for accessing MongoDB.
]]></xsd:documentation>
<xsd:appinfo>
<tool:annotation>
<tool:exports type="com.mongodb.Mongo"/>
</tool:annotation>
</xsd:appinfo>
</xsd:annotation>
</xsd:element>
<xsd:element name="db-factory">
<xsd:annotation>
<xsd:documentation><![CDATA[
Defines a MongoDbFactory for connecting to a specific database
]]></xsd:documentation>
</xsd:annotation>
<xsd:complexType>
<xsd:attribute name="id" type="xsd:ID" use="optional">
<xsd:annotation>
<xsd:documentation><![CDATA[
The name of the mongo definition (by default "mongoDbFactory").]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="mongo-ref" type="mongoRef" use="optional">
<xsd:annotation>
<xsd:documentation>
The reference to a Mongo. Will default to 'mongo'.
</xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="dbname" type="xsd:string" use="optional">
<xsd:annotation>
<xsd:documentation><![CDATA[
The name of the database to connect to. Default is 'db'.
]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="port" type="xsd:string" use="optional">
<xsd:annotation>
<xsd:documentation><![CDATA[
The port to connect to a MongoDB server. Default is 27017.
]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="host" type="xsd:string" use="optional">
<xsd:annotation>
<xsd:documentation><![CDATA[
The host to connect to a MongoDB server. Default is localhost
]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="username" type="xsd:string" use="optional">
<xsd:annotation>
<xsd:documentation><![CDATA[
The username to use when connecting to a MongoDB server.
]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="password" type="xsd:string" use="optional">
<xsd:annotation>
<xsd:documentation><![CDATA[
The password to use when connecting to a MongoDB server.
]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="uri" type="xsd:string" use="optional">
<xsd:annotation>
<xsd:documentation><![CDATA[
The Mongo URI string.]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="write-concern">
<xsd:annotation>
<xsd:documentation>
The WriteConcern that will be the default value used when asking the MongoDbFactory for a DB object
</xsd:documentation>
</xsd:annotation>
<xsd:simpleType>
<xsd:union memberTypes="writeConcernEnumeration xsd:string"/>
</xsd:simpleType>
</xsd:attribute>
</xsd:complexType>
</xsd:element>
<xsd:attributeGroup name="mongo-repository-attributes">
<xsd:attribute name="mongo-template-ref" type="mongoTemplateRef" default="mongoTemplate">
<xsd:annotation>
<xsd:documentation>
The reference to a MongoTemplate. Will default to 'mongoTemplate'.
</xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="create-query-indexes" type="xsd:boolean" default="false">
<xsd:annotation>
<xsd:documentation>
Enables creation of indexes for queries that get derived from the method name
and thus reference domain class properties. Defaults to false.
</xsd:documentation>
</xsd:annotation>
</xsd:attribute>
</xsd:attributeGroup>
<xsd:element name="repositories">
<xsd:complexType>
<xsd:complexContent>
<xsd:extension base="repository:repositories">
<xsd:attributeGroup ref="mongo-repository-attributes"/>
<xsd:attributeGroup ref="repository:repository-attributes"/>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
</xsd:element>
<xsd:element name="mapping-converter">
<xsd:annotation>
<xsd:documentation><![CDATA[Defines a MongoConverter for getting rich mapping functionality.]]></xsd:documentation>
<xsd:appinfo>
<tool:exports type="org.springframework.data.mongodb.core.convert.MappingMongoConverter" />
</xsd:appinfo>
</xsd:annotation>
<xsd:complexType>
<xsd:sequence>
<xsd:element name="custom-converters" minOccurs="0">
<xsd:annotation>
<xsd:documentation><![CDATA[
Top-level element that contains one or more custom converters to be used for mapping
domain objects to and from Mongo's DBObject]]>
</xsd:documentation>
</xsd:annotation>
<xsd:complexType>
<xsd:sequence>
<xsd:element name="converter" type="customConverterType" minOccurs="0" maxOccurs="unbounded"/>
</xsd:sequence>
<xsd:attribute name="base-package" type="xsd:string" />
</xsd:complexType>
</xsd:element>
</xsd:sequence>
<xsd:attribute name="id" type="xsd:ID" use="optional">
<xsd:annotation>
<xsd:documentation><![CDATA[
The name of the MappingMongoConverter instance (by default "mappingConverter").]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="base-package" type="xsd:string" use="optional">
<xsd:annotation>
<xsd:documentation><![CDATA[
The base package in which to scan for entities annotated with @Document
]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="db-factory-ref" type="xsd:string" use="optional">
<xsd:annotation>
<xsd:documentation>
The reference to a DbFactory.
</xsd:documentation>
<xsd:appinfo>
<tool:annotation kind="ref">
<tool:assignable-to type="org.springframework.data.mongodb.MongoDbFactory" />
</tool:annotation>
</xsd:appinfo>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="mongo-ref" type="mongoRef" use="optional">
<xsd:annotation>
<xsd:documentation>
The reference to a Mongo. Will default to 'mongo'.
</xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="mapping-context-ref" type="mappingContextRef" use="optional">
<xsd:annotation>
<xsd:documentation source="org.springframework.data.mapping.model.MappingContext">
The reference to a MappingContext. Will default to 'mappingContext'.
</xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="mongo-template-ref" type="mongoTemplateRef" use="optional">
<xsd:annotation>
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoTemplate">
The reference to a MongoTemplate. Will default to 'mongoTemplate'.
</xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="disable-validation" use="optional">
<xsd:annotation>
<xsd:documentation source="org.springframework.data.mongodb.core.mapping.event.ValidatingMongoEventListener">
Disables JSR-303 validation on MongoDB documents before they are saved. By default it is set to false.
</xsd:documentation>
</xsd:annotation>
<xsd:simpleType>
<xsd:union memberTypes="xsd:boolean xsd:string"/>
</xsd:simpleType>
</xsd:attribute>
</xsd:complexType>
</xsd:element>
<xsd:element name="jmx">
<xsd:annotation>
<xsd:documentation><![CDATA[
Defines JMX Model MBeans for monitoring a MongoDB server.
]]></xsd:documentation>
</xsd:annotation>
<xsd:complexType>
<xsd:attribute name="mongo-ref" type="mongoRef" use="optional">
<xsd:annotation>
<xsd:documentation><![CDATA[
The name of the Mongo object that determines which server to monitor (by default "mongo").]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
</xsd:complexType>
</xsd:element>
<xsd:simpleType name="mappingContextRef">
<xsd:annotation>
<xsd:appinfo>
<tool:annotation kind="ref">
<tool:assignable-to type="org.springframework.data.mapping.model.MappingContext"/>
</tool:annotation>
</xsd:appinfo>
</xsd:annotation>
<xsd:union memberTypes="xsd:string"/>
</xsd:simpleType>
<xsd:simpleType name="mongoTemplateRef">
<xsd:annotation>
<xsd:appinfo>
<tool:annotation kind="ref">
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoTemplate"/>
</tool:annotation>
</xsd:appinfo>
</xsd:annotation>
<xsd:union memberTypes="xsd:string"/>
</xsd:simpleType>
<xsd:simpleType name="mongoRef">
<xsd:annotation>
<xsd:appinfo>
<tool:annotation kind="ref">
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoFactoryBean"/>
</tool:annotation>
</xsd:appinfo>
</xsd:annotation>
<xsd:union memberTypes="xsd:string"/>
</xsd:simpleType>
<xsd:simpleType name="writeConcernEnumeration">
<xsd:restriction base="xsd:token">
<xsd:enumeration value="NONE" />
<xsd:enumeration value="NORMAL" />
<xsd:enumeration value="SAFE" />
<xsd:enumeration value="FSYNC_SAFE" />
<xsd:enumeration value="REPLICAS_SAFE" />
<xsd:enumeration value="JOURNAL_SAFE" />
<xsd:enumeration value="MAJORITY" />
</xsd:restriction>
</xsd:simpleType>
<!-- MLP
<xsd:attributeGroup name="writeConcern">
<xsd:attribute name="write-concern">
<xsd:simpleType>
<xsd:restriction base="xsd:string">
<xsd:enumeration value="NONE" />
<xsd:enumeration value="NORMAL" />
<xsd:enumeration value="SAFE" />
<xsd:enumeration value="FSYNC_SAFE" />
<xsd:enumeration value="REPLICA_SAFE" />
<xsd:enumeration value="JOURNAL_SAFE" />
<xsd:enumeration value="MAJORITY" />
</xsd:restriction>
</xsd:simpleType>
</xsd:attribute>
</xsd:attributeGroup>
-->
<xsd:complexType name="mongoType">
<xsd:sequence minOccurs="0" maxOccurs="1">
<xsd:element name="options" type="optionsType">
<xsd:annotation>
<xsd:documentation><![CDATA[
The Mongo driver options
]]></xsd:documentation>
<xsd:appinfo>
<tool:annotation>
<tool:exports type="com.mongodb.MongoOptions"/>
</tool:annotation>
</xsd:appinfo>
</xsd:annotation>
</xsd:element>
</xsd:sequence>
<xsd:attribute name="write-concern">
<xsd:annotation>
<xsd:documentation>
The WriteConcern that will be the default value used when asking the MongoDbFactory for a DB object
</xsd:documentation>
</xsd:annotation>
<xsd:simpleType>
<xsd:union memberTypes="writeConcernEnumeration xsd:string"/>
</xsd:simpleType>
</xsd:attribute>
<!-- MLP
<xsd:attributeGroup ref="writeConcern" />
-->
<xsd:attribute name="id" type="xsd:ID" use="optional">
<xsd:annotation>
<xsd:documentation><![CDATA[
The name of the mongo definition (by default "mongo").]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="port" type="xsd:string" use="optional">
<xsd:annotation>
<xsd:documentation><![CDATA[
The port to connect to a MongoDB server. Default is 27017.
]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="host" type="xsd:string" use="optional">
<xsd:annotation>
<xsd:documentation><![CDATA[
The host to connect to a MongoDB server. Default is localhost
]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="replica-set" type="xsd:string" use="optional">
<xsd:annotation>
<xsd:documentation><![CDATA[
The comma delimited list of host:port entries to use for replica set/pairs.
]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
</xsd:complexType>
<xsd:complexType name="optionsType">
<xsd:attribute name="connections-per-host" type="xsd:string">
<xsd:annotation>
<xsd:documentation><![CDATA[
The number of connections allowed per host. Will block if the pool runs out. Default is 10. The system property MONGO.POOLSIZE can override this.
]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="threads-allowed-to-block-for-connection-multiplier" type="xsd:string">
<xsd:annotation>
<xsd:documentation><![CDATA[
The multiplier of connectionsPerHost for the number of threads that can block. Default is 5.
If connectionsPerHost is 10 and threadsAllowedToBlockForConnectionMultiplier is 5,
then up to 50 threads can block; more than that and an exception will be thrown.
]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="max-wait-time" type="xsd:string">
<xsd:annotation>
<xsd:documentation><![CDATA[
The maximum wait time of a blocking thread for a connection. Default is 120000 ms (2 minutes).
]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="connect-timeout" type="xsd:string">
<xsd:annotation>
<xsd:documentation><![CDATA[
The connect timeout in milliseconds. The default is 0, meaning infinite.
]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="socket-timeout" type="xsd:string">
<xsd:annotation>
<xsd:documentation><![CDATA[
The socket timeout. The default is 0, meaning infinite.
]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="socket-keep-alive" type="xsd:string">
<xsd:annotation>
<xsd:documentation><![CDATA[
The keep-alive flag; controls whether socket keep-alive is enabled. Defaults to false.
]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="auto-connect-retry" type="xsd:string">
<xsd:annotation>
<xsd:documentation><![CDATA[
Controls whether the driver retries automatically on connect. Default is false.
]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="max-auto-connect-retry-time" type="xsd:long">
<xsd:annotation>
<xsd:documentation><![CDATA[
The maximum amount of time in milliseconds to spend retrying to open a connection to the same server. Default is 0, which means to use the default of 15s if autoConnectRetry is on.
]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="write-number" type="xsd:string">
<xsd:annotation>
<xsd:documentation><![CDATA[
Specifies the number of servers to wait for on the write operation, and the exception-raising behavior. The 'w' option to the getlasterror command. Defaults to 0.
]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="write-timeout" type="xsd:string">
<xsd:annotation>
<xsd:documentation><![CDATA[
Controls the timeout for write operations in milliseconds. The 'wtimeout' option to the getlasterror command. Defaults to 0 (indefinite); a value greater than zero is the number of milliseconds to wait.
]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="write-fsync" type="xsd:string">
<xsd:annotation>
<xsd:documentation><![CDATA[
This controls whether or not to fsync. The 'fsync' option to the getlasterror command. Defaults to false.
]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
<xsd:attribute name="slave-ok" type="xsd:string">
<xsd:annotation>
<xsd:documentation><![CDATA[
Controls whether the driver is allowed to read from secondaries (slaves). Defaults to false.
]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
</xsd:complexType>
<xsd:group name="beanElementGroup">
<xsd:choice>
<xsd:element ref="beans:bean"/>
<xsd:element ref="beans:ref"/>
</xsd:choice>
</xsd:group>
<xsd:complexType name="customConverterType">
<xsd:annotation>
<xsd:documentation><![CDATA[
Element defining a custom converter.
]]></xsd:documentation>
</xsd:annotation>
<xsd:group ref="beanElementGroup" minOccurs="0" maxOccurs="1"/>
<xsd:attribute name="ref" type="xsd:string">
<xsd:annotation>
<xsd:documentation>
A reference to a custom converter.
</xsd:documentation>
<xsd:appinfo>
<tool:annotation kind="ref"/>
</xsd:appinfo>
</xsd:annotation>
</xsd:attribute>
</xsd:complexType>
</xsd:schema>
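The db-factory element above is essentially declarative shorthand for wiring a SimpleMongoDbFactory, including the write-concern attribute. A programmatic equivalent (host, database name and concern are illustrative), mirroring the API usage in the integration tests further down:

import com.mongodb.Mongo;
import com.mongodb.WriteConcern;

import org.springframework.data.mongodb.core.SimpleMongoDbFactory;

class DbFactorySketch {

    // Roughly what <mongo:db-factory dbname="database" write-concern="SAFE"/> sets up.
    SimpleMongoDbFactory dbFactory() throws Exception {
        SimpleMongoDbFactory factory = new SimpleMongoDbFactory(new Mongo("localhost"), "database");
        factory.setWriteConcern(WriteConcern.SAFE);
        return factory;
    }
}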


@@ -1,5 +1,5 @@
/*
* Copyright 2011-2012 the original author or authors.
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -37,7 +37,7 @@ import org.springframework.stereotype.Component;
import com.mongodb.DBObject;
/**
* Integration tests for {@link MappingMongoConverterParser}.
* Integration tests for {@link MongoParser}.
*
* @author Oliver Gierke
*/


@@ -1,68 +0,0 @@
/*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.beans.factory.support.BeanDefinitionReader;
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import org.springframework.beans.factory.xml.XmlBeanDefinitionReader;
import org.springframework.core.io.ClassPathResource;
/**
* Integration test for the creation of an instance of
* {@link org.springframework.data.mongodb.core.mapping.event.ValidatingMongoEventListener} by defining
* {@code <mongo:mapping-converter />} in context XML.
*
* @see DATAMONGO-36
* @author Maciej Walkowiak
*/
public class MappingMongoConverterParserValidationIntegrationTests {
DefaultListableBeanFactory factory;
BeanDefinitionReader reader;
@Before
public void setUp() {
factory = new DefaultListableBeanFactory();
reader = new XmlBeanDefinitionReader(factory);
}
@Test
public void validatingEventListenerCreatedWithDefaultConfig() {
reader.loadBeanDefinitions(new ClassPathResource("namespace/converter-default.xml"));
assertThat(factory.getBean(BeanNames.VALIDATING_EVENT_LISTENER), is(not(nullValue())));
}
@Test
public void validatingEventListenerCreatedWhenValidationEnabled() {
reader.loadBeanDefinitions(new ClassPathResource("namespace/converter-validation-enabled.xml"));
assertThat(factory.getBean(BeanNames.VALIDATING_EVENT_LISTENER), is(not(nullValue())));
}
@Test(expected = NoSuchBeanDefinitionException.class)
public void validatingEventListenersIsNotCreatedWhenDisabled() {
reader.loadBeanDefinitions(new ClassPathResource("namespace/converter-validation-disabled.xml"));
factory.getBean(BeanNames.VALIDATING_EVENT_LISTENER);
}
}


@@ -32,7 +32,6 @@ import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.core.io.ClassPathResource;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.test.util.ReflectionTestUtils;
import com.mongodb.DB;
import com.mongodb.Mongo;
@@ -60,7 +59,7 @@ public class MongoDbFactoryParserIntegrationTests {
SimpleMongoDbFactory dbFactory = new SimpleMongoDbFactory(new Mongo("localhost"), "database");
dbFactory.setWriteConcern(WriteConcern.SAFE);
dbFactory.getDb();
assertThat(ReflectionTestUtils.getField(dbFactory, "writeConcern"), is((Object) WriteConcern.SAFE));
assertThat(WriteConcern.SAFE, is(dbFactory.getWriteConcern()));
}
@Test
@@ -92,17 +91,15 @@ public class MongoDbFactoryParserIntegrationTests {
private void assertWriteConcern(ClassPathXmlApplicationContext ctx, WriteConcern expectedWriteConcern) {
SimpleMongoDbFactory dbFactory = ctx.getBean("first", SimpleMongoDbFactory.class);
DB db = dbFactory.getDb();
assertThat(db.getName(), is("db"));
assertThat("db", is(db.getName()));
WriteConcern configuredConcern = (WriteConcern) ReflectionTestUtils.getField(dbFactory, "writeConcern");
MyWriteConcern myDbFactoryWriteConcern = new MyWriteConcern(configuredConcern);
MyWriteConcern myDbFactoryWriteConcern = new MyWriteConcern(dbFactory.getWriteConcern());
MyWriteConcern myDbWriteConcern = new MyWriteConcern(db.getWriteConcern());
MyWriteConcern myExpectedWriteConcern = new MyWriteConcern(expectedWriteConcern);
assertThat(myDbFactoryWriteConcern, is(myExpectedWriteConcern));
assertThat(myDbWriteConcern, is(myExpectedWriteConcern));
assertThat(myDbWriteConcern, is(myDbFactoryWriteConcern));
assertThat(myDbFactoryWriteConcern, equalTo(myExpectedWriteConcern));
assertThat(myDbWriteConcern, equalTo(myExpectedWriteConcern));
assertThat(myDbWriteConcern, equalTo(myDbFactoryWriteConcern));
}
// This test will fail since equals in WriteConcern uses == for _w and not .equals
@@ -111,7 +108,7 @@ public class MongoDbFactoryParserIntegrationTests {
String s2 = new String("rack1");
WriteConcern wc1 = new WriteConcern(s1);
WriteConcern wc2 = new WriteConcern(s2);
assertThat(wc1, is(wc2));
assertThat(wc1, equalTo(wc2));
}
@Test


@@ -1,5 +1,5 @@
/*
* Copyright 2011-2012 the original author or authors.
* Copyright 2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,7 +16,6 @@
package org.springframework.data.mongodb.config;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import java.util.List;
@@ -30,7 +29,6 @@ import org.springframework.data.mongodb.core.MongoFactoryBean;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.util.ReflectionTestUtils;
import com.mongodb.CommandResult;
import com.mongodb.Mongo;
@@ -38,45 +36,47 @@ import com.mongodb.ServerAddress;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration
public class MongoNamespaceReplicaSetTests {
public class MongoNamespaceReplicaSetTests extends NamespaceTestSupport {
@Autowired
private ApplicationContext ctx;
@Test
@SuppressWarnings("unchecked")
public void testParsingMongoWithReplicaSets() throws Exception {
assertTrue(ctx.containsBean("replicaSetMongo"));
MongoFactoryBean mfb = (MongoFactoryBean) ctx.getBean("&replicaSetMongo");
List<ServerAddress> replicaSetSeeds = (List<ServerAddress>) ReflectionTestUtils.getField(mfb, "replicaSetSeeds");
List<ServerAddress> replicaSetSeeds = readField("replicaSetSeeds", mfb);
assertNotNull(replicaSetSeeds);
assertEquals("127.0.0.1", replicaSetSeeds.get(0).getHost());
assertEquals(10001, replicaSetSeeds.get(0).getPort());
assertEquals("localhost", replicaSetSeeds.get(1).getHost());
assertEquals(10002, replicaSetSeeds.get(1).getPort());
assertThat(replicaSetSeeds, is(notNullValue()));
assertThat(replicaSetSeeds, hasItems(new ServerAddress("127.0.0.1", 10001), new ServerAddress("localhost", 10002)));
}
@Test
@SuppressWarnings("unchecked")
public void testParsingWithPropertyPlaceHolder() throws Exception {
assertTrue(ctx.containsBean("manyReplicaSetMongo"));
MongoFactoryBean mfb = (MongoFactoryBean) ctx.getBean("&manyReplicaSetMongo");
List<ServerAddress> replicaSetSeeds = (List<ServerAddress>) ReflectionTestUtils.getField(mfb, "replicaSetSeeds");
List<ServerAddress> replicaSetSeeds = readField("replicaSetSeeds", mfb);
assertNotNull(replicaSetSeeds);
assertEquals("192.168.174.130", replicaSetSeeds.get(0).getHost());
assertEquals(27017, replicaSetSeeds.get(0).getPort());
assertEquals("192.168.174.130", replicaSetSeeds.get(1).getHost());
assertEquals(27018, replicaSetSeeds.get(1).getPort());
assertEquals("192.168.174.130", replicaSetSeeds.get(2).getHost());
assertEquals(27019, replicaSetSeeds.get(2).getPort());
assertThat(replicaSetSeeds, is(notNullValue()));
assertThat(replicaSetSeeds, hasSize(3));
assertThat(
replicaSetSeeds,
hasItems(new ServerAddress("192.168.174.130", 27017), new ServerAddress("192.168.174.130", 27018),
new ServerAddress("192.168.174.130", 27019)));
}
@Test
@Ignore("CI infrastructure does not yet support replica sets")
public void testMongoWithReplicaSets() {
Mongo mongo = ctx.getBean(Mongo.class);
assertEquals(2, mongo.getAllAddress().size());
List<ServerAddress> servers = mongo.getAllAddress();
@@ -88,5 +88,6 @@ public class MongoNamespaceReplicaSetTests {
MongoTemplate template = new MongoTemplate(mongo, "admin");
CommandResult result = template.executeCommand("{replSetGetStatus : 1}");
assertEquals("blort", result.getString("set"));
}
}


@@ -16,19 +16,17 @@
package org.springframework.data.mongodb.config;
import static org.junit.Assert.*;
import static org.springframework.test.util.ReflectionTestUtils.*;
import static org.junit.Assert.*;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.data.authentication.UserCredentials;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.MongoFactoryBean;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import com.mongodb.Mongo;
import com.mongodb.MongoOptions;
@@ -64,7 +62,8 @@ public class MongoNamespaceTests {
Mongo mongo = (Mongo) getField(dbf, "mongo");
assertEquals("localhost", mongo.getAddress().getHost());
assertEquals(27017, mongo.getAddress().getPort());
assertEquals(new UserCredentials("joe", "secret"), getField(dbf, "credentials"));
assertEquals("joe", getField(dbf, "username"));
assertEquals("secret", getField(dbf, "password"));
assertEquals("database", getField(dbf, "databaseName"));
}


@@ -0,0 +1,42 @@
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;
import java.lang.reflect.Field;
public class NamespaceTestSupport {
@SuppressWarnings({ "unchecked" })
public static <T> T readField(String name, Object target) throws Exception {
Field field = null;
Class<?> clazz = target.getClass();
do {
try {
field = clazz.getDeclaredField(name);
} catch (Exception ex) {
}
clazz = clazz.getSuperclass();
} while (field == null && !clazz.equals(Object.class));
if (field == null)
throw new IllegalArgumentException("Cannot find field '" + name + "' in the class hierarchy of "
+ target.getClass());
field.setAccessible(true);
return (T) field.get(target);
}
}


@@ -1,80 +0,0 @@
/*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import java.net.UnknownHostException;
import java.util.Arrays;
import java.util.Collection;
import org.junit.Before;
import org.junit.Test;
import com.mongodb.ServerAddress;
/**
* Unit tests for {@link ServerAddressPropertyEditor}.
*
* @author Oliver Gierke
*/
public class ServerAddressPropertyEditorUnitTests {
ServerAddressPropertyEditor editor;
@Before
public void setUp() {
editor = new ServerAddressPropertyEditor();
}
/**
* @see DATAMONGO-454
*/
@Test(expected = IllegalArgumentException.class)
public void rejectsAddressConfigWithoutASingleParsableServerAddress() {
editor.setAsText("foo, bar");
}
/**
* @see DATAMONGO-454
*/
@Test
public void skipsUnparsableAddressIfAtLeastOneIsParsable() throws UnknownHostException {
editor.setAsText("foo, localhost");
assertSingleAddressOfLocalhost(editor.getValue());
}
/**
* @see DATAMONGO-454
*/
@Test
public void handlesEmptyAddressAsParseError() throws UnknownHostException {
editor.setAsText(", localhost");
assertSingleAddressOfLocalhost(editor.getValue());
}
private static void assertSingleAddressOfLocalhost(Object result) throws UnknownHostException {
assertThat(result, is(instanceOf(ServerAddress[].class)));
Collection<ServerAddress> addresses = Arrays.asList((ServerAddress[]) result);
assertThat(addresses, hasSize(1));
assertThat(addresses, hasItem(new ServerAddress("localhost")));
}
}


@@ -1,43 +0,0 @@
/*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import org.junit.Test;
import com.mongodb.WriteConcern;
/**
* Unit tests for {@link StringToWriteConcernConverter}.
*
* @author Oliver Gierke
*/
public class StringToWriteConcernConverterUnitTest {
StringToWriteConcernConverter converter = new StringToWriteConcernConverter();
@Test
public void createsWellKnownConstantsCorrectly() {
assertThat(converter.convert("SAFE"), is(WriteConcern.SAFE));
}
@Test
public void createsWriteConcernForUnknownValue() {
assertThat(converter.convert("-1"), is(new WriteConcern("-1")));
}
}


@@ -1,53 +0,0 @@
/*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.Test;
import com.mongodb.WriteConcern;
/**
* Unit tests for {@link WriteConcernPropertyEditor}.
*
* @author Oliver Gierke
*/
public class WriteConcernPropertyEditorUnitTests {
WriteConcernPropertyEditor editor;
@Before
public void setUp() {
editor = new WriteConcernPropertyEditor();
}
@Test
public void createsWriteConcernForWellKnownConstants() {
editor.setAsText("SAFE");
assertThat(editor.getValue(), is((Object) WriteConcern.SAFE));
}
@Test
public void createsWriteConcernForUnknownConstants() {
editor.setAsText("-1");
assertThat(editor.getValue(), is((Object) new WriteConcern("-1")));
}
}


@@ -1,65 +0,0 @@
/*
 * Copyright 2012 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.springframework.transaction.support.TransactionSynchronizationManager;

import com.mongodb.DB;
import com.mongodb.Mongo;

/**
 * Unit tests for {@link MongoDbUtils}.
 *
 * @author Oliver Gierke
 */
public class MongoDbUtilsUnitTests {

  Mongo mongo;

  @Before
  public void setUp() throws Exception {
    this.mongo = new Mongo();
    TransactionSynchronizationManager.initSynchronization();
  }

  @After
  public void tearDown() {
    for (Object key : TransactionSynchronizationManager.getResourceMap().keySet()) {
      TransactionSynchronizationManager.unbindResource(key);
    }
    TransactionSynchronizationManager.clearSynchronization();
  }

  @Test
  public void returnsNewInstanceForDifferentDatabaseName() {
    DB first = MongoDbUtils.getDB(mongo, "first");
    assertThat(first, is(notNullValue()));
    assertThat(MongoDbUtils.getDB(mongo, "first"), is(first));
    DB second = MongoDbUtils.getDB(mongo, "second");
    assertThat(second, is(not(first)));
    assertThat(MongoDbUtils.getDB(mongo, "second"), is(second));
  }
}
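
What the test above relies on is that MongoDbUtils binds the DB instance for each database name to the current thread while a synchronization is active. A hedged usage sketch; the database name is illustrative and the cleanup mirrors the test's tearDown:

import org.springframework.data.mongodb.core.MongoDbUtils;
import org.springframework.transaction.support.TransactionSynchronizationManager;

import com.mongodb.DB;
import com.mongodb.Mongo;

public class MongoDbUtilsUsageSketch {

  public static void main(String[] args) throws Exception {
    Mongo mongo = new Mongo();
    TransactionSynchronizationManager.initSynchronization();
    try {
      DB db = MongoDbUtils.getDB(mongo, "database");
      // within the same synchronization, repeated lookups return the very same instance
      System.out.println(db == MongoDbUtils.getDB(mongo, "database"));
    } finally {
      for (Object key : TransactionSynchronizationManager.getResourceMap().keySet()) {
        TransactionSynchronizationManager.unbindResource(key);
      }
      TransactionSynchronizationManager.clearSynchronization();
    }
  }
}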


@@ -1,52 +0,0 @@
/*
 * Copyright 2012 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;

import org.junit.Test;
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.data.mongodb.config.WriteConcernPropertyEditor;
import org.springframework.test.util.ReflectionTestUtils;

import com.mongodb.WriteConcern;

/**
 * Integration tests for {@link MongoFactoryBean}.
 *
 * @author Oliver Gierke
 */
public class MongoFactoryBeanIntegrationTest {

  /**
   * @see DATAMONGO-408
   */
  @Test
  public void convertsWriteConcernCorrectly() {
    RootBeanDefinition definition = new RootBeanDefinition(MongoFactoryBean.class);
    definition.getPropertyValues().addPropertyValue("writeConcern", "SAFE");
    DefaultListableBeanFactory factory = new DefaultListableBeanFactory();
    factory.registerCustomEditor(WriteConcern.class, WriteConcernPropertyEditor.class);
    factory.registerBeanDefinition("factory", definition);
    MongoFactoryBean bean = factory.getBean("&factory", MongoFactoryBean.class);
    assertThat(ReflectionTestUtils.getField(bean, "writeConcern"), is((Object) WriteConcern.SAFE));
  }
}
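
One detail worth calling out in the test above: the "&factory" lookup dereferences the FactoryBean itself (so the writeConcern field can be inspected via reflection), whereas a plain "factory" lookup would return the object the factory produces. A small sketch of the two lookups under the standard FactoryBean contract; the helper class is hypothetical:

import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import org.springframework.data.mongodb.core.MongoFactoryBean;

import com.mongodb.Mongo;

class FactoryBeanLookupSketch {

  static void lookup(DefaultListableBeanFactory factory) {
    // the FactoryBean itself, which is what the test inspects
    MongoFactoryBean factoryBean = factory.getBean("&factory", MongoFactoryBean.class);
    // the product instead: asking for "factory" makes the FactoryBean create its Mongo instance
    Mongo mongo = factory.getBean("factory", Mongo.class);
  }
}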


@@ -37,7 +37,6 @@ import org.springframework.data.mongodb.core.query.NearQuery;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.mongodb.DBRef;
/**
* Abstract base class for unit tests to specify behaviour we expect from {@link MongoOperations}. Subclasses return
@@ -81,10 +80,6 @@ public abstract class MongoOperationsUnitTests {
public Object convertToMongoType(Object obj) {
return null;
}
public DBRef toDBRef(Object object, MongoPersistentProperty referingProperty) {
return null;
}
};
}


@@ -1,5 +1,5 @@
/*
* Copyright 2011-2012 the original author or authors.
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -39,7 +39,6 @@ import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.convert.converter.Converter;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.PersistenceConstructor;
import org.springframework.data.mongodb.InvalidMongoDbApiUsageException;
@@ -48,7 +47,6 @@ import org.springframework.data.mongodb.core.convert.CustomConversions;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.index.Index;
import org.springframework.data.mongodb.core.index.Index.Duplicates;
import org.springframework.data.mongodb.core.index.IndexField;
import org.springframework.data.mongodb.core.index.IndexInfo;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.query.Criteria;
@@ -73,7 +71,6 @@ import com.mongodb.WriteResult;
*
* @author Oliver Gierke
* @author Thomas Risberg
* @author Amol Nayak
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath:infrastructure.xml")
@@ -83,7 +80,6 @@ public class MongoTemplateTests {
MongoTemplate template;
@Autowired
MongoDbFactory factory;
MongoTemplate mappingTemplate;
@Rule
@@ -103,7 +99,7 @@ public class MongoTemplateTests {
PersonWithIdPropertyOfPrimitiveInt.class, PersonWithIdPropertyOfTypeLong.class,
PersonWithIdPropertyOfPrimitiveLong.class)));
mappingContext.setSimpleTypeHolder(conversions.getSimpleTypeHolder());
mappingContext.initialize();
mappingContext.afterPropertiesSet();
MappingMongoConverter mappingConverter = new MappingMongoConverter(factory, mappingContext);
mappingConverter.setCustomConversions(conversions);
@@ -134,7 +130,6 @@ public class MongoTemplateTests {
template.dropCollection(template.getCollectionName(PersonWithIdPropertyOfTypeLong.class));
template.dropCollection(template.getCollectionName(PersonWithIdPropertyOfPrimitiveLong.class));
template.dropCollection(template.getCollectionName(TestClass.class));
template.dropCollection(Sample.class);
}
@Test
@@ -164,112 +159,6 @@ public class MongoTemplateTests {
mongoTemplate.updateFirst(q, u, Person.class);
}
/**
* @see DATAMONGO-480
*/
@Test
public void throwsExceptionForDuplicateIds() {
MongoTemplate template = new MongoTemplate(factory);
template.setWriteResultChecking(WriteResultChecking.EXCEPTION);
Person person = new Person(new ObjectId(), "Amol");
person.setAge(28);
template.insert(person);
try {
template.insert(person);
fail("Expected DataIntegrityViolationException!");
} catch (DataIntegrityViolationException e) {
assertThat(e.getMessage(), containsString("E11000 duplicate key error index: database.person.$_id_ dup key:"));
}
}
/**
* @see DATAMONGO-480
*/
@Test
public void throwsExceptionForUpdateWithInvalidPushOperator() {
MongoTemplate template = new MongoTemplate(factory);
template.setWriteResultChecking(WriteResultChecking.EXCEPTION);
ObjectId id = new ObjectId();
Person person = new Person(id, "Amol");
person.setAge(28);
template.insert(person);
try {
Query query = new Query(Criteria.where("firstName").is("Amol"));
Update upd = new Update().push("age", 29);
template.updateFirst(query, upd, Person.class);
fail("Expected DataIntegrityViolationException!");
} catch (DataIntegrityViolationException e) {
assertThat(e.getMessage(),
is("Execution of update with '{ \"$push\" : { \"age\" : 29}}'' using '{ \"firstName\" : \"Amol\"}' "
+ "query failed: Cannot apply $push/$pushAll modifier to non-array"));
}
}
/**
* @see DATAMONGO-480
*/
@Test
public void throwsExceptionForIndexViolationIfConfigured() {
MongoTemplate template = new MongoTemplate(factory);
template.setWriteResultChecking(WriteResultChecking.EXCEPTION);
template.indexOps(Person.class).ensureIndex(new Index().on("firstName", Order.DESCENDING).unique());
Person person = new Person(new ObjectId(), "Amol");
person.setAge(28);
template.save(person);
person = new Person(new ObjectId(), "Amol");
person.setAge(28);
try {
template.save(person);
fail("Expected DataIntegrityViolationException!");
} catch (DataIntegrityViolationException e) {
assertThat(e.getMessage(),
containsString("E11000 duplicate key error index: database.person.$firstName_-1 dup key:"));
}
}
/**
* @see DATAMONGO-480
*/
@Test
public void rejectsDuplicateIdInInsertAll() {
MongoTemplate template = new MongoTemplate(factory);
template.setWriteResultChecking(WriteResultChecking.EXCEPTION);
ObjectId id = new ObjectId();
Person person = new Person(id, "Amol");
person.setAge(28);
List<Person> records = new ArrayList<Person>();
records.add(person);
records.add(person);
try {
template.insertAll(records);
fail("Expected DataIntegrityViolationException!");
} catch (DataIntegrityViolationException e) {
assertThat(
e.getMessage(),
startsWith("Insert list failed: E11000 duplicate key error index: database.person.$_id_ dup key: { : ObjectId"));
}
}
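
The DATAMONGO-480 tests in this hunk all hinge on WriteResultChecking.EXCEPTION. As a hedged sketch of the mechanism (an assumption about how such checking can work, not the actual MongoTemplate code): after each write the WriteResult is inspected and a reported error is rethrown as a Spring DataIntegrityViolationException.

import org.springframework.dao.DataIntegrityViolationException;

import com.mongodb.DBObject;
import com.mongodb.WriteResult;

class WriteResultCheckingSketch {

  // Illustrative only: translate a driver-reported write error into Spring's exception hierarchy.
  static void checkWriteResult(WriteResult writeResult, DBObject query, String operation) {
    String error = writeResult.getError();
    if (error != null) {
      throw new DataIntegrityViolationException(
          "Execution of " + operation + " using " + query + " failed: " + error);
    }
  }
}
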
@Test
public void testEnsureIndex() throws Exception {
@@ -300,17 +189,14 @@ public class MongoTemplateTests {
assertThat(dropDupes, is(true));
List<IndexInfo> indexInfoList = template.indexOps(Person.class).getIndexInfo();
System.out.println(indexInfoList);
assertThat(indexInfoList.size(), is(2));
IndexInfo ii = indexInfoList.get(1);
assertThat(ii.isUnique(), is(true));
assertThat(ii.isDropDuplicates(), is(true));
assertThat(ii.isSparse(), is(false));
List<IndexField> indexFields = ii.getIndexFields();
IndexField field = indexFields.get(0);
assertThat(field, is(IndexField.create("age", Order.DESCENDING)));
assertThat(ii.getFieldSpec().containsKey("age"), is(true));
assertThat(ii.getFieldSpec().containsValue(Order.DESCENDING), is(true));
}
@Test
@@ -1020,6 +906,7 @@ public class MongoTemplateTests {
assertThat(lastMongoAction.getEntityClass().toString(), is(PersonWithIdPropertyOfTypeObjectId.class.toString()));
assertThat(lastMongoAction.getMongoActionOperation(), is(MongoActionOperation.UPDATE));
assertThat(lastMongoAction.getQuery(), equalTo(q.getQueryObject()));
assertThat(lastMongoAction.getDocument(), equalTo(u.getUpdateObject()));
}
@@ -1046,7 +933,7 @@ public class MongoTemplateTests {
DBRef first = new DBRef(factory.getDb(), "foo", new ObjectId());
DBRef second = new DBRef(factory.getDb(), "bar", new ObjectId());
template.updateFirst(null, update("dbRefs", Arrays.asList(first, second)), ClassWithDBRefs.class);
template.updateFirst(null, Update.update("dbRefs", Arrays.asList(first, second)), ClassWithDBRefs.class);
}
class ClassWithDBRefs {
@@ -1193,62 +1080,10 @@ public class MongoTemplateTests {
assertThat(template.findOne(query(where("id").is(id)), Sample.class), is(nullValue()));
}
/**
* @see DATAMONGO-423
*/
@Test
public void executesQueryWithNegatedRegexCorrectly() {
Sample first = new Sample();
first.field = "Matthews";
Sample second = new Sample();
second.field = "Beauford";
template.save(first);
template.save(second);
Query query = query(where("field").not().regex("Matthews"));
List<Sample> result = template.find(query, Sample.class);
assertThat(result.size(), is(1));
assertThat(result.get(0).field, is("Beauford"));
}
/**
* @see DATAMONGO-447
*/
@Test
public void storesAndRemovesTypeWithComplexId() {
MyId id = new MyId();
id.first = "foo";
id.second = "bar";
TypeWithMyId source = new TypeWithMyId();
source.id = id;
template.save(source);
template.remove(query(where("id").is(id)), TypeWithMyId.class);
}
static class MyId {
String first;
String second;
}
static class TypeWithMyId {
@Id
MyId id;
}
public static class Sample {
@Id
String id;
String field;
}
static class TestClass {


@@ -1,5 +1,5 @@
/*
* Copyright 2010-2012 the original author or authors.
* Copyright 2010-2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,12 +17,9 @@ package org.springframework.data.mongodb.core;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import static org.mockito.Matchers.*;
import static org.mockito.Mockito.*;
import java.math.BigInteger;
import java.util.Collections;
import java.util.regex.Pattern;
import org.bson.types.ObjectId;
import org.junit.Before;
@@ -32,17 +29,10 @@ import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.context.support.GenericApplicationContext;
import org.springframework.core.convert.converter.Converter;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.convert.CustomConversions;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.test.util.ReflectionTestUtils;
import com.mongodb.DB;
@@ -61,24 +51,20 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
MongoTemplate template;
@Mock
MongoDbFactory factory;
@Mock
Mongo mongo;
@Mock
DB db;
@Mock
DBCollection collection;
MappingMongoConverter converter;
@Before
public void setUp() {
this.template = new MongoTemplate(mongo, "database");
this.converter = new MappingMongoConverter(factory, new MongoMappingContext());
this.template = new MongoTemplate(factory, converter);
when(factory.getDb()).thenReturn(db);
when(mongo.getDB("database")).thenReturn(db);
when(db.getCollection(Mockito.any(String.class))).thenReturn(collection);
}
@@ -140,8 +126,6 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
@Test
public void autogeneratesIdForEntityWithAutogeneratableId() {
this.converter.afterPropertiesSet();
MongoTemplate template = spy(this.template);
doReturn(new ObjectId()).when(template).saveDBObject(Mockito.any(String.class), Mockito.any(DBObject.class),
Mockito.any(Class.class));
@@ -152,52 +136,6 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
assertThat(entity.id, is(notNullValue()));
}
/**
* @see DATAMONGO-374
*/
@Test
public void convertsUpdateConstraintsUsingConverters() {
CustomConversions conversions = new CustomConversions(Collections.singletonList(MyConverter.INSTANCE));
this.converter.setCustomConversions(conversions);
this.converter.afterPropertiesSet();
Query query = new Query();
Update update = new Update().set("foo", new AutogenerateableId());
template.updateFirst(query, update, Wrapper.class);
QueryMapper queryMapper = new QueryMapper(converter);
DBObject reference = queryMapper.getMappedObject(update.getUpdateObject(), null);
verify(collection, times(1)).update(Mockito.any(DBObject.class), eq(reference), anyBoolean(), anyBoolean());
}
/**
* @see DATAMONGO-474
*/
@Test
public void setsUnpopulatedIdField() {
NotAutogenerateableId entity = new NotAutogenerateableId();
template.populateIdIfNecessary(entity, 5);
assertThat(entity.id, is(5));
}
/**
* @see DATAMONGO-474
*/
@Test
public void doesNotSetAlreadyPopulatedId() {
NotAutogenerateableId entity = new NotAutogenerateableId();
entity.id = 5;
template.populateIdIfNecessary(entity, 7);
assertThat(entity.id, is(5));
}
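
A hedged reading of the two DATAMONGO-474 tests above: the generated id is only written back when the id property is still unset. The helper below illustrates that rule against the test's NotAutogenerateableId type; it is not the template's actual, reflection-based implementation.

class PopulateIdSketch {

  // Illustrative only: mirrors what the two tests assert against template.populateIdIfNecessary(...)
  static void populateIdIfNecessary(NotAutogenerateableId entity, Number id) {
    if (entity.id == null) {
      entity.id = id.intValue();
    }
  }
}
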
class AutogenerateableId {
@Id
@@ -208,24 +146,6 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
@Id
Integer id;
public Pattern getId() {
return Pattern.compile(".");
}
}
enum MyConverter implements Converter<AutogenerateableId, String> {
INSTANCE;
public String convert(AutogenerateableId source) {
return source.toString();
}
}
class Wrapper {
AutogenerateableId foo;
}
/**


@@ -1,66 +0,0 @@
/*
 * Copyright 2012 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import static org.springframework.data.mongodb.core.SerializationUtils.*;

import java.util.Arrays;

import org.hamcrest.Matcher;
import org.junit.Test;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

/**
 * Unit tests for {@link SerializationUtils}.
 *
 * @author Oliver Gierke
 */
public class SerializationUtilsUnitTests {

  @Test
  public void writesSimpleDBObject() {
    DBObject dbObject = new BasicDBObject("foo", "bar");
    assertThat(serializeToJsonSafely(dbObject), is("{ \"foo\" : \"bar\"}"));
  }

  @Test
  public void writesComplexObjectAsPlainToString() {
    DBObject dbObject = new BasicDBObject("foo", new Complex());
    assertThat(serializeToJsonSafely(dbObject),
        startsWith("{ \"foo\" : { $java : org.springframework.data.mongodb.core.SerializationUtilsUnitTests$Complex"));
  }

  @Test
  public void writesCollection() {
    DBObject dbObject = new BasicDBObject("foo", Arrays.asList("bar", new Complex()));
    Matcher<String> expectedOutput = allOf(
        startsWith("{ \"foo\" : [ \"bar\", { $java : org.springframework.data.mongodb.core.SerializationUtilsUnitTests$Complex"),
        endsWith(" } ] }"));
    assertThat(serializeToJsonSafely(dbObject), is(expectedOutput));
  }

  static class Complex {
  }
}
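
The value of serializeToJsonSafely(...), as these tests spell out, is that it never fails on values the driver's JSON serializer cannot handle and instead falls back to a readable { $java : ... } placeholder. A small usage sketch; the payload type is illustrative:

import static org.springframework.data.mongodb.core.SerializationUtils.*;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class SerializationUtilsUsageSketch {

  static class NotJsonSerializable {
  }

  public static void main(String[] args) {
    DBObject dbObject = new BasicDBObject("foo", new NotJsonSerializable());
    // safe for logging, even though a plain JSON serialization of the value above would fail
    System.out.println(serializeToJsonSafely(dbObject));
  }
}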


@@ -15,8 +15,8 @@
*/
package org.springframework.data.mongodb.core;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import static org.hamcrest.CoreMatchers.*;
import java.net.UnknownHostException;
@@ -24,7 +24,6 @@ import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.data.authentication.UserCredentials;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.test.util.ReflectionTestUtils;
@@ -71,8 +70,8 @@ public class SimpleMongoDbFactoryUnitTests {
MongoURI mongoURI = new MongoURI("mongodb://myUsername:myPassword@localhost/myDatabase.myCollection");
MongoDbFactory mongoDbFactory = new SimpleMongoDbFactory(mongoURI);
assertThat(ReflectionTestUtils.getField(mongoDbFactory, "credentials"), is((Object) new UserCredentials(
"myUsername", "myPassword")));
assertThat(ReflectionTestUtils.getField(mongoDbFactory, "username").toString(), is("myUsername"));
assertThat(ReflectionTestUtils.getField(mongoDbFactory, "password").toString(), is("myPassword"));
assertThat(ReflectionTestUtils.getField(mongoDbFactory, "databaseName").toString(), is("myDatabase"));
assertThat(ReflectionTestUtils.getField(mongoDbFactory, "databaseName").toString(), is("myDatabase"));
}


@@ -2,14 +2,12 @@ package org.springframework.data.mongodb.core;
import java.util.ArrayList;
import java.util.List;
import com.mongodb.Mongo;
import org.springframework.context.annotation.Bean;
import org.springframework.core.convert.converter.Converter;
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;
import org.springframework.data.mongodb.core.convert.CustomConversions;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import com.mongodb.Mongo;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
public class TestMongoConfiguration extends AbstractMongoConfiguration {
@@ -26,15 +24,15 @@ public class TestMongoConfiguration extends AbstractMongoConfiguration {
@Override
public String getMappingBasePackage() {
return MongoMappingContext.class.getPackage().getName();
return "org.springframework.data.mongodb.core.core.mapping";
}
@Override
public CustomConversions customConversions() {
protected void afterMappingMongoConverterCreation(MappingMongoConverter converter) {
List<Converter<?, ?>> converters = new ArrayList<Converter<?, ?>>();
converters.add(new org.springframework.data.mongodb.core.PersonReadConverter());
converters.add(new org.springframework.data.mongodb.core.PersonWriteConverter());
return new CustomConversions(converters);
converter.setCustomConversions(new CustomConversions(converters));
}
}
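
For context, a JavaConfig class like the one above is typically bootstrapped as sketched below. This assumes TestMongoConfiguration carries @Configuration and implements the remaining setup (database name, Mongo instance), which this hunk only shows in part, and that AbstractMongoConfiguration exposes a MongoTemplate bean as it does in the 1.0 line.

import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.TestMongoConfiguration;

class TestMongoConfigurationBootstrapSketch {

  public static void main(String[] args) {
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(TestMongoConfiguration.class);
    // the template created by the configuration uses the converter carrying the custom conversions above
    MongoTemplate template = context.getBean(MongoTemplate.class);
    System.out.println("Template ready: " + (template != null));
    context.close();
  }
}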


@@ -3,7 +3,6 @@ package org.springframework.data.mongodb.core.convert;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import java.net.URL;
import java.text.DateFormat;
import java.text.Format;
import java.util.Arrays;
@@ -154,22 +153,13 @@ public class CustomConversionsUnitTests {
}
/**
* @see DATAMONGO-462
* @see DATAMONGO-390
*/
@Test
public void hasWriteConverterForURL() {
public void convertsUUIDsToBinaryByDefault() {
CustomConversions conversions = new CustomConversions();
assertThat(conversions.hasCustomWriteTarget(URL.class), is(true));
}
/**
* @see DATAMONGO-462
*/
@Test
public void readTargetForURL() {
CustomConversions conversions = new CustomConversions();
assertThat(conversions.hasCustomReadTarget(String.class, URL.class), is(true));
assertThat(conversions.hasCustomWriteTarget(UUID.class), is(true));
}
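
A sketch of the kind of default UUID-to-binary write converter the convertsUUIDsToBinaryByDefault() test implies; the class name and the 16-byte encoding shown here are illustrative, not necessarily the converter shipped with the release:

import java.nio.ByteBuffer;
import java.util.UUID;

import org.bson.types.Binary;
import org.springframework.core.convert.converter.Converter;

enum UuidToBinarySketchConverter implements Converter<UUID, Binary> {

  INSTANCE;

  public Binary convert(UUID source) {
    ByteBuffer buffer = ByteBuffer.allocate(16);
    buffer.putLong(source.getMostSignificantBits());
    buffer.putLong(source.getLeastSignificantBits());
    return new Binary(buffer.array());
  }
}
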
enum FormatToStringConverter implements Converter<Format, String> {


@@ -15,12 +15,13 @@
*/
package org.springframework.data.mongodb.core.convert;
import static org.mockito.Matchers.*;
import static org.mockito.Mockito.*;
import java.util.Arrays;
import java.util.HashSet;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import org.hamcrest.CoreMatchers;
import org.junit.Assert;
import org.junit.Before;
@@ -31,12 +32,11 @@ import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.core.convert.converter.Converter;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.convert.CustomConversions;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
/**
* Test case to verify correct usage of custom {@link Converter} implementations.
*
@@ -71,7 +71,7 @@ public class CustomConvertersUnitTests {
context = new MongoMappingContext();
context.setInitialEntitySet(new HashSet<Class<?>>(Arrays.asList(Foo.class, Bar.class)));
context.setSimpleTypeHolder(conversions.getSimpleTypeHolder());
context.initialize();
context.afterPropertiesSet();
converter = new MappingMongoConverter(mongoDbFactory, context);
converter.setCustomConversions(conversions);

Some files were not shown because too many files have changed in this diff.