Compare commits
34 Commits
issue/4462
...
labs/antor
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
994a10006e | ||
|
|
161ec9aacb | ||
|
|
300ea4d332 | ||
|
|
2c9acd5eff | ||
|
|
3dcd0a3ca2 | ||
|
|
d7094416a4 | ||
|
|
4994b04b52 | ||
|
|
fc4e7f8ad4 | ||
|
|
6094bd1480 | ||
|
|
ddc8843fc1 | ||
|
|
ab4773931b | ||
|
|
0a44a2d028 | ||
|
|
4921c9376e | ||
|
|
823e0d02fc | ||
|
|
59052bd8ef | ||
|
|
4f8d420b43 | ||
|
|
c08176ac85 | ||
|
|
da0d49d261 | ||
|
|
e2326618a1 | ||
|
|
678b0a65b2 | ||
|
|
ac82885697 | ||
|
|
f052ae1794 | ||
|
|
2f1777e2fb | ||
|
|
5c1d5abaf7 | ||
|
|
f5d70ca8bb | ||
|
|
73050198c5 | ||
|
|
ab804d5041 | ||
|
|
8cc3d466d8 | ||
|
|
f289404079 | ||
|
|
c5c8c41879 | ||
|
|
38e5e1dfb4 | ||
|
|
099213bbf2 | ||
|
|
1749f9b485 | ||
|
|
4f71a78302 |
7
.gitignore
vendored
7
.gitignore
vendored
@@ -15,4 +15,9 @@ src/ant/.ant-targets-upload-dist.xml
|
||||
atlassian-ide-plugin.xml
|
||||
/.gradle/
|
||||
/.idea/
|
||||
*.graphml
|
||||
*.graphml
|
||||
build/
|
||||
node_modules
|
||||
node
|
||||
package.json
|
||||
package-lock.json
|
||||
|
||||
@@ -21,60 +21,35 @@
|
||||
|
||||
<properties>
|
||||
<project.root>${basedir}/..</project.root>
|
||||
<dist.key>SDMONGO</dist.key>
|
||||
|
||||
<!-- Observability -->
|
||||
<micrometer-docs-generator.inputPath>${maven.multiModuleProjectDirectory}/spring-data-mongodb/</micrometer-docs-generator.inputPath>
|
||||
<micrometer-docs-generator.inclusionPattern>.*</micrometer-docs-generator.inclusionPattern>
|
||||
<micrometer-docs-generator.outputPath>${maven.multiModuleProjectDirectory}/target/</micrometer-docs-generator.outputPath>
|
||||
<antora.playbook>${project.basedir}/../src/main/antora/antora-playbook.yml</antora.playbook>
|
||||
</properties>
|
||||
|
||||
<build>
|
||||
<resources>
|
||||
<resource>
|
||||
<directory>${project.basedir}/../src/main/antora/resources/antora-resources</directory>
|
||||
<filtering>true</filtering>
|
||||
</resource>
|
||||
</resources>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-resources-plugin</artifactId>
|
||||
<executions>
|
||||
<execution>
|
||||
<goals>
|
||||
<goal>resources</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-assembly-plugin</artifactId>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>exec-maven-plugin</artifactId>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>generate-docs</id>
|
||||
<phase>generate-resources</phase>
|
||||
<goals>
|
||||
<goal>java</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<mainClass>io.micrometer.docs.DocsGeneratorCommand</mainClass>
|
||||
<includePluginDependencies>true</includePluginDependencies>
|
||||
<arguments>
|
||||
<argument>${micrometer-docs-generator.inputPath}</argument>
|
||||
<argument>${micrometer-docs-generator.inclusionPattern}</argument>
|
||||
<argument>${micrometer-docs-generator.outputPath}</argument>
|
||||
</arguments>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>io.micrometer</groupId>
|
||||
<artifactId>micrometer-docs-generator</artifactId>
|
||||
<version>1.0.1</version>
|
||||
<type>jar</type>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.asciidoctor</groupId>
|
||||
<artifactId>asciidoctor-maven-plugin</artifactId>
|
||||
<configuration>
|
||||
<attributes>
|
||||
<mongo-reactivestreams>${mongo.reactivestreams}
|
||||
</mongo-reactivestreams>
|
||||
<reactor>${reactor}</reactor>
|
||||
</attributes>
|
||||
</configuration>
|
||||
<groupId>io.spring.maven.antora</groupId>
|
||||
<artifactId>antora-maven-plugin</artifactId>
|
||||
</plugin>
|
||||
</plugins>
|
||||
|
||||
|
||||
@@ -0,0 +1,38 @@
|
||||
/*
|
||||
* Copyright 2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
// tag::file[]
|
||||
package org.springframework.data.mongodb.example;
|
||||
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
|
||||
import com.mongodb.client.MongoClients;
|
||||
|
||||
public class MongoApplication {
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
MongoOperations mongoOps = new MongoTemplate(MongoClients.create(), "database");
|
||||
mongoOps.insert(new Person("Joe", 34));
|
||||
|
||||
System.out.println(mongoOps.query(Person.class).matching(where("name").is("Joe")).firstValue());
|
||||
|
||||
mongoOps.dropCollection("person");
|
||||
}
|
||||
}
|
||||
//end::file[]
|
||||
@@ -0,0 +1,49 @@
|
||||
/*
|
||||
* Copyright 2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
// tag::file[]
|
||||
package org.springframework.data.mongodb.example;
|
||||
|
||||
// tag::class[]
|
||||
public class Person {
|
||||
|
||||
private String id;
|
||||
private String name;
|
||||
private int age;
|
||||
|
||||
public Person(String name, int age) {
|
||||
this.name = name;
|
||||
this.age = age;
|
||||
}
|
||||
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public int getAge() {
|
||||
return age;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Person [id=" + id + ", name=" + name + ", age=" + age + "]";
|
||||
}
|
||||
}
|
||||
// end::class[]
|
||||
// end::file[]
|
||||
@@ -0,0 +1,40 @@
|
||||
/*
|
||||
* Copyright 2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
// tag::file[]
|
||||
package org.springframework.data.mongodb.example;
|
||||
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
|
||||
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
|
||||
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
|
||||
|
||||
import com.mongodb.reactivestreams.client.MongoClients;
|
||||
|
||||
public class ReactiveMongoApplication {
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
ReactiveMongoOperations mongoOps = new ReactiveMongoTemplate(MongoClients.create(), "database");
|
||||
|
||||
mongoOps.insert(new Person("Joe", 34))
|
||||
.then(mongoOps.query(Person.class).matching(where("name").is("Joe")).first())
|
||||
.doOnNext(System.out::println)
|
||||
.block();
|
||||
|
||||
mongoOps.dropCollection("person").block();
|
||||
}
|
||||
}
|
||||
// end::file[]
|
||||
42
src/main/antora/antora-playbook.yml
Normal file
42
src/main/antora/antora-playbook.yml
Normal file
@@ -0,0 +1,42 @@
|
||||
# PACKAGES antora@3.2.0-alpha.2 @antora/atlas-extension:1.0.0-alpha.1 @antora/collector-extension@1.0.0-alpha.3 @springio/antora-extensions@1.1.0-alpha.2 @asciidoctor/tabs@1.0.0-alpha.12 @opendevise/antora-release-line-extension@1.0.0-alpha.2
|
||||
#
|
||||
# The purpose of this Antora playbook is to build the docs in the current branch.
|
||||
antora:
|
||||
extensions:
|
||||
- '@antora/collector-extension'
|
||||
- require: '@springio/antora-extensions/root-component-extension'
|
||||
root_component_name: 'data-mongodb'
|
||||
site:
|
||||
title: Spring Data MongoDB
|
||||
url: https://docs.spring.io/spring-data-mongodb/reference/
|
||||
content:
|
||||
sources:
|
||||
- url: ./../../..
|
||||
branches: HEAD
|
||||
start_path: src/main/antora
|
||||
worktrees: true
|
||||
- url: https://github.com/spring-projects/spring-data-commons
|
||||
# Refname matching:
|
||||
# https://docs.antora.org/antora/latest/playbook/content-refname-matching/
|
||||
branches: [ main, 3.2.x ]
|
||||
start_path: src/main/antora
|
||||
asciidoc:
|
||||
attributes:
|
||||
page-pagination: ''
|
||||
hide-uri-scheme: '@'
|
||||
tabs-sync-option: '@'
|
||||
chomp: 'all'
|
||||
extensions:
|
||||
- '@asciidoctor/tabs'
|
||||
- '@springio/asciidoctor-extensions'
|
||||
sourcemap: true
|
||||
urls:
|
||||
latest_version_segment: ''
|
||||
runtime:
|
||||
log:
|
||||
failure_level: warn
|
||||
format: pretty
|
||||
ui:
|
||||
bundle:
|
||||
url: https://github.com/spring-io/antora-ui-spring/releases/download/v0.3.5/ui-bundle.zip
|
||||
snapshot: true
|
||||
12
src/main/antora/antora.yml
Normal file
12
src/main/antora/antora.yml
Normal file
@@ -0,0 +1,12 @@
|
||||
name: data-mongodb
|
||||
version: true
|
||||
title: Spring Data MongoDB
|
||||
nav:
|
||||
- modules/ROOT/nav.adoc
|
||||
ext:
|
||||
collector:
|
||||
- run:
|
||||
command: ./mvnw validate process-resources -pl :spring-data-mongodb-distribution -am -Pantora-process-resources
|
||||
local: true
|
||||
scan:
|
||||
dir: spring-data-mongodb-distribution/target/classes/
|
||||
|
Before Width: | Height: | Size: 48 KiB After Width: | Height: | Size: 48 KiB |
1
src/main/antora/modules/ROOT/examples/example
Symbolic link
1
src/main/antora/modules/ROOT/examples/example
Symbolic link
@@ -0,0 +1 @@
|
||||
../../../../../../spring-data-mongodb/src/test/java/org/springframework/data/mongodb/example
|
||||
63
src/main/antora/modules/ROOT/nav.adoc
Normal file
63
src/main/antora/modules/ROOT/nav.adoc
Normal file
@@ -0,0 +1,63 @@
|
||||
* xref:index.adoc[Overview]
|
||||
** xref:commons/upgrade.adoc[]
|
||||
** xref:migration-guides.adoc[]
|
||||
*** xref:migration-guide/migration-guide-2.x-to-3.x.adoc[]
|
||||
*** xref:migration-guide/migration-guide-3.x-to-4.x.adoc[]
|
||||
|
||||
* xref:mongodb.adoc[]
|
||||
** xref:mongodb/getting-started.adoc[]
|
||||
** xref:mongodb/configuration.adoc[]
|
||||
|
||||
** xref:mongodb/template-api.adoc[]
|
||||
*** xref:mongodb/template-config.adoc[]
|
||||
*** xref:mongodb/template-collection-management.adoc[]
|
||||
*** xref:mongodb/template-crud-operations.adoc[]
|
||||
*** xref:mongodb/template-query-operations.adoc[]
|
||||
*** xref:mongodb/aggregation-framework.adoc[]
|
||||
*** xref:mongodb/template-document-count.adoc[]
|
||||
|
||||
** xref:mongodb/template-gridfs.adoc[]
|
||||
** xref:mongodb/mapping/mapping.adoc[]
|
||||
*** xref:mongodb/mapping/mapping-schema.adoc[]
|
||||
*** xref:mongodb/mapping/custom-conversions.adoc[Type based Converter]
|
||||
*** xref:mongodb/mapping/property-converters.adoc[]
|
||||
*** xref:mongodb/mapping/unwrapping-entities.adoc[]
|
||||
*** xref:mongodb/mapping/document-references.adoc[Object References]
|
||||
*** xref:mongodb/mapping/mapping-index-management.adoc[]
|
||||
|
||||
** xref:mongodb/lifecycle-events.adoc[]
|
||||
** xref:mongodb/auditing.adoc[]
|
||||
** xref:mongodb/client-session-transactions.adoc[]
|
||||
** xref:mongodb/change-streams.adoc[]
|
||||
** xref:mongodb/tailable-cursors.adoc[]
|
||||
** xref:mongodb/sharding.adoc[]
|
||||
** xref:mongodb/mongo-encryption.adoc[]
|
||||
|
||||
// Repository
|
||||
* xref:repositories.adoc[]
|
||||
** xref:repositories/core-concepts.adoc[]
|
||||
** xref:repositories/definition.adoc[]
|
||||
** xref:mongodb/repositories/repositories.adoc[]
|
||||
** xref:repositories/create-instances.adoc[]
|
||||
** xref:repositories/query-methods-details.adoc[]
|
||||
** xref:mongodb/repositories/query-methods.adoc[]
|
||||
** xref:repositories/projections.adoc[]
|
||||
** xref:repositories/custom-implementations.adoc[]
|
||||
** xref:repositories/core-domain-events.adoc[]
|
||||
** xref:repositories/null-handling.adoc[]
|
||||
** xref:mongodb/repositories/cdi-integration.adoc[]
|
||||
** xref:repositories/query-keywords-reference.adoc[]
|
||||
** xref:repositories/query-return-types-reference.adoc[]
|
||||
|
||||
// Observability
|
||||
* xref:observability/observability.adoc[]
|
||||
** xref:observability/conventions.adoc[]
|
||||
** xref:observability/metrics.adoc[]
|
||||
** xref:observability/spans.adoc[]
|
||||
|
||||
* xref:kotlin.adoc[]
|
||||
** xref:kotlin/requirements.adoc[]
|
||||
** xref:kotlin/null-safety.adoc[]
|
||||
** xref:kotlin/extensions.adoc[]
|
||||
** xref:kotlin/coroutines.adoc[]
|
||||
|
||||
1
src/main/antora/modules/ROOT/pages/commons/upgrade.adoc
Normal file
1
src/main/antora/modules/ROOT/pages/commons/upgrade.adoc
Normal file
@@ -0,0 +1 @@
|
||||
include::{commons}@data-commons::page$upgrade.adoc[]
|
||||
22
src/main/antora/modules/ROOT/pages/index.adoc
Normal file
22
src/main/antora/modules/ROOT/pages/index.adoc
Normal file
@@ -0,0 +1,22 @@
|
||||
[[spring-data-mongodb-reference-documentation]]
|
||||
= Spring Data MongoDB
|
||||
:revnumber: {version}
|
||||
:revdate: {localdate}
|
||||
:feature-scroll: true
|
||||
|
||||
_Spring Data MongoDB provides support for the MongoDB database.
|
||||
It uses familiar Spring concepts such as a template classes for core API usage and lightweight repository style data access to ease development of applications with a consistent programming model._
|
||||
|
||||
[horizontal]
|
||||
xref:mongodb.adoc[MongoDB] :: MongoDB support and connectivity
|
||||
xref:repositories.adoc[Repositories] :: Mongo Repositories
|
||||
xref:observability/observability.adoc[Observability] :: Observability Integration
|
||||
xref:kotlin.adoc[Kotlin] :: Kotlin support
|
||||
// xref:migration-guides.adoc[Migration] :: Migration Guides
|
||||
https://github.com/spring-projects/spring-data-commons/wiki[Wiki] :: What's New, Upgrade Notes, Supported Versions, additional cross-version information.
|
||||
|
||||
Mark Pollack; Thomas Risberg; Oliver Gierke; Costin Leau; Jon Brisbin; Thomas Darimont; Christoph Strobl; Mark Paluch; Jay Bryant
|
||||
|
||||
(C) 2008-2022 VMware Inc.
|
||||
|
||||
Copies of this document may be made for your own use and for distribution to others, provided that you do not charge any fee for such copies and further provided that each copy contains this Copyright Notice, whether distributed in print or electronically.
|
||||
1
src/main/antora/modules/ROOT/pages/kotlin.adoc
Normal file
1
src/main/antora/modules/ROOT/pages/kotlin.adoc
Normal file
@@ -0,0 +1 @@
|
||||
include::{commons}@data-commons::page$kotlin.adoc[]
|
||||
@@ -0,0 +1 @@
|
||||
include::{commons}@data-commons::page$kotlin/coroutines.adoc[]
|
||||
65
src/main/antora/modules/ROOT/pages/kotlin/extensions.adoc
Normal file
65
src/main/antora/modules/ROOT/pages/kotlin/extensions.adoc
Normal file
@@ -0,0 +1,65 @@
|
||||
include::{commons}@data-commons::page$kotlin/extensions.adoc[]
|
||||
|
||||
To retrieve a list of `SWCharacter` objects in Java, you would normally write the following:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
Flux<SWCharacter> characters = template.query(SWCharacter.class).inTable("star-wars").all()
|
||||
----
|
||||
|
||||
With Kotlin and the Spring Data extensions, you can instead write the following:
|
||||
|
||||
[source,kotlin]
|
||||
----
|
||||
val characters = template.query<SWCharacter>().inTable("star-wars").all()
|
||||
// or (both are equivalent)
|
||||
val characters : Flux<SWCharacter> = template.query().inTable("star-wars").all()
|
||||
----
|
||||
|
||||
As in Java, `characters` in Kotlin is strongly typed, but Kotlin's clever type inference allows for shorter syntax.
|
||||
|
||||
[[mongo.query.kotlin-support]]
|
||||
== Type-safe Queries for Kotlin
|
||||
|
||||
Kotlin embraces domain-specific language creation through its language syntax and its extension system.
|
||||
Spring Data MongoDB ships with a Kotlin Extension for `Criteria` using https://kotlinlang.org/docs/reference/reflection.html#property-references[Kotlin property references] to build type-safe queries.
|
||||
Queries using this extension are typically benefit from improved readability.
|
||||
Most keywords on `Criteria` have a matching Kotlin extension, such as `inValues` and `regex`.
|
||||
|
||||
Consider the following example explaining Type-safe Queries:
|
||||
|
||||
====
|
||||
[source,kotlin]
|
||||
----
|
||||
import org.springframework.data.mongodb.core.query.*
|
||||
|
||||
mongoOperations.find<Book>(
|
||||
Query(Book::title isEqualTo "Moby-Dick") <1>
|
||||
)
|
||||
|
||||
mongoOperations.find<Book>(
|
||||
Query(titlePredicate = Book::title exists true)
|
||||
)
|
||||
|
||||
mongoOperations.find<Book>(
|
||||
Query(
|
||||
Criteria().andOperator(
|
||||
Book::price gt 5,
|
||||
Book::price lt 10
|
||||
))
|
||||
)
|
||||
|
||||
// Binary operators
|
||||
mongoOperations.find<BinaryMessage>(
|
||||
Query(BinaryMessage::payload bits { allClear(0b101) }) <2>
|
||||
)
|
||||
|
||||
// Nested Properties (i.e. refer to "book.author")
|
||||
mongoOperations.find<Book>(
|
||||
Query(Book::author / Author::name regex "^H") <3>
|
||||
)
|
||||
----
|
||||
<1> `isEqualTo()` is an infix extension function with receiver type `KProperty<T>` that returns `Criteria`.
|
||||
<2> For bitwise operators, pass a lambda argument where you call one of the methods of `Criteria.BitwiseCriteriaOperators`.
|
||||
<3> To construct nested properties, use the `/` character (overloaded operator `div`).
|
||||
====
|
||||
@@ -0,0 +1 @@
|
||||
include::{commons}@data-commons::page$kotlin/null-safety.adoc[]
|
||||
@@ -0,0 +1 @@
|
||||
include::{commons}@data-commons::page$kotlin/requirements.adoc[]
|
||||
@@ -1,12 +1,11 @@
|
||||
[[migrating]]
|
||||
= Migrating
|
||||
[[mongodb.migration.2.x-3.x]]
|
||||
= Migration Guide from 2.x to 3.x
|
||||
|
||||
This chapter coverts major changes and outlines migration steps.
|
||||
Spring Data MongoDB 3.x requires the MongoDB Java Driver 4.x +
|
||||
To learn more about driver versions please visit the https://www.mongodb.com/docs/drivers/java/sync/current/upgrade/[MongoDB Documentation].
|
||||
|
||||
[[migrating-2.x-to-3.0]]
|
||||
== 2.x to 3.0
|
||||
|
||||
=== Dependency Changes
|
||||
[[dependency-changes]]
|
||||
== Dependency Changes
|
||||
|
||||
* `org.mongodb:mongo-java-driver` (uber jar) got replaced with:
|
||||
** bson-jar
|
||||
@@ -16,25 +15,29 @@ This chapter coverts major changes and outlines migration steps.
|
||||
The change in dependencies allows usage of the reactive support without having to pull the synchronous driver.
|
||||
NOTE: The new sync driver does no longer support `com.mongodb.DBObject`. Please use `org.bson.Document` instead.
|
||||
|
||||
=== Signature Changes
|
||||
[[signature-changes]]
|
||||
== Signature Changes
|
||||
|
||||
* `MongoTemplate` no longer supports `com.mongodb.MongoClient` and `com.mongodb.MongoClientOptions`.
|
||||
Please use `com.mongodb.client.MongoClient` and `com.mongodb.MongoClientSettings` instead.
|
||||
|
||||
In case you're using `AbstractMongoConfiguration` please switch to `AbstractMongoClientConfiguration`.
|
||||
|
||||
=== Namespace Changes
|
||||
[[namespace-changes]]
|
||||
== Namespace Changes
|
||||
|
||||
The switch to `com.mongodb.client.MongoClient` requires an update of your configuration XML if you have one.
|
||||
The best way to provide required connection information is by using a connection string.
|
||||
Please see the https://docs.mongodb.com/manual/reference/connection-string/[MongoDB Documentation] for details.
|
||||
|
||||
[source,xml]
|
||||
|
||||
====
|
||||
[source,xml]
|
||||
----
|
||||
<mongo:mongo.mongo-client id="with-defaults" />
|
||||
----
|
||||
|
||||
[source,xml]
|
||||
----
|
||||
<context:property-placeholder location="classpath:..."/>
|
||||
|
||||
@@ -45,6 +48,7 @@ Please see the https://docs.mongodb.com/manual/reference/connection-string/[Mong
|
||||
connection-string="mongodb://${mongo.host}:${mongo.port}/?replicaSet=rs0" />
|
||||
----
|
||||
|
||||
[source,xml]
|
||||
----
|
||||
<mongo:mongo.mongo-client id="client-with-settings" replica-set="rs0">
|
||||
<mongo:client-settings cluster-connection-mode="MULTIPLE"
|
||||
@@ -55,6 +59,3 @@ Please see the https://docs.mongodb.com/manual/reference/connection-string/[Mong
|
||||
</mongo:mongo.mongo-client>
|
||||
----
|
||||
====
|
||||
|
||||
|
||||
|
||||
@@ -1,10 +1,5 @@
|
||||
[[upgrading.data-mongo]]
|
||||
= Upgrading
|
||||
|
||||
include::{spring-data-commons-docs}/upgrade.adoc[leveloffset=+1]
|
||||
|
||||
[[upgrading.3-4]]
|
||||
== Upgrading MongoDB Drivers
|
||||
[[mongodb.migration.3.x-4.x]]
|
||||
= Migration Guide from 3.x to 4.x
|
||||
|
||||
Spring Data MongoDB 4.x requires the MongoDB Java Driver 4.8.x +
|
||||
To learn more about driver versions please visit the https://www.mongodb.com/docs/drivers/java/sync/current/upgrade/[MongoDB Documentation].
|
||||
8
src/main/antora/modules/ROOT/pages/migration-guides.adoc
Normal file
8
src/main/antora/modules/ROOT/pages/migration-guides.adoc
Normal file
@@ -0,0 +1,8 @@
|
||||
[[mongodb.migration]]
|
||||
= Migration Guides
|
||||
:page-section-summary-toc: 1
|
||||
|
||||
This section contains version-specific migration guides explaining how to upgrade between two versions.
|
||||
|
||||
|
||||
|
||||
23
src/main/antora/modules/ROOT/pages/mongodb.adoc
Normal file
23
src/main/antora/modules/ROOT/pages/mongodb.adoc
Normal file
@@ -0,0 +1,23 @@
|
||||
[[mongodb.core]]
|
||||
= MongoDB Support
|
||||
:page-section-summary-toc: 1
|
||||
|
||||
Spring Data support for MongoDB contains a wide range of features:
|
||||
|
||||
* xref:mongodb/template-config.adoc[Spring configuration support] with Java-based `@Configuration` classes or an XML namespace for a Mongo driver instance and replica sets.
|
||||
* xref:mongodb/template-api.adoc[`MongoTemplate` helper class] that increases productivity when performing common Mongo operations.
|
||||
Includes integrated object mapping between documents and POJOs.
|
||||
* xref:mongodb/template-api.adoc#mongo-template.exception-translation[Exception translation] into Spring's portable Data Access Exception hierarchy.
|
||||
* Feature-rich xref:mongodb/mapping/mapping.adoc[Object Mapping] integrated with Spring's Conversion Service.
|
||||
* xref:mongodb/mapping/mapping.adoc#mapping-usage-annotations[Annotation-based mapping metadata] that is extensible to support other metadata formats.
|
||||
* xref:mongodb/lifecycle-events.adoc[Persistence and mapping lifecycle events].
|
||||
* xref:mongodb/template-query-operations.adoc[Java-based Query, Criteria, and Update DSLs].
|
||||
* Automatic implementation of xref:repositories.adoc[Repository interfaces], including support for custom query methods.
|
||||
* xref:mongodb/repositories/repositories.adoc#mongodb.repositories.queries.type-safe[QueryDSL integration] to support type-safe queries.
|
||||
* xref:mongodb/client-session-transactions.adoc[Multi-Document Transactions].
|
||||
* xref:mongodb/template-query-operations.adoc#mongo.geo-json[GeoSpatial integration].
|
||||
|
||||
For most tasks, you should use `MongoTemplate` or the Repository support, which both leverage the rich mapping functionality.
|
||||
`MongoTemplate` is the place to look for accessing functionality such as incrementing counters or ad-hoc CRUD operations.
|
||||
`MongoTemplate` also provides callback methods so that it is easy for you to get the low-level API artifacts, such as `com.mongodb.client.MongoDatabase`, to communicate directly with MongoDB.
|
||||
The goal with naming conventions on various API artifacts is to copy those in the base MongoDB Java driver so you can easily map your existing knowledge onto the Spring APIs.
|
||||
@@ -1,12 +1,12 @@
|
||||
[[mongo.aggregation]]
|
||||
== Aggregation Framework Support
|
||||
= Aggregation Framework Support
|
||||
|
||||
Spring Data MongoDB provides support for the Aggregation Framework introduced to MongoDB in version 2.2.
|
||||
|
||||
For further information, see the full https://docs.mongodb.org/manual/aggregation/[reference documentation] of the aggregation framework and other data aggregation tools for MongoDB.
|
||||
|
||||
[[mongo.aggregation.basic-concepts]]
|
||||
=== Basic Concepts
|
||||
== Basic Concepts
|
||||
|
||||
The Aggregation Framework support in Spring Data MongoDB is based on the following key abstractions: `Aggregation`, `AggregationDefinition`, and `AggregationResults`.
|
||||
|
||||
@@ -52,12 +52,12 @@ List<OutputType> mappedResult = results.getMappedResults();
|
||||
Note that, if you provide an input class as the first parameter to the `newAggregation` method, the `MongoTemplate` derives the name of the input collection from this class. Otherwise, if you do not not specify an input class, you must provide the name of the input collection explicitly. If both an input class and an input collection are provided, the latter takes precedence.
|
||||
|
||||
[[mongo.aggregation.supported-aggregation-operations]]
|
||||
=== Supported Aggregation Operations & Stages
|
||||
|
||||
[[aggregation-stages]]
|
||||
.Supported Aggregation Operations & Stages
|
||||
[%collapsible]
|
||||
====
|
||||
The MongoDB Aggregation Framework provides the following types of aggregation stages and operations:
|
||||
|
||||
==== Aggregation Stages
|
||||
|
||||
* addFields - `AddFieldsOperation`
|
||||
* bucket / bucketAuto - `BucketOperation` / `BucketAutoOperation`
|
||||
* count - `CountOperation`
|
||||
@@ -81,6 +81,7 @@ The MongoDB Aggregation Framework provides the following types of aggregation st
|
||||
* unionWith - `UnionWithOperation`
|
||||
* unset - `UnsetOperation`
|
||||
* unwind - `UnwindOperation`
|
||||
====
|
||||
|
||||
[TIP]
|
||||
====
|
||||
@@ -102,21 +103,6 @@ Aggregation.stage("""
|
||||
----
|
||||
====
|
||||
|
||||
==== Aggregation Operators
|
||||
|
||||
* Group/Accumulator Aggregation Operators
|
||||
* Boolean Aggregation Operators
|
||||
* Comparison Aggregation Operators
|
||||
* Arithmetic Aggregation Operators
|
||||
* String Aggregation Operators
|
||||
* Date Aggregation Operators
|
||||
* Array Aggregation Operators
|
||||
* Conditional Aggregation Operators
|
||||
* Lookup Aggregation Operators
|
||||
* Convert Aggregation Operators
|
||||
* Object Aggregation Operators
|
||||
* Script Aggregation Operators
|
||||
|
||||
At the time of this writing, we provide support for the following Aggregation Operators in Spring Data MongoDB:
|
||||
|
||||
.Aggregation Operators currently supported by Spring Data MongoDB
|
||||
@@ -171,7 +157,7 @@ At the time of this writing, we provide support for the following Aggregation Op
|
||||
Note that the aggregation operations not listed here are currently not supported by Spring Data MongoDB. Comparison aggregation operators are expressed as `Criteria` expressions.
|
||||
|
||||
[[mongo.aggregation.projection]]
|
||||
=== Projection Expressions
|
||||
== Projection Expressions
|
||||
|
||||
Projection expressions are used to define the fields that are the outcome of a particular aggregation step. Projection expressions can be defined through the `project` method of the `Aggregation` class, either by passing a list of `String` objects or an aggregation framework `Fields` object. The projection can be extended with additional fields through a fluent API by using the `and(String)` method and aliased by using the `as(String)` method.
|
||||
Note that you can also define fields with aliases by using the `Fields.field` static factory method of the aggregation framework, which you can then use to construct a new `Fields` instance. References to projected fields in later aggregation stages are valid only for the field names of included fields or their aliases (including newly defined fields and their aliases). Fields not included in the projection cannot be referenced in later aggregation stages. The following listings show examples of projection expression:
|
||||
@@ -209,11 +195,12 @@ project().and("firstname").as("name"), sort(ASC, "firstname")
|
||||
More examples for project operations can be found in the `AggregationTests` class. Note that further details regarding the projection expressions can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/project/#pipe._S_project[corresponding section] of the MongoDB Aggregation Framework reference documentation.
|
||||
|
||||
[[mongo.aggregation.facet]]
|
||||
=== Faceted Classification
|
||||
== Faceted Classification
|
||||
|
||||
As of Version 3.4, MongoDB supports faceted classification by using the Aggregation Framework. A faceted classification uses semantic categories (either general or subject-specific) that are combined to create the full classification entry. Documents flowing through the aggregation pipeline are classified into buckets. A multi-faceted classification enables various aggregations on the same set of input documents, without needing to retrieve the input documents multiple times.
|
||||
|
||||
==== Buckets
|
||||
[[buckets]]
|
||||
=== Buckets
|
||||
|
||||
Bucket operations categorize incoming documents into groups, called buckets, based on a specified expression and bucket boundaries. Bucket operations require a grouping field or a grouping expression. You can define them by using the `bucket()` and `bucketAuto()` methods of the `Aggregate` class. `BucketOperation` and `BucketAutoOperation` can expose accumulations based on aggregation expressions for input documents. You can extend the bucket operation with additional parameters through a fluent API by using the `with…()` methods and the `andOutput(String)` method. You can alias the operation by using the `as(String)` method. Each bucket is represented as a document in the output.
|
||||
|
||||
@@ -254,12 +241,13 @@ bucketAuto("price", 5).andOutput("title").push().as("titles");
|
||||
----
|
||||
====
|
||||
|
||||
To create output fields in buckets, bucket operations can use `AggregationExpression` through `andOutput()` and <<mongo.aggregation.projection.expressions, SpEL expressions>> through `andOutputExpression()`.
|
||||
To create output fields in buckets, bucket operations can use `AggregationExpression` through `andOutput()` and xref:mongodb/aggregation-framework.adoc#mongo.aggregation.projection.expressions[SpEL expressions] through `andOutputExpression()`.
|
||||
|
||||
Note that further details regarding bucket expressions can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/bucket/[`$bucket` section] and
|
||||
https://docs.mongodb.org/manual/reference/operator/aggregation/bucketAuto/[`$bucketAuto` section] of the MongoDB Aggregation Framework reference documentation.
|
||||
|
||||
==== Multi-faceted Aggregation
|
||||
[[multi-faceted-aggregation]]
|
||||
=== Multi-faceted Aggregation
|
||||
|
||||
Multiple aggregation pipelines can be used to create multi-faceted aggregations that characterize data across multiple dimensions (or facets) within a single aggregation stage. Multi-faceted aggregations provide multiple filters and categorizations to guide data browsing and analysis. A common implementation of faceting is how many online retailers provide ways to narrow down search results by applying filters on product price, manufacturer, size, and other factors.
|
||||
|
||||
@@ -290,9 +278,9 @@ facet(project("title").and("publicationDate").extractYear().as("publicationYear"
|
||||
Note that further details regarding facet operation can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/facet/[`$facet` section] of the MongoDB Aggregation Framework reference documentation.
|
||||
|
||||
[[mongo.aggregation.sort-by-count]]
|
||||
==== Sort By Count
|
||||
=== Sort By Count
|
||||
|
||||
Sort by count operations group incoming documents based on the value of a specified expression, compute the count of documents in each distinct group, and sort the results by count. It offers a handy shortcut to apply sorting when using <<mongo.aggregation.facet>>. Sort by count operations require a grouping field or grouping expression. The following listing shows a sort by count example:
|
||||
Sort by count operations group incoming documents based on the value of a specified expression, compute the count of documents in each distinct group, and sort the results by count. It offers a handy shortcut to apply sorting when using xref:mongodb/aggregation-framework.adoc#mongo.aggregation.facet[Faceted Classification]. Sort by count operations require a grouping field or grouping expression. The following listing shows a sort by count example:
|
||||
|
||||
.Sort by count example
|
||||
====
|
||||
@@ -311,11 +299,12 @@ A sort by count operation is equivalent to the following BSON (Binary JSON):
|
||||
----
|
||||
|
||||
[[mongo.aggregation.projection.expressions]]
|
||||
==== Spring Expression Support in Projection Expressions
|
||||
=== Spring Expression Support in Projection Expressions
|
||||
|
||||
We support the use of SpEL expressions in projection expressions through the `andExpression` method of the `ProjectionOperation` and `BucketOperation` classes. This feature lets you define the desired expression as a SpEL expression. On running a query, the SpEL expression is translated into a corresponding MongoDB projection expression part. This arrangement makes it much easier to express complex calculations.
|
||||
|
||||
===== Complex Calculations with SpEL expressions
|
||||
[[complex-calculations-with-spel-expressions]]
|
||||
==== Complex Calculations with SpEL expressions
|
||||
|
||||
Consider the following SpEL expression:
|
||||
|
||||
@@ -336,9 +325,12 @@ The preceding expression is translated into the following projection expression
|
||||
}]}
|
||||
----
|
||||
|
||||
You can see examples in more context in <<mongo.aggregation.examples.example5>> and <<mongo.aggregation.examples.example6>>. You can find more usage examples for supported SpEL expression constructs in `SpelExpressionTransformerUnitTests`. The following table shows the SpEL transformations supported by Spring Data MongoDB:
|
||||
You can see examples in more context in xref:mongodb/aggregation-framework.adoc#mongo.aggregation.examples.example5[Aggregation Framework Example 5] and xref:mongodb/aggregation-framework.adoc#mongo.aggregation.examples.example6[Aggregation Framework Example 6].
|
||||
You can find more usage examples for supported SpEL expression constructs in `SpelExpressionTransformerUnitTests`.
|
||||
|
||||
.Supported SpEL transformations
|
||||
[%collapsible]
|
||||
====
|
||||
[%header,cols="2"]
|
||||
|===
|
||||
| SpEL Expression
|
||||
@@ -374,6 +366,7 @@ You can see examples in more context in <<mongo.aggregation.examples.example5>>
|
||||
| !a
|
||||
| { $not : [$a] }
|
||||
|===
|
||||
====
|
||||
|
||||
In addition to the transformations shown in the preceding table, you can use standard SpEL operations such as `new` to (for example) create arrays and reference expressions through their names (followed by the arguments to use in brackets). The following example shows how to create an array in this fashion:
|
||||
|
||||
@@ -384,12 +377,12 @@ In addition to the transformations shown in the preceding table, you can use sta
|
||||
----
|
||||
|
||||
[[mongo.aggregation.examples]]
|
||||
==== Aggregation Framework Examples
|
||||
=== Aggregation Framework Examples
|
||||
|
||||
The examples in this section demonstrate the usage patterns for the MongoDB Aggregation Framework with Spring Data MongoDB.
|
||||
|
||||
[[mongo.aggregation.examples.example1]]
|
||||
===== Aggregation Framework Example 1
|
||||
==== Aggregation Framework Example 1
|
||||
|
||||
In this introductory example, we want to aggregate a list of tags to get the occurrence count of a particular tag from a MongoDB collection (called `tags`) sorted by the occurrence count in descending order. This example demonstrates the usage of grouping, sorting, projections (selection), and unwinding (result splitting).
|
||||
|
||||
@@ -430,7 +423,7 @@ The preceding listing uses the following algorithm:
|
||||
Note that the input collection is explicitly specified as the `tags` parameter to the `aggregate` method. If the name of the input collection is not specified explicitly, it is derived from the input class passed as the first parameter to the `newAggregation` method.
|
||||
|
||||
[[mongo.aggregation.examples.example2]]
|
||||
===== Aggregation Framework Example 2
|
||||
==== Aggregation Framework Example 2
|
||||
|
||||
This example is based on the https://docs.mongodb.org/manual/tutorial/aggregation-examples/#largest-and-smallest-cities-by-state[Largest and Smallest Cities by State] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions. Here we want to return the smallest and largest cities by population for each state by using the aggregation framework. This example demonstrates grouping, sorting, and projections (selection).
|
||||
|
||||
@@ -496,7 +489,7 @@ The preceding listings use the following algorithm:
|
||||
Note that we derive the name of the input collection from the `ZipInfo` class passed as the first parameter to the `newAggregation` method.
|
||||
|
||||
[[mongo.aggregation.examples.example3]]
|
||||
===== Aggregation Framework Example 3
|
||||
==== Aggregation Framework Example 3
|
||||
|
||||
This example is based on the https://docs.mongodb.org/manual/tutorial/aggregation-examples/#states-with-populations-over-10-million[States with Populations Over 10 Million] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions. Here we want to return all states with a population greater than 10 million, using the aggregation framework. This example demonstrates grouping, sorting, and matching (filtering).
|
||||
|
||||
@@ -532,7 +525,7 @@ The preceding listings use the following algorithm:
|
||||
Note that we derive the name of the input collection from the `ZipInfo` class passed as first parameter to the `newAggregation` method.
|
||||
|
||||
[[mongo.aggregation.examples.example4]]
|
||||
===== Aggregation Framework Example 4
|
||||
==== Aggregation Framework Example 4
|
||||
|
||||
This example demonstrates the use of simple arithmetic operations in the projection operation.
|
||||
|
||||
@@ -566,7 +559,7 @@ List<Document> resultList = result.getMappedResults();
|
||||
Note that we derive the name of the input collection from the `Product` class passed as first parameter to the `newAggregation` method.
|
||||
|
||||
[[mongo.aggregation.examples.example5]]
|
||||
===== Aggregation Framework Example 5
|
||||
==== Aggregation Framework Example 5
|
||||
|
||||
This example demonstrates the use of simple arithmetic operations derived from SpEL Expressions in the projection operation.
|
||||
|
||||
@@ -600,7 +593,7 @@ List<Document> resultList = result.getMappedResults();
|
||||
----
|
||||
|
||||
[[mongo.aggregation.examples.example6]]
|
||||
===== Aggregation Framework Example 6
|
||||
==== Aggregation Framework Example 6
|
||||
|
||||
This example demonstrates the use of complex arithmetic operations derived from SpEL Expressions in the projection operation.
|
||||
|
||||
@@ -634,7 +627,7 @@ List<Document> resultList = result.getMappedResults();
|
||||
Note that we can also refer to other fields of the document within the SpEL expression.
|
||||
|
||||
[[mongo.aggregation.examples.example7]]
|
||||
===== Aggregation Framework Example 7
|
||||
==== Aggregation Framework Example 7
|
||||
|
||||
This example uses conditional projection. It is derived from the https://docs.mongodb.com/manual/reference/operator/aggregation/cond/[$cond reference documentation].
|
||||
|
||||
47
src/main/antora/modules/ROOT/pages/mongodb/auditing.adoc
Normal file
47
src/main/antora/modules/ROOT/pages/mongodb/auditing.adoc
Normal file
@@ -0,0 +1,47 @@
|
||||
[[mongo.auditing]]
|
||||
= Auditing
|
||||
|
||||
Since Spring Data MongoDB 1.4, auditing can be enabled by annotating a configuration class with the `@EnableMongoAuditing` annotation, as the following example shows:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
@Configuration
|
||||
@EnableMongoAuditing
|
||||
class Config {
|
||||
|
||||
@Bean
|
||||
public AuditorAware<AuditableUser> myAuditorProvider() {
|
||||
return new AuditorAwareImpl();
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
@Configuration
|
||||
@EnableReactiveMongoAuditing
|
||||
class Config {
|
||||
|
||||
@Bean
|
||||
public ReactiveAuditorAware<AuditableUser> myAuditorProvider() {
|
||||
return new ReactiveAuditorAwareImpl();
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
XML::
|
||||
+
|
||||
[source,xml,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
<mongo:auditing mapping-context-ref="customMappingContext" auditor-aware-ref="yourAuditorAwareImpl"/>
|
||||
----
|
||||
======
|
||||
|
||||
If you expose a bean of type `AuditorAware` / `ReactiveAuditorAware` to the `ApplicationContext`, the auditing infrastructure picks it up automatically and uses it to determine the current user to be set on domain types.
|
||||
If you have multiple implementations registered in the `ApplicationContext`, you can select the one to be used by explicitly setting the `auditorAwareRef` attribute of `@EnableMongoAuditing`.
|
||||
@@ -1,5 +1,5 @@
|
||||
[[change-streams]]
|
||||
== Change Streams
|
||||
= Change Streams
|
||||
|
||||
As of MongoDB 3.6, https://docs.mongodb.com/manual/changeStreams/[Change Streams] let applications get notified about changes without having to tail the oplog.
|
||||
|
||||
@@ -12,7 +12,8 @@ changes from all collections within the database. When subscribing to a database
|
||||
suitable type for the event type as conversion might not apply correctly across different entity types.
|
||||
In doubt, use `Document`.
|
||||
|
||||
=== Change Streams with `MessageListener`
|
||||
[[change-streams-with-messagelistener]]
|
||||
== Change Streams with `MessageListener`
|
||||
|
||||
Listening to a https://docs.mongodb.com/manual/tutorial/change-streams-example/[Change Stream by using a Sync Driver] creates a long running, blocking task that needs to be delegated to a separate component.
|
||||
In this case, we need to first create a `MessageListenerContainer`, which will be the main entry point for running the specific `SubscriptionRequest` tasks.
|
||||
@@ -49,7 +50,8 @@ Errors while processing are passed on to an `org.springframework.util.ErrorHandl
|
||||
Please use `register(request, body, errorHandler)` to provide additional functionality.
|
||||
====
|
||||
|
||||
=== Reactive Change Streams
|
||||
[[reactive-change-streams]]
|
||||
== Reactive Change Streams
|
||||
|
||||
Subscribing to Change Streams with the reactive API is a more natural approach to work with streams. Still, the essential building blocks, such as `ChangeStreamOptions`, remain the same. The following example shows how to use Change Streams emitting ``ChangeStreamEvent``s:
|
||||
|
||||
@@ -67,7 +69,8 @@ Flux<ChangeStreamEvent<User>> flux = reactiveTemplate.changeStream(User.class) <
|
||||
<3> Obtain a `Flux` of change stream events. The `ChangeStreamEvent#getBody()` is converted to the requested domain type from (2).
|
||||
====
|
||||
|
||||
=== Resuming Change Streams
|
||||
[[resuming-change-streams]]
|
||||
== Resuming Change Streams
|
||||
|
||||
Change Streams can be resumed and resume emitting events where you left. To resume the stream, you need to supply either a resume
|
||||
token or the last known server time (in UTC). Use `ChangeStreamOptions` to set the value accordingly.
|
||||
@@ -1,22 +1,31 @@
|
||||
[[mongo.sessions]]
|
||||
= MongoDB Sessions
|
||||
= Sessions & Transactions
|
||||
|
||||
As of version 3.6, MongoDB supports the concept of sessions. The use of sessions enables MongoDB's https://docs.mongodb.com/manual/core/read-isolation-consistency-recency/#causal-consistency[Causal Consistency] model, which guarantees running operations in an order that respects their causal relationships. Those are split into `ServerSession` instances and `ClientSession` instances. In this section, when we speak of a session, we refer to `ClientSession`.
|
||||
As of version 3.6, MongoDB supports the concept of sessions.
|
||||
The use of sessions enables MongoDB's https://docs.mongodb.com/manual/core/read-isolation-consistency-recency/#causal-consistency[Causal Consistency] model, which guarantees running operations in an order that respects their causal relationships.
|
||||
Those are split into `ServerSession` instances and `ClientSession` instances.
|
||||
In this section, when we speak of a session, we refer to `ClientSession`.
|
||||
|
||||
WARNING: Operations within a client session are not isolated from operations outside the session.
|
||||
|
||||
Both `MongoOperations` and `ReactiveMongoOperations` provide gateway methods for tying a `ClientSession` to the operations. `MongoCollection` and `MongoDatabase` use session proxy objects that implement MongoDB's collection and database interfaces, so you need not add a session on each call. This means that a potential call to `MongoCollection#find()` is delegated to `MongoCollection#find(ClientSession)`.
|
||||
Both `MongoOperations` and `ReactiveMongoOperations` provide gateway methods for tying a `ClientSession` to the operations.
|
||||
`MongoCollection` and `MongoDatabase` use session proxy objects that implement MongoDB's collection and database interfaces, so you need not add a session on each call.
|
||||
This means that a potential call to `MongoCollection#find()` is delegated to `MongoCollection#find(ClientSession)`.
|
||||
|
||||
NOTE: Methods such as `(Reactive)MongoOperations#getCollection` return native MongoDB Java Driver gateway objects (such as `MongoCollection`) that themselves offer dedicated methods for `ClientSession`. These methods are *NOT* session-proxied. You should provide the `ClientSession` where needed when interacting directly with a `MongoCollection` or `MongoDatabase` and not through one of the `#execute` callbacks on `MongoOperations`.
|
||||
|
||||
[[mongo.sessions.sync]]
|
||||
== Synchronous `ClientSession` support.
|
||||
[[mongo.sessions.reactive]]
|
||||
== ClientSession support
|
||||
|
||||
The following example shows the usage of a session:
|
||||
|
||||
.`ClientSession` with `MongoOperations`
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
====
|
||||
[source,java]
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
ClientSessionOptions sessionOptions = ClientSessionOptions.builder()
|
||||
.causallyConsistent(true)
|
||||
@@ -44,27 +53,23 @@ session.close() <4>
|
||||
<2> Use `MongoOperation` methods as before. The `ClientSession` gets applied automatically.
|
||||
<3> Make sure to close the `ClientSession`.
|
||||
<4> Close the session.
|
||||
====
|
||||
|
||||
WARNING: When dealing with `DBRef` instances, especially lazily loaded ones, it is essential to *not* close the `ClientSession` before all data is loaded. Otherwise, lazy fetch fails.
|
||||
|
||||
[[mongo.sessions.reactive]]
|
||||
== Reactive `ClientSession` support
|
||||
|
||||
The reactive counterpart uses the same building blocks as the imperative one, as the following example shows:
|
||||
|
||||
.ClientSession with `ReactiveMongoOperations`
|
||||
====
|
||||
[source,java]
|
||||
|
||||
Reactive::
|
||||
+
|
||||
====
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
ClientSessionOptions sessionOptions = ClientSessionOptions.builder()
|
||||
.causallyConsistent(true)
|
||||
.build();
|
||||
.causallyConsistent(true)
|
||||
.build();
|
||||
|
||||
Publisher<ClientSession> session = client.startSession(sessionOptions); <1>
|
||||
|
||||
template.withSession(session)
|
||||
.execute(action -> {
|
||||
.execute(action -> {
|
||||
|
||||
Query query = query(where("name").is("Durzo Blint"));
|
||||
return action.findOne(query, Person.class)
|
||||
@@ -82,28 +87,33 @@ template.withSession(session)
|
||||
<2> Use `ReactiveMongoOperation` methods as before. The `ClientSession` is obtained and applied automatically.
|
||||
<3> Make sure to close the `ClientSession`.
|
||||
<4> Nothing happens until you subscribe. See https://projectreactor.io/docs/core/release/reference/#reactive.subscribe[the Project Reactor Reference Guide] for details.
|
||||
====
|
||||
|
||||
By using a `Publisher` that provides the actual session, you can defer session acquisition to the point of actual subscription.
|
||||
Still, you need to close the session when done, so as to not pollute the server with stale sessions. Use the `doFinally` hook on `execute` to call `ClientSession#close()` when you no longer need the session.
|
||||
If you prefer having more control over the session itself, you can obtain the `ClientSession` through the driver and provide it through a `Supplier`.
|
||||
|
||||
NOTE: Reactive use of `ClientSession` is limited to Template API usage. There's currently no session integration with reactive repositories.
|
||||
====
|
||||
======
|
||||
|
||||
[[mongo.transactions]]
|
||||
= MongoDB Transactions
|
||||
== MongoDB Transactions
|
||||
|
||||
As of version 4, MongoDB supports https://www.mongodb.com/transactions[Transactions]. Transactions are built on top of <<mongo.sessions,Sessions>> and, consequently, require an active `ClientSession`.
|
||||
As of version 4, MongoDB supports https://www.mongodb.com/transactions[Transactions]. Transactions are built on top of xref:mongodb/client-session-transactions.adoc[Sessions] and, consequently, require an active `ClientSession`.
|
||||
|
||||
NOTE: Unless you specify a `MongoTransactionManager` within your application context, transaction support is *DISABLED*. You can use `setSessionSynchronization(ALWAYS)` to participate in ongoing non-native MongoDB transactions.
|
||||
|
||||
To get full programmatic control over transactions, you may want to use the session callback on `MongoOperations`.
|
||||
|
||||
The following example shows programmatic transaction control within a `SessionCallback`:
|
||||
The following example shows programmatic transaction control:
|
||||
|
||||
.Programmatic transactions
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
====
|
||||
[source,java]
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
ClientSession session = client.startSession(options); <1>
|
||||
|
||||
@@ -133,19 +143,58 @@ template.withSession(session)
|
||||
<3> If everything works out as expected, commit the changes.
|
||||
<4> Something broke, so roll back everything.
|
||||
<5> Do not forget to close the session when done.
|
||||
====
|
||||
|
||||
The preceding example lets you have full control over transactional behavior while using the session scoped `MongoOperations` instance within the callback to ensure the session is passed on to every server call.
|
||||
To avoid some of the overhead that comes with this approach, you can use a `TransactionTemplate` to take away some of the noise of manual transaction flow.
|
||||
====
|
||||
|
||||
Reactive::
|
||||
+
|
||||
====
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
Mono<DeleteResult> result = Mono
|
||||
.from(client.startSession()) <1>
|
||||
|
||||
.flatMap(session -> {
|
||||
session.startTransaction(); <2>
|
||||
|
||||
return Mono.from(collection.deleteMany(session, ...)) <3>
|
||||
|
||||
.onErrorResume(e -> Mono.from(session.abortTransaction()).then(Mono.error(e))) <4>
|
||||
|
||||
.flatMap(val -> Mono.from(session.commitTransaction()).then(Mono.just(val))) <5>
|
||||
|
||||
.doFinally(signal -> session.close()); <6>
|
||||
});
|
||||
----
|
||||
<1> First we obviously need to initiate the session.
|
||||
<2> Once we have the `ClientSession` at hand, start the transaction.
|
||||
<3> Operate within the transaction by passing on the `ClientSession` to the operation.
|
||||
<4> If the operation completes exceptionally, we need to abort the transaction and preserve the error.
|
||||
<5> Or, of course, commit the changes in case of success, still preserving the operation's result.
|
||||
<6> Lastly, we need to make sure to close the session.
|
||||
|
||||
The tricky part of the above operation is keeping the main flow's `DeleteResult` instead of the transaction outcome
|
||||
published via either `commitTransaction()` or `abortTransaction()`, which leads to a rather complicated setup.
|
||||
|
||||
NOTE: Unless you specify a `ReactiveMongoTransactionManager` within your application context, transaction support is *DISABLED*. You can use `setSessionSynchronization(ALWAYS)` to participate in ongoing non-native MongoDB transactions.
|
||||
====
|
||||
======
|
||||
|
||||
[[mongo.transactions.transaction-template]]
|
||||
== Transactions with `TransactionTemplate`
|
||||
[[mongo.transactions.reactive-operator]]
|
||||
== Transactions with TransactionTemplate / TransactionalOperator
|
||||
|
||||
Spring Data MongoDB transactions support a `TransactionTemplate`. The following example shows how to create and use a `TransactionTemplate`:
|
||||
Spring Data MongoDB transactions support both `TransactionTemplate` and `TransactionalOperator`.
|
||||
|
||||
.Transactions with `TransactionTemplate`
|
||||
.Transactions with `TransactionTemplate` / `TransactionalOperator`
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
====
|
||||
[source,java]
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
template.setSessionSynchronization(ALWAYS); <1>
|
||||
|
||||
@@ -170,19 +219,54 @@ txTemplate.execute(new TransactionCallbackWithoutResult() {
|
||||
<1> Enable transaction synchronization during Template API configuration.
|
||||
<2> Create the `TransactionTemplate` using the provided `PlatformTransactionManager`.
|
||||
<3> Within the callback the `ClientSession` and transaction are already registered.
|
||||
====
|
||||
|
||||
CAUTION: Changing state of `MongoTemplate` during runtime (as you might think would be possible in item 1 of the preceding listing) can cause threading and visibility issues.
|
||||
====
|
||||
|
||||
Reactive::
|
||||
+
|
||||
====
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
template.setSessionSynchronization(ALWAYS); <1>
|
||||
|
||||
// ...
|
||||
|
||||
TransactionalOperator rxtx = TransactionalOperator.create(anyTxManager,
|
||||
new DefaultTransactionDefinition()); <2>
|
||||
|
||||
|
||||
Step step = // ...;
|
||||
template.insert(step);
|
||||
|
||||
Mono<Void> process(step)
|
||||
.then(template.update(Step.class).apply(Update.set("state", …))
|
||||
.as(rxtx::transactional) <3>
|
||||
.then();
|
||||
----
|
||||
<1> Enable transaction synchronization for Transactional participation.
|
||||
<2> Create the `TransactionalOperator` using the provided `ReactiveTransactionManager`.
|
||||
<3> `TransactionalOperator.transactional(…)` provides transaction management for all upstream operations.
|
||||
====
|
||||
======
|
||||
|
||||
[[mongo.transactions.tx-manager]]
|
||||
== Transactions with `MongoTransactionManager`
|
||||
[[mongo.transactions.reactive-tx-manager]]
|
||||
== Transactions with MongoTransactionManager & ReactiveMongoTransactionManager
|
||||
|
||||
`MongoTransactionManager` is the gateway to the well known Spring transaction support. It lets applications use link:{springDocsUrl}/data-access.html#transaction[the managed transaction features of Spring].
|
||||
The `MongoTransactionManager` binds a `ClientSession` to the thread. `MongoTemplate` detects the session and operates on these resources which are associated with the transaction accordingly. `MongoTemplate` can also participate in other, ongoing transactions. The following example shows how to create and use transactions with a `MongoTransactionManager`:
|
||||
`MongoTransactionManager` / `ReactiveMongoTransactionManager` is the gateway to the well known Spring transaction support.
|
||||
It lets applications use link:{springDocsUrl}/data-access.html#transaction[the managed transaction features of Spring].
|
||||
The `MongoTransactionManager` binds a `ClientSession` to the thread whereas the `ReactiveMongoTransactionManager` is using the `ReactorContext` for this.
|
||||
`MongoTemplate` detects the session and operates on these resources which are associated with the transaction accordingly.
|
||||
`MongoTemplate` can also participate in other, ongoing transactions. The following example shows how to create and use transactions with a `MongoTransactionManager`:
|
||||
|
||||
.Transactions with `MongoTransactionManager`
|
||||
.Transactions with `MongoTransactionManager` / `ReactiveMongoTransactionManager`
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
====
|
||||
[source,java]
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
@Configuration
|
||||
static class Config extends AbstractMongoClientConfiguration {
|
||||
@@ -212,94 +296,15 @@ public class StateService {
|
||||
----
|
||||
<1> Register `MongoTransactionManager` in the application context.
|
||||
<2> Mark methods as transactional.
|
||||
====
|
||||
|
||||
NOTE: `@Transactional(readOnly = true)` advises `MongoTransactionManager` to also start a transaction that adds the
|
||||
`ClientSession` to outgoing requests.
|
||||
|
||||
[[mongo.transactions.reactive]]
|
||||
== Reactive Transactions
|
||||
|
||||
Same as with the reactive `ClientSession` support, the `ReactiveMongoTemplate` offers dedicated methods for operating
|
||||
within a transaction without having to worry about the committing or stopping actions depending on the operations outcome.
|
||||
|
||||
NOTE: Unless you specify a `ReactiveMongoTransactionManager` within your application context, transaction support is *DISABLED*. You can use `setSessionSynchronization(ALWAYS)` to participate in ongoing non-native MongoDB transactions.
|
||||
|
||||
Using the plain MongoDB reactive driver API, a `delete` within a transactional flow may look like this.
|
||||
|
||||
.Native driver support
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
Mono<DeleteResult> result = Mono
|
||||
.from(client.startSession()) <1>
|
||||
|
||||
.flatMap(session -> {
|
||||
session.startTransaction(); <2>
|
||||
|
||||
return Mono.from(collection.deleteMany(session, ...)) <3>
|
||||
|
||||
.onErrorResume(e -> Mono.from(session.abortTransaction()).then(Mono.error(e))) <4>
|
||||
|
||||
.flatMap(val -> Mono.from(session.commitTransaction()).then(Mono.just(val))) <5>
|
||||
|
||||
.doFinally(signal -> session.close()); <6>
|
||||
});
|
||||
----
|
||||
<1> First we obviously need to initiate the session.
|
||||
<2> Once we have the `ClientSession` at hand, start the transaction.
|
||||
<3> Operate within the transaction by passing on the `ClientSession` to the operation.
|
||||
<4> If the operation completes exceptionally, we need to abort the transaction and preserve the error.
|
||||
<5> Or, of course, commit the changes in case of success, still preserving the operation's result.
|
||||
<6> Lastly, we need to make sure to close the session.
|
||||
`ClientSession` to outgoing requests.
|
||||
====
|
||||
|
||||
The tricky part of the above operation is keeping the main flow's `DeleteResult` instead of the transaction outcome
|
||||
published via either `commitTransaction()` or `abortTransaction()`, which leads to a rather complicated setup.
|
||||
|
||||
[[mongo.transactions.reactive-operator]]
|
||||
== Transactions with `TransactionalOperator`
|
||||
|
||||
Spring Data MongoDB transactions support a `TransactionalOperator`. The following example shows how to create and use a `TransactionalOperator`:
|
||||
|
||||
.Transactions with `TransactionalOperator`
|
||||
Reactive::
|
||||
+
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
template.setSessionSynchronization(ALWAYS); <1>
|
||||
|
||||
// ...
|
||||
|
||||
TransactionalOperator rxtx = TransactionalOperator.create(anyTxManager,
|
||||
new DefaultTransactionDefinition()); <2>
|
||||
|
||||
|
||||
Step step = // ...;
|
||||
template.insert(step);
|
||||
|
||||
Mono<Void> process(step)
|
||||
.then(template.update(Step.class).apply(Update.set("state", …))
|
||||
.as(rxtx::transactional) <3>
|
||||
.then();
|
||||
----
|
||||
<1> Enable transaction synchronization for Transactional participation.
|
||||
<2> Create the `TransactionalOperator` using the provided `ReactiveTransactionManager`.
|
||||
<3> `TransactionalOperator.transactional(…)` provides transaction management for all upstream operations.
|
||||
====
|
||||
|
||||
[[mongo.transactions.reactive-tx-manager]]
|
||||
== Transactions with `ReactiveMongoTransactionManager`
|
||||
|
||||
`ReactiveMongoTransactionManager` is the gateway to the well known Spring transaction support.
|
||||
It allows applications to leverage https://docs.spring.io/spring-framework/docs/{springVersion}/reference/html/data-access.html#transaction[the managed transaction features of Spring].
|
||||
The `ReactiveMongoTransactionManager` binds a `ClientSession` to the subscriber `Context`.
|
||||
`ReactiveMongoTemplate` detects the session and operates on these resources which are associated with the transaction accordingly.
|
||||
`ReactiveMongoTemplate` can also participate in other, ongoing transactions.
|
||||
The following example shows how to create and use transactions with a `ReactiveMongoTransactionManager`:
|
||||
|
||||
.Transactions with `ReactiveMongoTransactionManager`
|
||||
====
|
||||
[source,java]
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
@Configuration
|
||||
public class Config extends AbstractReactiveMongoConfiguration {
|
||||
@@ -327,9 +332,10 @@ public class StateService {
|
||||
----
|
||||
<1> Register `ReactiveMongoTransactionManager` in the application context.
|
||||
<2> Mark methods as transactional.
|
||||
====
|
||||
|
||||
NOTE: `@Transactional(readOnly = true)` advises `ReactiveMongoTransactionManager` to also start a transaction that adds the `ClientSession` to outgoing requests.
|
||||
====
|
||||
======
|
||||
|
||||
[[mongo.transactions.behavior]]
|
||||
== Special behavior inside transactions
|
||||
90
src/main/antora/modules/ROOT/pages/mongodb/collation.adoc
Normal file
90
src/main/antora/modules/ROOT/pages/mongodb/collation.adoc
Normal file
@@ -0,0 +1,90 @@
|
||||
[[mongo.collation]]
|
||||
= Collations
|
||||
|
||||
Since version 3.4, MongoDB supports collations for collection and index creation and various query operations.
|
||||
Collations define string comparison rules based on the http://userguide.icu-project.org/collation/concepts[ICU collations].
|
||||
A collation document consists of various properties that are encapsulated in `Collation`, as the following listing shows:
|
||||
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
Collation collation = Collation.of("fr") <1>
|
||||
|
||||
.strength(ComparisonLevel.secondary() <2>
|
||||
.includeCase())
|
||||
|
||||
.numericOrderingEnabled() <3>
|
||||
|
||||
.alternate(Alternate.shifted().punct()) <4>
|
||||
|
||||
.forwardDiacriticSort() <5>
|
||||
|
||||
.normalizationEnabled(); <6>
|
||||
----
|
||||
<1> `Collation` requires a locale for creation. This can be either a string representation of the locale, a `Locale` (considering language, country, and variant) or a `CollationLocale`. The locale is mandatory for creation.
|
||||
<2> Collation strength defines comparison levels that denote differences between characters. You can configure various options (case-sensitivity, case-ordering, and others), depending on the selected strength.
|
||||
<3> Specify whether to compare numeric strings as numbers or as strings.
|
||||
<4> Specify whether the collation should consider whitespace and punctuation as base characters for purposes of comparison.
|
||||
<5> Specify whether strings with diacritics sort from back of the string, such as with some French dictionary ordering.
|
||||
<6> Specify whether to check whether text requires normalization and whether to perform normalization.
|
||||
====
|
||||
|
||||
Collations can be used to create collections and indexes. If you create a collection that specifies a collation, the
|
||||
collation is applied to index creation and queries unless you specify a different collation. A collation is valid for a
|
||||
whole operation and cannot be specified on a per-field basis.
|
||||
|
||||
Like other metadata, collations can be derived from the domain type via the `collation` attribute of the `@Document`
|
||||
annotation and will be applied directly when running queries, creating collections or indexes.
|
||||
|
||||
NOTE: Annotated collations will not be used when a collection is auto created by MongoDB on first interaction. This would
|
||||
require additional store interaction delaying the entire process. Please use `MongoOperations.createCollection` for those cases.
|
||||
|
||||
[source,java]
|
||||
----
|
||||
Collation french = Collation.of("fr");
|
||||
Collation german = Collation.of("de");
|
||||
|
||||
template.createCollection(Person.class, CollectionOptions.just(french));
|
||||
|
||||
template.indexOps(Person.class).ensureIndex(new Index("name", Direction.ASC).collation(german));
|
||||
----
|
||||
|
||||
NOTE: MongoDB uses simple binary comparison if no collation is specified (`Collation.simple()`).
|
||||
|
||||
Using collations with collection operations is a matter of specifying a `Collation` instance in your query or operation options, as the following two examples show:
|
||||
|
||||
.Using collation with `find`
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
Collation collation = Collation.of("de");
|
||||
|
||||
Query query = new Query(Criteria.where("firstName").is("Amél")).collation(collation);
|
||||
|
||||
List<Person> results = template.find(query, Person.class);
|
||||
----
|
||||
====
|
||||
|
||||
.Using collation with `aggregate`
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
Collation collation = Collation.of("de");
|
||||
|
||||
AggregationOptions options = AggregationOptions.builder().collation(collation).build();
|
||||
|
||||
Aggregation aggregation = newAggregation(
|
||||
project("tags"),
|
||||
unwind("tags"),
|
||||
group("tags")
|
||||
.count().as("count")
|
||||
).withOptions(options);
|
||||
|
||||
AggregationResults<TagCount> results = template.aggregate(aggregation, "tags", TagCount.class);
|
||||
----
|
||||
====
|
||||
|
||||
WARNING: Indexes are only used if the collation used for the operation matches the index collation.
|
||||
|
||||
xref:mongodb/repositories/repositories.adoc[MongoDB Repositories] support `Collations` via the `collation` attribute of the `@Query` annotation.
|
||||
|
||||
336
src/main/antora/modules/ROOT/pages/mongodb/configuration.adoc
Normal file
336
src/main/antora/modules/ROOT/pages/mongodb/configuration.adoc
Normal file
@@ -0,0 +1,336 @@
|
||||
[[mongodb-connectors]]
|
||||
= Connecting to MongoDB
|
||||
|
||||
One of the first tasks when using MongoDB and Spring is to create a `MongoClient` object using the IoC container.
|
||||
There are two main ways to do this, either by using Java-based bean metadata or by using XML-based bean metadata.
|
||||
|
||||
NOTE: For those not familiar with how to configure the Spring container using Java-based bean metadata instead of XML-based metadata, see the high-level introduction in the reference docs https://docs.spring.io/spring/docs/3.2.x/spring-framework-reference/html/new-in-3.0.html#new-java-configuration[here] as well as the detailed documentation https://docs.spring.io/spring-framework/docs/{springVersion}/reference/html/core.html#beans-java-instantiating-container[here].
|
||||
|
||||
[[mongo.mongo-java-config]]
|
||||
== Registering a Mongo Instance
|
||||
|
||||
The following example shows how to register an instance of a `MongoClient`:
|
||||
|
||||
.Registering `MongoClient`
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
@Configuration
|
||||
public class AppConfig {
|
||||
|
||||
/*
|
||||
* Use the standard Mongo driver API to create a com.mongodb.client.MongoClient instance.
|
||||
*/
|
||||
public @Bean com.mongodb.client.MongoClient mongoClient() {
|
||||
return com.mongodb.client.MongoClients.create("mongodb://localhost:27017");
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
@Configuration
|
||||
public class AppConfig {
|
||||
|
||||
/*
|
||||
* Use the standard Mongo driver API to create a com.mongodb.client.MongoClient instance.
|
||||
*/
|
||||
public @Bean com.mongodb.reactivestreams.client.MongoClient mongoClient() {
|
||||
return com.mongodb.reactivestreams.client.MongoClients.create("mongodb://localhost:27017");
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
XML::
|
||||
+
|
||||
[source,xml,indent=0,subs="verbatim,quotes",role="third"]
|
||||
----
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<beans xmlns="http://www.springframework.org/schema/beans"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:mongo="http://www.springframework.org/schema/data/mongo"
|
||||
xsi:schemaLocation=
|
||||
"
|
||||
http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd
|
||||
http://www.springframework.org/schema/beans
|
||||
https://www.springframework.org/schema/beans/spring-beans.xsd">
|
||||
|
||||
<!-- Default bean name is 'mongo' -->
|
||||
<mongo:mongo-client host="localhost" port="27017"/>
|
||||
|
||||
</beans>
|
||||
----
|
||||
======
|
||||
|
||||
This approach lets you use the standard `MongoClient` instance, with the container using Spring's `MongoClientFactoryBean`/`ReactiveMongoClientFactoryBean`.
|
||||
As compared to instantiating a `MongoClient` instance directly, the `FactoryBean` has the added advantage of also providing the container with an `ExceptionTranslator` implementation that translates MongoDB exceptions to exceptions in Spring's portable `DataAccessException` hierarchy for data access classes annotated with the `@Repository` annotation.
|
||||
This hierarchy and the use of `@Repository` is described in link:{springDocsUrl}/data-access.html[Spring's DAO support features].
|
||||
|
||||
The following example shows Java-based bean metadata that supports exception translation on `@Repository` annotated classes:
|
||||
|
||||
.Registering a `MongoClient` via `MongoClientFactoryBean` / `ReactiveMongoClientFactoryBean`
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
@Configuration
|
||||
public class AppConfig {
|
||||
|
||||
/*
|
||||
* Factory bean that creates the com.mongodb.client.MongoClient instance
|
||||
*/
|
||||
public @Bean MongoClientFactoryBean mongo() {
|
||||
MongoClientFactoryBean mongo = new MongoClientFactoryBean();
|
||||
mongo.setHost("localhost");
|
||||
return mongo;
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
@Configuration
|
||||
public class AppConfig {
|
||||
|
||||
/*
|
||||
* Factory bean that creates the com.mongodb.reactivestreams.client.MongoClient instance
|
||||
*/
|
||||
public @Bean ReactiveMongoClientFactoryBean mongo() {
|
||||
ReactiveMongoClientFactoryBean mongo = new ReactiveMongoClientFactoryBean();
|
||||
mongo.setHost("localhost");
|
||||
return mongo;
|
||||
}
|
||||
}
|
||||
----
|
||||
======
|
||||
|
||||
To access the `MongoClient` object created by the `FactoryBean` in other `@Configuration` classes or your own classes, use a `private @Autowired MongoClient mongoClient;` field.
|
||||
|
||||
[[mongo.mongo-db-factory]]
|
||||
== The MongoDatabaseFactory Interface
|
||||
|
||||
While `MongoClient` is the entry point to the MongoDB driver API, connecting to a specific MongoDB database instance requires additional information, such as the database name and an optional username and password.
|
||||
With that information, you can obtain a `MongoDatabase` object and access all the functionality of a specific MongoDB database instance.
|
||||
Spring provides the `org.springframework.data.mongodb.core.MongoDatabaseFactory` & `org.springframework.data.mongodb.core.ReactiveMongoDatabaseFactory` interfaces, shown in the following listing, to bootstrap connectivity to the database:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
public interface MongoDatabaseFactory {
|
||||
|
||||
MongoDatabase getDatabase() throws DataAccessException;
|
||||
|
||||
MongoDatabase getDatabase(String dbName) throws DataAccessException;
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
public interface ReactiveMongoDatabaseFactory {
|
||||
|
||||
Mono<MongoDatabase> getDatabase() throws DataAccessException;
|
||||
|
||||
Mono<MongoDatabase> getDatabase(String dbName) throws DataAccessException;
|
||||
}
|
||||
----
|
||||
======
|
||||
|
||||
The following sections show how you can use the container with either Java-based or XML-based metadata to configure an instance of the `MongoDatabaseFactory` interface.
|
||||
In turn, you can use the `MongoDatabaseFactory` / `ReactiveMongoDatabaseFactory` instance to configure `MongoTemplate` / `ReactiveMongoTemplate`.
|
||||
|
||||
Instead of using the IoC container to create an instance of the template, you can use them in standard Java code, as follows:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
public class MongoApplication {
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
MongoOperations mongoOps = new MongoTemplate(new SimpleMongoClientDatabaseFactory(MongoClients.create(), "database"));
|
||||
|
||||
// ...
|
||||
}
|
||||
}
|
||||
----
|
||||
The code in bold highlights the use of `SimpleMongoClientDbFactory` and is the only difference between the listing shown in the xref:mongodb/getting-started.adoc[getting started section].
|
||||
Use `SimpleMongoClientDbFactory` when choosing `com.mongodb.client.MongoClient` as the entrypoint of choice.
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
public class ReactiveMongoApplication {
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
ReactiveMongoOperations mongoOps = new MongoTemplate(new SimpleReactiveMongoDatabaseFactory(MongoClients.create(), "database"));
|
||||
|
||||
// ...
|
||||
}
|
||||
}
|
||||
----
|
||||
======
|
||||
|
||||
[[mongo.mongo-db-factory-java]]
|
||||
[[mongo.mongo-db-factory.config]]
|
||||
== Registering a `MongoDatabaseFactory` / `ReactiveMongoDatabaseFactory`
|
||||
|
||||
To register a `MongoDatabaseFactory`/ `ReactiveMongoDatabaseFactory` instance with the container, you write code much like what was highlighted in the previous section.
|
||||
The following listing shows a simple example:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
@Configuration
|
||||
public class MongoConfiguration {
|
||||
|
||||
@Bean
|
||||
public MongoDatabaseFactory mongoDatabaseFactory() {
|
||||
return new SimpleMongoClientDatabaseFactory(MongoClients.create(), "database");
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
@Configuration
|
||||
public class ReactiveMongoConfiguration {
|
||||
|
||||
@Bean
|
||||
public ReactiveMongoDatabaseFactory mongoDatabaseFactory() {
|
||||
return new SimpleReactiveMongoDatabaseFactory(MongoClients.create(), "database");
|
||||
}
|
||||
}
|
||||
----
|
||||
======
|
||||
|
||||
MongoDB Server generation 3 changed the authentication model when connecting to the DB.
|
||||
Therefore, some of the configuration options available for authentication are no longer valid.
|
||||
You should use the `MongoClient`-specific options for setting credentials through `MongoCredential` to provide authentication data, as shown in the following example:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Java::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
@Configuration
|
||||
public class MongoAppConfig extends AbstractMongoClientConfiguration {
|
||||
|
||||
@Override
|
||||
public String getDatabaseName() {
|
||||
return "database";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void configureClientSettings(Builder builder) {
|
||||
|
||||
builder
|
||||
.credential(MongoCredential.createCredential("name", "db", "pwd".toCharArray()))
|
||||
.applyToClusterSettings(settings -> {
|
||||
settings.hosts(singletonList(new ServerAddress("127.0.0.1", 27017)));
|
||||
});
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
XML::
|
||||
+
|
||||
[source,xml,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
<mongo:db-factory dbname="database" />
|
||||
----
|
||||
Username and password credentials used in XML-based configuration must be URL-encoded when these contain reserved characters, such as `:`, `%`, `@`, or `,`.
|
||||
The following example shows encoded credentials:
|
||||
`m0ng0@dmin:mo_res:bw6},Qsdxx@admin@database` -> `m0ng0%40dmin:mo_res%3Abw6%7D%2CQsdxx%40admin@database`
|
||||
See https://tools.ietf.org/html/rfc3986#section-2.2[section 2.2 of RFC 3986] for further details.
|
||||
======
|
||||
|
||||
If you need to configure additional options on the `com.mongodb.client.MongoClient` instance that is used to create a `SimpleMongoClientDbFactory`, you can refer to an existing bean as shown in the following example. To show another common usage pattern, the following listing shows the use of a property placeholder, which lets you parametrize the configuration and the creation of a `MongoTemplate`:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Java::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
@Configuration
|
||||
@PropertySource("classpath:/com/myapp/mongodb/config/mongo.properties")
|
||||
public class MongoAppConfig extends AbstractMongoClientConfiguration {
|
||||
|
||||
@Autowired
|
||||
Environment env;
|
||||
|
||||
@Override
|
||||
public String getDatabaseName() {
|
||||
return "database";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void configureClientSettings(Builder builder) {
|
||||
|
||||
builder.applyToClusterSettings(settings -> {
|
||||
settings.hosts(singletonList(
|
||||
new ServerAddress(env.getProperty("mongo.host"), env.getProperty("mongo.port", Integer.class))));
|
||||
});
|
||||
|
||||
builder.applyToConnectionPoolSettings(settings -> {
|
||||
|
||||
settings.maxConnectionLifeTime(env.getProperty("mongo.pool-max-life-time", Integer.class), TimeUnit.MILLISECONDS)
|
||||
.minSize(env.getProperty("mongo.pool-min-size", Integer.class))
|
||||
.maxSize(env.getProperty("mongo.pool-max-size", Integer.class))
|
||||
.maintenanceFrequency(10, TimeUnit.MILLISECONDS)
|
||||
.maintenanceInitialDelay(11, TimeUnit.MILLISECONDS)
|
||||
.maxConnectionIdleTime(30, TimeUnit.SECONDS)
|
||||
.maxWaitTime(15, TimeUnit.MILLISECONDS);
|
||||
});
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
XML::
|
||||
+
|
||||
[source,xml,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
<context:property-placeholder location="classpath:/com/myapp/mongodb/config/mongo.properties"/>
|
||||
|
||||
<mongo:mongo-client host="${mongo.host}" port="${mongo.port}">
|
||||
<mongo:client-settings connection-pool-max-connection-life-time="${mongo.pool-max-life-time}"
|
||||
connection-pool-min-size="${mongo.pool-min-size}"
|
||||
connection-pool-max-size="${mongo.pool-max-size}"
|
||||
connection-pool-maintenance-frequency="10"
|
||||
connection-pool-maintenance-initial-delay="11"
|
||||
connection-pool-max-connection-idle-time="30"
|
||||
connection-pool-max-wait-time="15" />
|
||||
</mongo:mongo-client>
|
||||
|
||||
<mongo:db-factory dbname="database" mongo-ref="mongoClient"/>
|
||||
|
||||
<bean id="anotherMongoTemplate" class="org.springframework.data.mongodb.core.MongoTemplate">
|
||||
<constructor-arg name="mongoDbFactory" ref="mongoDbFactory"/>
|
||||
</bean>
|
||||
----
|
||||
======
|
||||
@@ -0,0 +1,125 @@
|
||||
[[mongo-template.type-mapping]]
|
||||
== Type Mapping
|
||||
|
||||
MongoDB collections can contain documents that represent instances of a variety of types.
|
||||
This feature can be useful if you store a hierarchy of classes or have a class with a property of type `Object`. In the latter case, the values held inside that property have to be read in correctly when retrieving the object. Thus, we need a mechanism to store type information alongside the actual document.
|
||||
|
||||
To achieve that, the `MappingMongoConverter` uses a `MongoTypeMapper` abstraction with `DefaultMongoTypeMapper` as its main implementation. Its default behavior is to store the fully qualified classname under `_class` inside the document. Type hints are written for top-level documents as well as for every value (if it is a complex type and a subtype of the declared property type). The following example (with a JSON representation at the end) shows how the mapping works:
|
||||
|
||||
.Type mapping
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
class Sample {
|
||||
Contact value;
|
||||
}
|
||||
|
||||
abstract class Contact { … }
|
||||
|
||||
class Person extends Contact { … }
|
||||
|
||||
Sample sample = new Sample();
|
||||
sample.value = new Person();
|
||||
|
||||
mongoTemplate.save(sample);
|
||||
|
||||
{
|
||||
"value" : { "_class" : "com.acme.Person" },
|
||||
"_class" : "com.acme.Sample"
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
Spring Data MongoDB stores the type information as the last field for the actual root class as well as for the nested type (because it is complex and a subtype of `Contact`). So, if you now use `mongoTemplate.findAll(Object.class, "sample")`, you can find out that the document stored is a `Sample` instance. You can also find out that the value property is actually a `Person`.
|
||||
|
||||
[[customizing-type-mapping]]
|
||||
=== Customizing Type Mapping
|
||||
|
||||
If you want to avoid writing the entire Java class name as type information but would rather like to use a key, you can use the `@TypeAlias` annotation on the entity class. If you need to customize the mapping even more, have a look at the `TypeInformationMapper` interface. An instance of that interface can be configured at the `DefaultMongoTypeMapper`, which can, in turn, be configured on `MappingMongoConverter`. The following example shows how to define a type alias for an entity:
|
||||
|
||||
.Defining a type alias for an Entity
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@TypeAlias("pers")
|
||||
class Person {
|
||||
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
Note that the resulting document contains `pers` as the value in the `_class` field.
|
||||
|
||||
[WARNING]
|
||||
====
|
||||
Type aliases only work if the mapping context is aware of the actual type.
|
||||
The required entity metadata is determined either on first save or has to be provided via the configuration's initial entity set.
|
||||
By default, the configuration class scans the base package for potential candidates.
|
||||
|
||||
[source,java]
|
||||
----
|
||||
@Configuration
|
||||
class AppConfig extends AbstractMongoClientConfiguration {
|
||||
|
||||
@Override
|
||||
protected Set<Class<?>> getInitialEntitySet() {
|
||||
return Collections.singleton(Person.class);
|
||||
}
|
||||
|
||||
// ...
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
[[configuring-custom-type-mapping]]
|
||||
=== Configuring Custom Type Mapping
|
||||
|
||||
The following example shows how to configure a custom `MongoTypeMapper` in `MappingMongoConverter`:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
class CustomMongoTypeMapper extends DefaultMongoTypeMapper {
|
||||
//implement custom type mapping here
|
||||
}
|
||||
----
|
||||
|
||||
.Configuring a custom `MongoTypeMapper`
|
||||
====
|
||||
.Java
|
||||
[source,java,role="primary"]
|
||||
----
|
||||
@Configuration
|
||||
class SampleMongoConfiguration extends AbstractMongoClientConfiguration {
|
||||
|
||||
@Override
|
||||
protected String getDatabaseName() {
|
||||
return "database";
|
||||
}
|
||||
|
||||
@Bean
|
||||
@Override
|
||||
public MappingMongoConverter mappingMongoConverter(MongoDatabaseFactory databaseFactory,
|
||||
MongoCustomConversions customConversions, MongoMappingContext mappingContext) {
|
||||
    MappingMongoConverter mmc = super.mappingMongoConverter(databaseFactory, customConversions, mappingContext);
|
||||
mmc.setTypeMapper(customTypeMapper());
|
||||
return mmc;
|
||||
}
|
||||
|
||||
@Bean
|
||||
public MongoTypeMapper customTypeMapper() {
|
||||
return new CustomMongoTypeMapper();
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
.XML
|
||||
[source,xml,role="secondary"]
|
||||
----
|
||||
<mongo:mapping-converter type-mapper-ref="customMongoTypeMapper"/>
|
||||
|
||||
<bean name="customMongoTypeMapper" class="com.acme.CustomMongoTypeMapper"/>
|
||||
----
|
||||
====
|
||||
|
||||
Note that the preceding example extends the `AbstractMongoClientConfiguration` class and overrides the bean definition of the `MappingMongoConverter` where we configured our custom `MongoTypeMapper`.
|
||||
|
||||
40
src/main/antora/modules/ROOT/pages/mongodb/geo-json.adoc
Normal file
40
src/main/antora/modules/ROOT/pages/mongodb/geo-json.adoc
Normal file
@@ -0,0 +1,40 @@
|
||||
TODO: add the following section somewhere
|
||||
|
||||
[[mongo.geo-json.jackson-modules]]
|
||||
== GeoJSON Jackson Modules
|
||||
|
||||
By using the <<core.web>>, Spring Data registers additional Jackson ``Module``s to the `ObjectMapper` for de-/serializing common Spring Data domain types.
|
||||
Please refer to the <<core.web.basic.jackson-mappers>> section to learn more about the infrastructure setup of this feature.
|
||||
|
||||
The MongoDB module additionally registers ``JsonDeserializer``s for the following GeoJSON types via its `GeoJsonConfiguration` exposing the `GeoJsonModule`.
|
||||
----
|
||||
org.springframework.data.mongodb.core.geo.GeoJsonPoint
|
||||
org.springframework.data.mongodb.core.geo.GeoJsonMultiPoint
|
||||
org.springframework.data.mongodb.core.geo.GeoJsonLineString
|
||||
org.springframework.data.mongodb.core.geo.GeoJsonMultiLineString
|
||||
org.springframework.data.mongodb.core.geo.GeoJsonPolygon
|
||||
org.springframework.data.mongodb.core.geo.GeoJsonMultiPolygon
|
||||
----
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
The `GeoJsonModule` only registers ``JsonDeserializer``s! +
|
||||
To equip the `ObjectMapper` with a symmetric set of ``JsonSerializer``s you need to either manually configure those for the `ObjectMapper` or provide a custom `SpringDataJacksonModules` configuration exposing `GeoJsonModule.serializers()` as a Spring Bean.
|
||||
|
||||
[source,java]
|
||||
----
|
||||
class GeoJsonConfiguration implements SpringDataJacksonModules {
|
||||
|
||||
@Bean
|
||||
public Module geoJsonSerializers() {
|
||||
return GeoJsonModule.serializers();
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
[WARNING]
|
||||
====
|
||||
The next major version (`4.0`) will register both ``JsonDeserializer``s and ``JsonSerializer``s for GeoJSON types by default.
|
||||
====
|
||||
|
||||
@@ -0,0 +1,62 @@
|
||||
[[mongodb-getting-started]]
|
||||
= Getting Started
|
||||
|
||||
An easy way to bootstrap setting up a working environment is to create a Spring-based project via https://start.spring.io/#!type=maven-project&dependencies=data-mongodb[start.spring.io] or create a Spring project in https://spring.io/tools[Spring Tools].
|
||||
|
||||
[[mongo.examples-repo]]
|
||||
== Examples Repository
|
||||
|
||||
The GitHub https://github.com/spring-projects/spring-data-examples[spring-data-examples repository] hosts several examples that you can download and play around with to get a feel for how the library works.
|
||||
|
||||
[[mongodb.hello-world]]
|
||||
== Hello World
|
||||
|
||||
First, you need to set up a running MongoDB server. Refer to the https://docs.mongodb.org/manual/core/introduction/[MongoDB Quick Start guide] for an explanation on how to start up a MongoDB instance.
|
||||
Once installed, starting MongoDB is typically a matter of running the following command: `/bin/mongod`
|
||||
|
||||
Then you can create a `Person` class to persist:
|
||||
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::example$example/Person.java[tags=file]
|
||||
----
|
||||
====
|
||||
|
||||
You also need a main application to run:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
include::example$example/MongoApplication.java[tags=file]
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
include::example$example/ReactiveMongoApplication.java[tags=file]
|
||||
----
|
||||
======
|
||||
|
||||
When you run the main program, the preceding examples produce the following output:
|
||||
|
||||
[source]
|
||||
----
|
||||
10:01:32,265 DEBUG o.s.data.mongodb.core.MongoTemplate - insert Document containing fields: [_class, age, name] in collection: Person
|
||||
10:01:32,765 DEBUG o.s.data.mongodb.core.MongoTemplate - findOne using query: { "name" : "Joe"} in db.collection: database.Person
|
||||
Person [id=4ddbba3c0be56b7e1b210166, name=Joe, age=34]
|
||||
10:01:32,984 DEBUG o.s.data.mongodb.core.MongoTemplate - Dropped collection [database.person]
|
||||
----
|
||||
|
||||
Even in this simple example, there are few things to notice:
|
||||
|
||||
* You can instantiate the central helper class of Spring Mongo, xref:mongodb/template-api.adoc[`MongoTemplate`], by using the standard or reactive `MongoClient` object and the name of the database to use.
|
||||
* The mapper works against standard POJO objects without the need for any additional metadata (though you can optionally provide that information. See xref:mongodb/mapping/mapping.adoc[here].).
|
||||
* Conventions are used for handling the `id` field, converting it to be an `ObjectId` when stored in the database.
|
||||
* Mapping conventions can use field access. Notice that the `Person` class has only getters.
|
||||
* If the constructor argument names match the field names of the stored document, they are used to instantiate the object
|
||||
|
||||
106
src/main/antora/modules/ROOT/pages/mongodb/lifecycle-events.adoc
Normal file
106
src/main/antora/modules/ROOT/pages/mongodb/lifecycle-events.adoc
Normal file
@@ -0,0 +1,106 @@
|
||||
[[mongodb.mapping-usage.events]]
|
||||
= Lifecycle Events
|
||||
|
||||
The MongoDB mapping framework includes several `org.springframework.context.ApplicationEvent` events that your application can respond to by registering special beans in the `ApplicationContext`.
|
||||
Being based on Spring's `ApplicationContext` event infrastructure enables other products, such as Spring Integration, to easily receive these events, as they are a well known eventing mechanism in Spring-based applications.
|
||||
|
||||
Entity lifecycle events can be costly and you may notice a change in the performance profile when loading large result sets.
|
||||
You can disable lifecycle events on the link:https://docs.spring.io/spring-data/mongodb/docs/{version}/api/org/springframework/data/mongodb/core/MongoTemplate.html#setEntityLifecycleEventsEnabled(boolean)[Template API].
|
||||
|
||||
To intercept an object before it goes through the conversion process (which turns your domain object into a `org.bson.Document`), you can register a subclass of `AbstractMongoEventListener` that overrides the `onBeforeConvert` method.
|
||||
When the event is dispatched, your listener is called and passed the domain object before it goes into the converter.
|
||||
The following example shows how to do so:
|
||||
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
public class BeforeConvertListener extends AbstractMongoEventListener<Person> {
|
||||
@Override
|
||||
public void onBeforeConvert(BeforeConvertEvent<Person> event) {
|
||||
... does some auditing manipulation, set timestamps, whatever ...
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
To intercept an object before it goes into the database, you can register a subclass of `org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener` that overrides the `onBeforeSave` method. When the event is dispatched, your listener is called and passed the domain object and the converted `com.mongodb.Document`. The following example shows how to do so:
|
||||
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
public class BeforeSaveListener extends AbstractMongoEventListener<Person> {
|
||||
@Override
|
||||
public void onBeforeSave(BeforeSaveEvent<Person> event) {
|
||||
… change values, delete them, whatever …
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
Declaring these beans in your Spring ApplicationContext causes them to be invoked whenever the event is dispatched.
|
||||
|
||||
.Callbacks on `AbstractMongoEventListener`:
|
||||
[%collapsible]
|
||||
====
|
||||
* `onBeforeConvert`: Called in `MongoTemplate` `insert`, `insertList`, and `save` operations before the object is converted to a `Document` by a `MongoConverter`.
|
||||
* `onBeforeSave`: Called in `MongoTemplate` `insert`, `insertList`, and `save` operations *before* inserting or saving the `Document` in the database.
|
||||
* `onAfterSave`: Called in `MongoTemplate` `insert`, `insertList`, and `save` operations *after* inserting or saving the `Document` in the database.
|
||||
* `onAfterLoad`: Called in `MongoTemplate` `find`, `findAndRemove`, `findOne`, and `getCollection` methods after the `Document` has been retrieved from the database.
|
||||
* `onAfterConvert`: Called in `MongoTemplate` `find`, `findAndRemove`, `findOne`, and `getCollection` methods after the `Document` retrieved from the database was converted to a POJO.
|
||||
====
|
||||
|
||||
NOTE: Lifecycle events are only emitted for root level types.
|
||||
Complex types used as properties within a document root are not subject to event publication unless they are document references annotated with `@DBRef`.
|
||||
|
||||
WARNING: Lifecycle events depend on an `ApplicationEventMulticaster`, which in case of the `SimpleApplicationEventMulticaster` can be configured with a `TaskExecutor`, and therefore gives no guarantees when an Event is processed.
|
||||
|
||||
include::{commons}@data-commons::page$entity-callbacks.adoc[leveloffset=+1]
|
||||
|
||||
[[mongo.entity-callbacks]]
|
||||
== Store specific EntityCallbacks
|
||||
|
||||
Spring Data MongoDB uses the `EntityCallback` API for its auditing support and reacts on the following callbacks.
|
||||
|
||||
.Supported Entity Callbacks
|
||||
[%header,cols="4"]
|
||||
|===
|
||||
| Callback
|
||||
| Method
|
||||
| Description
|
||||
| Order
|
||||
|
||||
| `ReactiveBeforeConvertCallback`
|
||||
`BeforeConvertCallback`
|
||||
| `onBeforeConvert(T entity, String collection)`
|
||||
| Invoked before a domain object is converted to `org.bson.Document`.
|
||||
| `Ordered.LOWEST_PRECEDENCE`
|
||||
|
||||
| `ReactiveAfterConvertCallback`
|
||||
`AfterConvertCallback`
|
||||
| `onAfterConvert(T entity, org.bson.Document target, String collection)`
|
||||
| Invoked after a domain object is loaded. +
|
||||
Can modify the domain object after reading it from a `org.bson.Document`.
|
||||
| `Ordered.LOWEST_PRECEDENCE`
|
||||
|
||||
| `ReactiveAuditingEntityCallback`
|
||||
`AuditingEntityCallback`
|
||||
| `onBeforeConvert(Object entity, String collection)`
|
||||
| Marks an auditable entity _created_ or _modified_
|
||||
| 100
|
||||
|
||||
| `ReactiveBeforeSaveCallback`
|
||||
`BeforeSaveCallback`
|
||||
| `onBeforeSave(T entity, org.bson.Document target, String collection)`
|
||||
| Invoked before a domain object is saved. +
|
||||
Can modify the target, to be persisted, `Document` containing all mapped entity information.
|
||||
| `Ordered.LOWEST_PRECEDENCE`
|
||||
|
||||
| `ReactiveAfterSaveCallback`
|
||||
`AfterSaveCallback`
|
||||
| `onAfterSave(T entity, org.bson.Document target, String collection)`
|
||||
| Invoked after a domain object is saved. +
|
||||
Can modify the domain object, to be returned after save, `Document` containing all mapped entity information.
|
||||
| `Ordered.LOWEST_PRECEDENCE`
|
||||
|
||||
|===
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
[[mongo.custom-converters]]
|
||||
== Custom Conversions - Overriding Default Mapping
|
||||
include::{commons}@data-commons::page$custom-conversions.adoc[]
|
||||
|
||||
The most trivial way of influencing the mapping result is by specifying the desired native MongoDB target type via the
|
||||
`@Field` annotation. This allows to work with non MongoDB types like `BigDecimal` in the domain model while persisting
|
||||
values in native `org.bson.types.Decimal128` format.
|
||||
[[mongo.custom-converters]]
|
||||
== Type based Converter
|
||||
|
||||
The most trivial way of influencing the mapping result is by specifying the desired native MongoDB target type via the `@Field` annotation.
|
||||
This allows to work with non MongoDB types like `BigDecimal` in the domain model while persisting values in native `org.bson.types.Decimal128` format.
|
||||
|
||||
.Explicit target type mapping
|
||||
====
|
||||
@@ -20,6 +21,7 @@ public class Payment {
|
||||
|
||||
}
|
||||
----
|
||||
|
||||
[source,java]
|
||||
----
|
||||
{
|
||||
@@ -28,7 +30,7 @@ public class Payment {
|
||||
"date" : ISODate("2019-04-03T12:11:01.870Z") <3>
|
||||
}
|
||||
----
|
||||
<1> String _id_ values that represent a valid `ObjectId` are converted automatically. See <<mongo-template.id-handling>>
|
||||
<1> String _id_ values that represent a valid `ObjectId` are converted automatically. See xref:mongodb/template-crud-operations.adoc#mongo-template.id-handling[How the `_id` Field is Handled in the Mapping Layer]
|
||||
for details.
|
||||
<2> The desired target type is explicitly defined as `Decimal128` which translates to `NumberDecimal`. Otherwise the
|
||||
`BigDecimal` value would have been turned into a `String`.
|
||||
@@ -43,7 +45,7 @@ The `MappingMongoConverter` checks to see if any Spring converters can handle a
|
||||
NOTE: For more information on the Spring type conversion service, see the reference docs link:{springDocsUrl}/core.html#validation[here].
|
||||
|
||||
[[mongo.custom-converters.writer]]
|
||||
=== Saving by Using a Registered Spring Converter
|
||||
=== Writing Converter
|
||||
|
||||
The following example shows an implementation of the `Converter` that converts from a `Person` object to a `org.bson.Document`:
|
||||
|
||||
@@ -66,7 +68,7 @@ public class PersonWriteConverter implements Converter<Person, Document> {
|
||||
----
|
||||
|
||||
[[mongo.custom-converters.reader]]
|
||||
=== Reading by Using a Spring Converter
|
||||
=== Reading Converter
|
||||
|
||||
The following example shows an implementation of a `Converter` that converts from a `Document` to a `Person` object:
|
||||
|
||||
@@ -83,7 +85,7 @@ public class PersonReadConverter implements Converter<Document, Person> {
|
||||
----
|
||||
|
||||
[[mongo.custom-converters.xml]]
|
||||
=== Registering Spring Converters with the `MongoConverter`
|
||||
=== Registering Converters
|
||||
|
||||
[source,java]
|
||||
----
|
||||
@@ -101,5 +103,3 @@ class MyMongoConfiguration extends AbstractMongoClientConfiguration {
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
include::../{spring-data-commons-docs}/custom-conversions.adoc[leveloffset=+3]
|
||||
@@ -1,5 +1,5 @@
|
||||
[[mapping-usage-references]]
|
||||
=== Using DBRefs
|
||||
= Using DBRefs
|
||||
|
||||
The mapping framework does not have to store child objects embedded within the document.
|
||||
You can also store them separately and use a `DBRef` to refer to that document.
|
||||
@@ -53,10 +53,10 @@ CAUTION: Lazy loading may require class proxies, that in turn, might need access
|
||||
For those cases please consider falling back to an interface type (eg. switch from `ArrayList` to `List`) or provide the required `--add-opens` argument.
|
||||
|
||||
[[mapping-usage.document-references]]
|
||||
=== Using Document References
|
||||
== Using Document References
|
||||
|
||||
Using `@DocumentReference` offers a flexible way of referencing entities in MongoDB.
|
||||
While the goal is the same as when using <<mapping-usage-references,DBRefs>>, the store representation is different.
|
||||
While the goal is the same as when using xref:mongodb/mapping/document-references.adoc[DBRefs], the store representation is different.
|
||||
`DBRef` resolves to a document with a fixed structure as outlined in the https://docs.mongodb.com/manual/reference/database-references/[MongoDB Reference documentation]. +
|
||||
Document references, do not follow a specific format.
|
||||
They can be literally anything, a single value, an entire document, basically everything that can be stored in MongoDB.
|
||||
@@ -0,0 +1,340 @@
|
||||
[[mapping.index-creation]]
|
||||
= Index Creation
|
||||
|
||||
Spring Data MongoDB can automatically create indexes for entity types annotated with `@Document`.
|
||||
Index creation must be explicitly enabled since version 3.0 to prevent undesired effects with collection lifecycle and performance impact.
|
||||
Indexes are automatically created for the initial entity set on application startup and when accessing an entity type for the first time while the application runs.
|
||||
|
||||
We generally recommend explicit index creation for application-based control of indexes as Spring Data cannot automatically create indexes for collections that were recreated while the application was running.
|
||||
|
||||
`IndexResolver` provides an abstraction for programmatic index definition creation if you want to make use of `@Indexed` annotations such as `@GeoSpatialIndexed`, `@TextIndexed`, `@CompoundIndex` and `@WildcardIndexed`.
|
||||
You can use index definitions with `IndexOperations` to create indexes.
|
||||
A good point in time for index creation is on application startup, specifically after the application context was refreshed, triggered by observing `ContextRefreshedEvent`.
|
||||
This event guarantees that the context is fully initialized.
|
||||
Note that at this time other components, especially bean factories, might have access to the MongoDB database.
|
||||
|
||||
[WARNING]
|
||||
====
|
||||
``Map``-like properties are skipped by the `IndexResolver` unless annotated with `@WildcardIndexed` because the _map key_ must be part of the index definition. Since the purpose of maps is the usage of dynamic keys and values, the keys cannot be resolved from static mapping metadata.
|
||||
====
|
||||
|
||||
.Programmatic Index Creation for a single Domain Type
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
class MyListener {
|
||||
|
||||
@EventListener(ContextRefreshedEvent.class)
|
||||
public void initIndicesAfterStartup() {
|
||||
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext = mongoTemplate
|
||||
.getConverter().getMappingContext();
|
||||
|
||||
IndexResolver resolver = new MongoPersistentEntityIndexResolver(mappingContext);
|
||||
|
||||
IndexOperations indexOps = mongoTemplate.indexOps(DomainType.class);
|
||||
resolver.resolveIndexFor(DomainType.class).forEach(indexOps::ensureIndex);
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
.Programmatic Index Creation for all Initial Entities
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
class MyListener{
|
||||
|
||||
@EventListener(ContextRefreshedEvent.class)
|
||||
public void initIndicesAfterStartup() {
|
||||
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext = mongoTemplate
|
||||
.getConverter().getMappingContext();
|
||||
|
||||
IndexResolver resolver = new MongoPersistentEntityIndexResolver(mappingContext);

// consider only entities that are annotated with @Document
|
||||
mappingContext.getPersistentEntities()
|
||||
.stream()
|
||||
.filter(it -> it.isAnnotationPresent(Document.class))
|
||||
.forEach(it -> {
|
||||
|
||||
IndexOperations indexOps = mongoTemplate.indexOps(it.getType());
|
||||
resolver.resolveIndexFor(it.getType()).forEach(indexOps::ensureIndex);
|
||||
});
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
Alternatively, if you want to ensure index and collection presence before any component is able to access your database from your application, declare a `@Bean` method for `MongoTemplate` and include the code from above before returning the `MongoTemplate` object.
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
To turn automatic index creation _ON_ please override `autoIndexCreation()` in your configuration.
|
||||
[source,java]
|
||||
----
|
||||
@Configuration
|
||||
public class Config extends AbstractMongoClientConfiguration {
|
||||
|
||||
@Override
|
||||
public boolean autoIndexCreation() {
|
||||
return true;
|
||||
}
|
||||
|
||||
// ...
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
IMPORTANT: Automatic index creation is turned _OFF_ by default as of version 3.0.
|
||||
|
||||
[[mapping-usage-indexes.compound-index]]
|
||||
== Compound Indexes
|
||||
|
||||
Compound indexes are also supported. They are defined at the class level, rather than on individual properties.
|
||||
|
||||
NOTE: Compound indexes are very important to improve the performance of queries that involve criteria on multiple fields
|
||||
|
||||
Here's an example that creates a compound index of `lastName` in ascending order and `age` in descending order:
|
||||
|
||||
.Example Compound Index Usage
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
package com.mycompany.domain;
|
||||
|
||||
@Document
|
||||
@CompoundIndex(name = "age_idx", def = "{'lastName': 1, 'age': -1}")
|
||||
public class Person {
|
||||
|
||||
@Id
|
||||
private ObjectId id;
|
||||
private Integer age;
|
||||
private String firstName;
|
||||
private String lastName;
|
||||
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
[TIP]
|
||||
====
|
||||
`@CompoundIndex` is repeatable using `@CompoundIndexes` as its container.
|
||||
|
||||
[source,java]
|
||||
----
|
||||
@Document
|
||||
@CompoundIndex(name = "cmp-idx-one", def = "{'firstname': 1, 'lastname': -1}")
|
||||
@CompoundIndex(name = "cmp-idx-two", def = "{'address.city': -1, 'address.street': 1}")
|
||||
public class Person {
|
||||
|
||||
String firstname;
|
||||
String lastname;
|
||||
|
||||
Address address;
|
||||
|
||||
// ...
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
[[mapping-usage-indexes.hashed-index]]
|
||||
== Hashed Indexes
|
||||
|
||||
Hashed indexes allow hash based sharding within a sharded cluster.
|
||||
Using hashed field values to shard collections results in a more random distribution.
|
||||
For details, refer to the https://docs.mongodb.com/manual/core/index-hashed/[MongoDB Documentation].
|
||||
|
||||
Here's an example that creates a hashed index for `_id`:
|
||||
|
||||
.Example Hashed Index Usage
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Document
|
||||
public class DomainType {
|
||||
|
||||
@HashIndexed @Id String id;
|
||||
|
||||
// ...
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
Hashed indexes can be created next to other index definitions like shown below, in that case both indices are created:
|
||||
|
||||
.Example Hashed Index Usage together with simple index
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Document
|
||||
public class DomainType {
|
||||
|
||||
@Indexed
|
||||
@HashIndexed
|
||||
String value;
|
||||
|
||||
// ...
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
In case the example above is too verbose, a compound annotation allows to reduce the number of annotations that need to be declared on a property:
|
||||
|
||||
.Example Composed Hashed Index Usage
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Document
|
||||
public class DomainType {
|
||||
|
||||
@IndexAndHash(name = "idx...") <1>
|
||||
String value;
|
||||
|
||||
// ...
|
||||
}
|
||||
|
||||
@Indexed
|
||||
@HashIndexed
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
public @interface IndexAndHash {
|
||||
|
||||
@AliasFor(annotation = Indexed.class, attribute = "name") <1>
|
||||
String name() default "";
|
||||
}
|
||||
----
|
||||
<1> Potentially register an alias for certain attributes of the meta annotation.
|
||||
====
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
Although index creation via annotations comes in handy for many scenarios, consider taking over more control by setting up indices manually via `IndexOperations`.
|
||||
|
||||
[source,java]
|
||||
----
|
||||
mongoOperations.indexOpsFor(Jedi.class)
|
||||
.ensureIndex(HashedIndex.hashed("useTheForce"));
|
||||
----
|
||||
====
|
||||
|
||||
[[mapping-usage-indexes.wildcard-index]]
|
||||
== Wildcard Indexes
|
||||
|
||||
A `WildcardIndex` is an index that can be used to include all fields or specific ones based a given (wildcard) pattern.
|
||||
For details, refer to the https://docs.mongodb.com/manual/core/index-wildcard/[MongoDB Documentation].
|
||||
|
||||
The index can be set up programmatically using `WildcardIndex` via `IndexOperations`.
|
||||
|
||||
.Programmatic WildcardIndex setup
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
mongoOperations
|
||||
.indexOps(User.class)
|
||||
.ensureIndex(new WildcardIndex("userMetadata"));
|
||||
----
|
||||
[source,javascript]
|
||||
----
|
||||
db.user.createIndex({ "userMetadata.$**" : 1 }, {})
|
||||
----
|
||||
====
|
||||
|
||||
The `@WildcardIndex` annotation allows a declarative index setup that can be used either with a document type or property.
|
||||
|
||||
If placed on a type that is a root level domain entity (one annotated with `@Document`), the index resolver will create a
|
||||
wildcard index for it.
|
||||
|
||||
.Wildcard index on domain type
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Document
|
||||
@WildcardIndexed
|
||||
public class Product {
|
||||
// …
|
||||
}
|
||||
----
|
||||
[source,javascript]
|
||||
----
|
||||
db.product.createIndex({ "$**" : 1 },{})
|
||||
----
|
||||
====
|
||||
|
||||
The `wildcardProjection` can be used to specify keys to in-/exclude in the index.
|
||||
|
||||
.Wildcard index with `wildcardProjection`
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Document
|
||||
@WildcardIndexed(wildcardProjection = "{ 'userMetadata.age' : 0 }")
|
||||
public class User {
|
||||
private @Id String id;
|
||||
private UserMetadata userMetadata;
|
||||
}
|
||||
----
|
||||
[source,javascript]
|
||||
----
|
||||
db.user.createIndex(
|
||||
{ "$**" : 1 },
|
||||
{ "wildcardProjection" :
|
||||
{ "userMetadata.age" : 0 }
|
||||
}
|
||||
)
|
||||
----
|
||||
====
|
||||
|
||||
Wildcard indexes can also be expressed by adding the annotation directly to the field.
|
||||
Please note that `wildcardProjection` is not allowed on nested paths such as properties.
|
||||
Projections on types annotated with `@WildcardIndexed` are omitted during index creation.
|
||||
|
||||
.Wildcard index on property
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Document
|
||||
public class User {
|
||||
private @Id String id;
|
||||
|
||||
@WildcardIndexed
|
||||
private UserMetadata userMetadata;
|
||||
}
|
||||
----
|
||||
[source,javascript]
|
||||
----
|
||||
db.user.createIndex({ "userMetadata.$**" : 1 }, {})
|
||||
----
|
||||
====
|
||||
|
||||
[[mapping-usage-indexes.text-index]]
|
||||
== Text Indexes
|
||||
|
||||
NOTE: The text index feature is disabled by default for MongoDB v.2.4.
|
||||
|
||||
Creating a text index allows accumulating several fields into a searchable full-text index.
|
||||
It is only possible to have one text index per collection, so all fields marked with `@TextIndexed` are combined into this index.
|
||||
Properties can be weighted to influence the document score for ranking results.
|
||||
The default language for the text index is English. To change the default language, set the `language` attribute to whichever language you want (for example, `@Document(language="spanish")`).
|
||||
Using a property called `language` or `@Language` lets you define a language override on a per-document base.
|
||||
The following example shows how to create a text index and set the language to Spanish:
|
||||
|
||||
.Example Text Index Usage
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Document(language = "spanish")
|
||||
class SomeEntity {
|
||||
|
||||
@TextIndexed String foo;
|
||||
|
||||
@Language String lang;
|
||||
|
||||
Nested nested;
|
||||
}
|
||||
|
||||
class Nested {
|
||||
|
||||
@TextIndexed(weight=5) String bar;
|
||||
String roo;
|
||||
}
|
||||
----
|
||||
====
|
||||
@@ -1,5 +1,5 @@
|
||||
[[mongo.jsonSchema]]
|
||||
=== JSON Schema
|
||||
= JSON Schema
|
||||
|
||||
As of version 3.6, MongoDB supports collections that validate documents against a provided https://docs.mongodb.com/manual/core/schema-validation/#json-schema[JSON Schema].
|
||||
The schema itself and both validation action and level can be defined when creating the collection, as the following example shows:
|
||||
@@ -58,7 +58,7 @@ MongoJsonSchema.builder() <1>
|
||||
<1> Obtain a schema builder to configure the schema with a fluent API.
|
||||
<2> Configure required properties either directly as shown here or with more details as in 3.
|
||||
<3> Configure the required String-typed `firstname` field, allowing only `luke` and `han` values. Properties can be typed or untyped. Use a static import of `JsonSchemaProperty` to make the syntax slightly more compact and to get entry points such as `string(…)`.
|
||||
<4> Build the schema object. Use the schema to create either a collection or <<mongodb-template-query.criteria,query documents>>.
|
||||
<4> Build the schema object.
|
||||
====
|
||||
|
||||
There are already some predefined and strongly typed schema objects (`JsonSchemaObject` and `JsonSchemaProperty`) available
|
||||
@@ -87,14 +87,14 @@ template.createCollection(Person.class, CollectionOptions.empty().schema(schema)
|
||||
====
|
||||
|
||||
[[mongo.jsonSchema.generated]]
|
||||
==== Generating a Schema
|
||||
== Generating a Schema
|
||||
|
||||
Setting up a schema can be a time consuming task and we encourage everyone who decides to do so, to really take the time it takes.
|
||||
It's important, schema changes can be hard.
|
||||
However, there might be times when one does not want to be bothered with it, and that is where `JsonSchemaCreator` comes into play.
|
||||
|
||||
`JsonSchemaCreator` and its default implementation generates a `MongoJsonSchema` out of domain types metadata provided by the mapping infrastructure.
|
||||
This means, that <<mapping-usage-annotations, annotated properties>> as well as potential <<mapping-configuration, custom conversions>> are considered.
|
||||
This means, that xref:mongodb/mapping/mapping.adoc#mapping-usage-annotations[annotated properties] as well as potential xref:mongodb/mapping/mapping.adoc#mapping-configuration[custom conversions] are considered.
|
||||
|
||||
.Generate Json Schema from domain type
|
||||
====
|
||||
@@ -287,23 +287,8 @@ class B extends Root {
|
||||
----
|
||||
====
|
||||
|
||||
[[mongo.jsonSchema.query]]
|
||||
==== Query a collection for matching JSON Schema
|
||||
|
||||
You can use a schema to query any collection for documents that match a given structure defined by a JSON schema, as the following example shows:
|
||||
|
||||
.Query for Documents matching a `$jsonSchema`
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
MongoJsonSchema schema = MongoJsonSchema.builder().required("firstname", "lastname").build();
|
||||
|
||||
template.find(query(matchingDocumentStructure(schema)), Person.class);
|
||||
----
|
||||
====
|
||||
|
||||
[[mongo.jsonSchema.encrypted-fields]]
|
||||
==== Encrypted Fields
|
||||
== Encrypted Fields
|
||||
|
||||
MongoDB 4.2 https://docs.mongodb.com/master/core/security-client-side-encryption/[Field Level Encryption] allows to directly encrypt individual properties.
|
||||
|
||||
@@ -399,7 +384,7 @@ public class EncryptionExtension implements EvaluationContextExtension {
|
||||
====
|
||||
|
||||
[[mongo.jsonSchema.types]]
|
||||
==== JSON Schema Types
|
||||
== JSON Schema Types
|
||||
|
||||
The following table shows the supported JSON schema types:
|
||||
|
||||
@@ -1,31 +1,34 @@
|
||||
|
||||
[[mapping-chapter]]
|
||||
= Mapping
|
||||
= Object Mapping
|
||||
|
||||
Rich mapping support is provided by the `MappingMongoConverter`. `MappingMongoConverter` has a rich metadata model that provides a full feature set to map domain objects to MongoDB documents.
|
||||
Rich mapping support is provided by the `MappingMongoConverter`.
|
||||
The converter holds a metadata model that provides a full feature set to map domain objects to MongoDB documents.
|
||||
The mapping metadata model is populated by using annotations on your domain objects.
|
||||
However, the infrastructure is not limited to using annotations as the only source of metadata information.
|
||||
The `MappingMongoConverter` also lets you map objects to documents without providing any additional metadata, by following a set of conventions.
|
||||
|
||||
This section describes the features of the `MappingMongoConverter`, including fundamentals, how to use conventions for mapping objects to documents and how to override those conventions with annotation-based mapping metadata.
|
||||
|
||||
include::../{spring-data-commons-docs}/object-mapping.adoc[leveloffset=+1]
|
||||
include::{commons}@data-commons::page$object-mapping.adoc[leveloffset=+1]
|
||||
|
||||
[[mapping-conventions]]
|
||||
== Convention-based Mapping
|
||||
|
||||
`MappingMongoConverter` has a few conventions for mapping objects to documents when no additional mapping metadata is provided. The conventions are:
|
||||
`MappingMongoConverter` has a few conventions for mapping objects to documents when no additional mapping metadata is provided.
|
||||
The conventions are:
|
||||
|
||||
* The short Java class name is mapped to the collection name in the following manner. The class `com.bigbank.SavingsAccount` maps to the `savingsAccount` collection name.
|
||||
* The short Java class name is mapped to the collection name in the following manner.
|
||||
The class `com.bigbank.SavingsAccount` maps to the `savingsAccount` collection name.
|
||||
* All nested objects are stored as nested objects in the document and *not* as DBRefs.
|
||||
* The converter uses any Spring Converters registered with it to override the default mapping of object properties to document fields and values.
|
||||
* The fields of an object are used to convert to and from fields in the document. Public `JavaBean` properties are not used.
|
||||
* If you have a single non-zero-argument constructor whose constructor argument names match top-level field names of document, that constructor is used. Otherwise, the zero-argument constructor is used. If there is more than one non-zero-argument constructor, an exception will be thrown.
|
||||
* The fields of an object are used to convert to and from fields in the document.
|
||||
Public `JavaBean` properties are not used.
|
||||
* If you have a single non-zero-argument constructor whose constructor argument names match top-level field names of the document, that constructor is used. Otherwise, the zero-argument constructor is used. If there is more than one non-zero-argument constructor, an exception will be thrown.
|
||||
|
||||
[[mapping.conventions.id-field]]
|
||||
=== How the `_id` field is handled in the mapping layer.
|
||||
|
||||
MongoDB requires that you have an `_id` field for all documents. If you don't provide one the driver will assign a ObjectId with a generated value. The "_id" field can be of any type the, other than arrays, so long as it is unique. The driver naturally supports all primitive types and Dates. When using the `MappingMongoConverter` there are certain rules that govern how properties from the Java class is mapped to this `_id` field.
|
||||
MongoDB requires that you have an `_id` field for all documents. If you don't provide one, the driver will assign an ObjectId with a generated value. The "_id" field can be of any type, other than arrays, so long as it is unique. The driver naturally supports all primitive types and Dates. When using the `MappingMongoConverter` there are certain rules that govern how properties from the Java class are mapped to this `_id` field.
|
||||
|
||||
The following outlines what field will be mapped to the `_id` document field:
|
||||
|
||||
@@ -68,11 +71,14 @@ When querying and updating `MongoTemplate` will use the converter to handle conv
|
||||
[[mapping-conversion]]
|
||||
== Data Mapping and Type Conversion
|
||||
|
||||
This section explains how types are mapped to and from a MongoDB representation. Spring Data MongoDB supports all types that can be represented as BSON, MongoDB's internal document format.
|
||||
In addition to these types, Spring Data MongoDB provides a set of built-in converters to map additional types. You can provide your own converters to adjust type conversion. See <<mongo.custom-converters>> for further details.
|
||||
|
||||
The following provides samples of each available type conversion:
|
||||
Spring Data MongoDB supports all types that can be represented as BSON, MongoDB's internal document format.
|
||||
In addition to these types, Spring Data MongoDB provides a set of built-in converters to map additional types.
|
||||
You can provide your own converters to adjust type conversion.
|
||||
See xref:mongodb/mapping/custom-conversions.adoc[Custom Conversions - Overriding Default Mapping] for further details.
|
||||
|
||||
.Built in Type conversions:
|
||||
[%collapsible]
|
||||
====
|
||||
[cols="3,1,6", options="header"]
|
||||
.Type
|
||||
|===
|
||||
@@ -193,12 +199,12 @@ calling `get()` before the actual conversion
|
||||
|
||||
| `LocalDate` +
|
||||
(Joda, Java 8, JSR310-BackPort)
|
||||
| converter / native (Java8)footnote:[Uses UTC zone offset. Configure via <<mapping-configuration,MongoConverterConfigurationAdapter>>]
|
||||
| converter / native (Java8)footnote:[Uses UTC zone offset. Configure via xref:mongodb/mapping/mapping.adoc#mapping-configuration[MongoConverterConfigurationAdapter]]
|
||||
| `{"date" : ISODate("2019-11-12T00:00:00.000Z")}`
|
||||
|
||||
| `LocalDateTime`, `LocalTime` +
|
||||
(Joda, Java 8, JSR310-BackPort)
|
||||
| converter / native (Java8)footnote:[Uses UTC zone offset. Configure via <<mapping-configuration,MongoConverterConfigurationAdapter>>]
|
||||
| converter / native (Java8)footnote:[Uses UTC zone offset. Configure via xref:mongodb/mapping/mapping.adoc#mapping-configuration[MongoConverterConfigurationAdapter]]
|
||||
| `{"date" : ISODate("2019-11-12T23:00:00.809Z")}`
|
||||
|
||||
| `DateTime` (Joda)
|
||||
@@ -259,7 +265,7 @@ calling `get()` before the actual conversion
|
||||
[ [ -73.97880 , 40.77247 ], [ -73.97036 , 40.76811 ] ]
|
||||
] }}`
|
||||
|===
|
||||
|
||||
====
|
||||
|
||||
[[mapping-configuration]]
|
||||
== Mapping Configuration
|
||||
@@ -268,9 +274,12 @@ Unless explicitly configured, an instance of `MappingMongoConverter` is created
|
||||
|
||||
You can configure the `MappingMongoConverter` as well as `com.mongodb.client.MongoClient` and MongoTemplate by using either Java-based or XML-based metadata. The following example shows the configuration:
|
||||
|
||||
====
|
||||
.Java
|
||||
[source,java,role="primary"]
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Java::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
@Configuration
|
||||
public class MongoConfig extends AbstractMongoClientConfiguration {
|
||||
@@ -299,11 +308,13 @@ public class MongoConfig extends AbstractMongoClientConfiguration {
|
||||
return new LoggingEventListener<MongoMappingEvent>();
|
||||
}
|
||||
}
|
||||
|
||||
----
|
||||
<1> The mapping base package defines the root path used to scan for entities used to pre initialize the `MappingContext`. By default the configuration classes package is used.
|
||||
<2> Configure additional custom converters for specific domain types that replace the default mapping procedure for those types with your custom implementation.
|
||||
|
||||
.XML
|
||||
[source,xml,role="secondary"]
|
||||
XML::
|
||||
+
|
||||
[source,xml,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<beans xmlns="http://www.springframework.org/schema/beans"
|
||||
@@ -340,11 +351,10 @@ public class MongoConfig extends AbstractMongoClientConfiguration {
|
||||
|
||||
</beans>
|
||||
----
|
||||
<1> The mapping base package defines the root path used to scan for entities used to pre initialize the `MappingContext`. By default the configuration classes package is used.
|
||||
<2> Configure additional custom converters for specific domain types that replace the default mapping procedure for those types with your custom implementation.
|
||||
====
|
||||
======
|
||||
|
||||
`AbstractMongoClientConfiguration` requires you to implement methods that define a `com.mongodb.client.MongoClient` as well as provide a database name. `AbstractMongoClientConfiguration` also has a method named `getMappingBasePackage(…)` that you can override to tell the converter where to scan for classes annotated with the `@Document` annotation.
|
||||
`AbstractMongoClientConfiguration` requires you to implement methods that define a `com.mongodb.client.MongoClient` as well as provide a database name.
|
||||
`AbstractMongoClientConfiguration` also has a method named `getMappingBasePackage(…)` that you can override to tell the converter where to scan for classes annotated with the `@Document` annotation.
|
||||
|
||||
You can add additional converters to the converter by overriding the `customConversionsConfiguration` method.
|
||||
MongoDB's native JSR-310 support can be enabled through `MongoConverterConfigurationAdapter.useNativeDriverJavaTimeCodecs()`.
|
||||
@@ -403,96 +413,7 @@ public class Person {
|
||||
IMPORTANT: The `@Id` annotation tells the mapper which property you want to use for the MongoDB `_id` property, and the `@Indexed` annotation tells the mapping framework to call `createIndex(…)` on that property of your document, making searches faster.
|
||||
Automatic index creation is only done for types annotated with `@Document`.
|
||||
|
||||
WARNING: Auto index creation is **disabled** by default and needs to be enabled through the configuration (see <<mapping.index-creation>>).
|
||||
|
||||
[[mapping.index-creation]]
|
||||
=== Index Creation
|
||||
|
||||
Spring Data MongoDB can automatically create indexes for entity types annotated with `@Document`.
|
||||
Index creation must be explicitly enabled since version 3.0 to prevent undesired effects with collection lifecycle and performance impact.
|
||||
Indexes are automatically created for the initial entity set on application startup and when accessing an entity type for the first time while the application runs.
|
||||
|
||||
We generally recommend explicit index creation for application-based control of indexes as Spring Data cannot automatically create indexes for collections that were recreated while the application was running.
|
||||
|
||||
`IndexResolver` provides an abstraction for programmatic index definition creation if you want to make use of `@Indexed` annotations such as `@GeoSpatialIndexed`, `@TextIndexed`, `@CompoundIndex` and `@WildcardIndexed`.
|
||||
You can use index definitions with `IndexOperations` to create indexes.
|
||||
A good point in time for index creation is on application startup, specifically after the application context was refreshed, triggered by observing `ContextRefreshedEvent`.
|
||||
This event guarantees that the context is fully initialized.
|
||||
Note that at this time other components, especially bean factories might have access to the MongoDB database.
|
||||
|
||||
[WARNING]
|
||||
====
|
||||
``Map``-like properties are skipped by the `IndexResolver` unless annotated with `@WildcardIndexed` because the _map key_ must be part of the index definition. Since the purpose of maps is the usage of dynamic keys and values, the keys cannot be resolved from static mapping metadata.
|
||||
====
|
||||
|
||||
.Programmatic Index Creation for a single Domain Type
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
class MyListener {
|
||||
|
||||
@EventListener(ContextRefreshedEvent.class)
|
||||
public void initIndicesAfterStartup() {
|
||||
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext = mongoTemplate
|
||||
.getConverter().getMappingContext();
|
||||
|
||||
IndexResolver resolver = new MongoPersistentEntityIndexResolver(mappingContext);
|
||||
|
||||
IndexOperations indexOps = mongoTemplate.indexOps(DomainType.class);
|
||||
resolver.resolveIndexFor(DomainType.class).forEach(indexOps::ensureIndex);
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
.Programmatic Index Creation for all Initial Entities
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
class MyListener{
|
||||
|
||||
@EventListener(ContextRefreshedEvent.class)
|
||||
public void initIndicesAfterStartup() {
|
||||
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext = mongoTemplate
|
||||
.getConverter().getMappingContext();
|
||||
|
||||
// consider only entities that are annotated with @Document
|
||||
mappingContext.getPersistentEntities()
|
||||
.stream()
|
||||
.filter(it -> it.isAnnotationPresent(Document.class))
|
||||
.forEach(it -> {
|
||||
|
||||
IndexOperations indexOps = mongoTemplate.indexOps(it.getType());
|
||||
resolver.resolveIndexFor(it.getType()).forEach(indexOps::ensureIndex);
|
||||
});
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
Alternatively, if you want to ensure index and collection presence before any component is able to access your database from your application, declare a `@Bean` method for `MongoTemplate` and include the code from above before returning the `MongoTemplate` object.
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
To turn automatic index creation _ON_ please override `autoIndexCreation()` in your configuration.
|
||||
[source,java]
|
||||
----
|
||||
@Configuration
|
||||
public class Config extends AbstractMongoClientConfiguration {
|
||||
|
||||
@Override
|
||||
public boolean autoIndexCreation() {
|
||||
return true;
|
||||
}
|
||||
|
||||
// ...
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
IMPORTANT: Automatic index creation is turned _OFF_ by default as of version 3.0.
|
||||
WARNING: Auto index creation is **disabled** by default and needs to be enabled through the configuration (see xref:mongodb/mapping/mapping.adoc#mapping.index-creation[Index Creation]).
|
||||
|
||||
[[mapping-usage-annotations]]
|
||||
=== Mapping Annotation Overview
|
||||
@@ -518,8 +439,9 @@ The MappingMongoConverter can use metadata to drive the mapping of objects to do
|
||||
|
||||
The mapping metadata infrastructure is defined in a separate spring-data-commons project that is technology agnostic. Specific subclasses are using in the MongoDB support to support annotation based metadata. Other strategies are also possible to put in place if there is demand.
|
||||
|
||||
Here is an example of a more complex mapping.
|
||||
|
||||
.Here is an example of a more complex mapping
|
||||
[%collapsible]
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Document
|
||||
@@ -574,6 +496,7 @@ public class Person<T extends Address> {
|
||||
// other getters/setters omitted
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
[TIP]
|
||||
====
|
||||
@@ -648,268 +571,10 @@ NOTE: The SpEL expression in the `@Value` annotation of the `quantity` parameter
|
||||
|
||||
Additional examples for using the `@PersistenceConstructor` annotation can be found in the https://github.com/spring-projects/spring-data-mongodb/blob/master/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java[MappingMongoConverterUnitTests] test suite.
|
||||
|
||||
[[mapping-usage-indexes.compound-index]]
|
||||
=== Compound Indexes
|
||||
|
||||
Compound indexes are also supported. They are defined at the class level, rather than on individual properties.
|
||||
|
||||
NOTE: Compound indexes are very important to improve the performance of queries that involve criteria on multiple fields
|
||||
|
||||
Here's an example that creates a compound index of `lastName` in ascending order and `age` in descending order:
|
||||
|
||||
.Example Compound Index Usage
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
package com.mycompany.domain;
|
||||
|
||||
@Document
|
||||
@CompoundIndex(name = "age_idx", def = "{'lastName': 1, 'age': -1}")
|
||||
public class Person {
|
||||
|
||||
@Id
|
||||
private ObjectId id;
|
||||
private Integer age;
|
||||
private String firstName;
|
||||
private String lastName;
|
||||
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
[TIP]
|
||||
====
|
||||
`@CompoundIndex` is repeatable using `@CompoundIndexes` as its container.
|
||||
|
||||
[source,java]
|
||||
----
|
||||
@Document
|
||||
@CompoundIndex(name = "cmp-idx-one", def = "{'firstname': 1, 'lastname': -1}")
|
||||
@CompoundIndex(name = "cmp-idx-two", def = "{'address.city': -1, 'address.street': 1}")
|
||||
public class Person {
|
||||
|
||||
String firstname;
|
||||
String lastname;
|
||||
|
||||
Address address;
|
||||
|
||||
// ...
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
[[mapping-usage-indexes.hashed-index]]
|
||||
=== Hashed Indexes
|
||||
|
||||
Hashed indexes allow hash based sharding within a sharded cluster.
|
||||
Using hashed field values to shard collections results in a more random distribution.
|
||||
For details, refer to the https://docs.mongodb.com/manual/core/index-hashed/[MongoDB Documentation].
|
||||
|
||||
Here's an example that creates a hashed index for `_id`:
|
||||
|
||||
.Example Hashed Index Usage
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Document
|
||||
public class DomainType {
|
||||
|
||||
@HashIndexed @Id String id;
|
||||
|
||||
// ...
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
Hashed indexes can be created next to other index definitions like shown below, in that case both indices are created:
|
||||
|
||||
.Example Hashed Index Usage together with simple index
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Document
|
||||
public class DomainType {
|
||||
|
||||
@Indexed
|
||||
@HashIndexed
|
||||
String value;
|
||||
|
||||
// ...
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
In case the example above is too verbose, a compound annotation allows to reduce the number of annotations that need to be declared on a property:
|
||||
|
||||
.Example Composed Hashed Index Usage
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Document
|
||||
public class DomainType {
|
||||
|
||||
@IndexAndHash(name = "idx...") <1>
|
||||
String value;
|
||||
|
||||
// ...
|
||||
}
|
||||
|
||||
@Indexed
|
||||
@HashIndexed
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
public @interface IndexAndHash {
|
||||
|
||||
@AliasFor(annotation = Indexed.class, attribute = "name") <1>
|
||||
String name() default "";
|
||||
}
|
||||
----
|
||||
<1> Potentially register an alias for certain attributes of the meta annotation.
|
||||
====
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
Although index creation via annotations comes in handy for many scenarios, consider taking over more control by setting up indices manually via `IndexOperations`.
|
||||
|
||||
[source,java]
|
||||
----
|
||||
mongoOperations.indexOpsFor(Jedi.class)
|
||||
.ensureIndex(HashedIndex.hashed("useTheForce"));
|
||||
----
|
||||
====
|
||||
|
||||
[[mapping-usage-indexes.wildcard-index]]
|
||||
=== Wildcard Indexes
|
||||
|
||||
A `WildcardIndex` is an index that can be used to include all fields or specific ones based on a given (wildcard) pattern.
|
||||
For details, refer to the https://docs.mongodb.com/manual/core/index-wildcard/[MongoDB Documentation].
|
||||
|
||||
The index can be set up programmatically using `WildcardIndex` via `IndexOperations`.
|
||||
|
||||
.Programmatic WildcardIndex setup
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
mongoOperations
|
||||
.indexOps(User.class)
|
||||
.ensureIndex(new WildcardIndex("userMetadata"));
|
||||
----
|
||||
[source,javascript]
|
||||
----
|
||||
db.user.createIndex({ "userMetadata.$**" : 1 }, {})
|
||||
----
|
||||
====
|
||||
|
||||
The `@WildcardIndex` annotation allows a declarative index setup that can be used either with a document type or property.
|
||||
|
||||
If placed on a type that is a root level domain entity (one annotated with `@Document`), the index resolver will create a
|
||||
wildcard index for it.
|
||||
|
||||
.Wildcard index on domain type
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Document
|
||||
@WildcardIndexed
|
||||
public class Product {
|
||||
// …
|
||||
}
|
||||
----
|
||||
[source,javascript]
|
||||
----
|
||||
db.product.createIndex({ "$**" : 1 },{})
|
||||
----
|
||||
====
|
||||
|
||||
The `wildcardProjection` can be used to specify keys to in-/exclude in the index.
|
||||
|
||||
.Wildcard index with `wildcardProjection`
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Document
|
||||
@WildcardIndexed(wildcardProjection = "{ 'userMetadata.age' : 0 }")
|
||||
public class User {
|
||||
private @Id String id;
|
||||
private UserMetadata userMetadata;
|
||||
}
|
||||
----
|
||||
[source,javascript]
|
||||
----
|
||||
db.user.createIndex(
|
||||
{ "$**" : 1 },
|
||||
{ "wildcardProjection" :
|
||||
{ "userMetadata.age" : 0 }
|
||||
}
|
||||
)
|
||||
----
|
||||
====
|
||||
|
||||
Wildcard indexes can also be expressed by adding the annotation directly to the field.
|
||||
Please note that `wildcardProjection` is not allowed on nested paths such as properties.
|
||||
Projections on types annotated with `@WildcardIndexed` are omitted during index creation.
|
||||
|
||||
.Wildcard index on property
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Document
|
||||
public class User {
|
||||
private @Id String id;
|
||||
|
||||
@WildcardIndexed
|
||||
private UserMetadata userMetadata;
|
||||
}
|
||||
----
|
||||
[source,javascript]
|
||||
----
|
||||
db.user.createIndex({ "userMetadata.$**" : 1 }, {})
|
||||
----
|
||||
====
|
||||
|
||||
[[mapping-usage-indexes.text-index]]
|
||||
=== Text Indexes
|
||||
|
||||
NOTE: The text index feature is disabled by default for MongoDB v.2.4.
|
||||
|
||||
Creating a text index allows accumulating several fields into a searchable full-text index.
|
||||
It is only possible to have one text index per collection, so all fields marked with `@TextIndexed` are combined into this index.
|
||||
Properties can be weighted to influence the document score for ranking results.
|
||||
The default language for the text index is English. To change the default language, set the `language` attribute to whichever language you want (for example, `@Document(language="spanish")`).
|
||||
Using a property called `language` or `@Language` lets you define a language override on a per-document base.
|
||||
The following example shows how to create a text index and set the language to Spanish:
|
||||
|
||||
.Example Text Index Usage
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Document(language = "spanish")
|
||||
class SomeEntity {
|
||||
|
||||
@TextIndexed String foo;
|
||||
|
||||
@Language String lang;
|
||||
|
||||
Nested nested;
|
||||
}
|
||||
|
||||
class Nested {
|
||||
|
||||
@TextIndexed(weight=5) String bar;
|
||||
String roo;
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
include::document-references.adoc[]
|
||||
|
||||
[[mapping-usage-events]]
|
||||
=== Mapping Framework Events
|
||||
|
||||
Events are fired throughout the lifecycle of the mapping process. This is described in the <<mongodb.mapping-usage.events,Lifecycle Events>> section.
|
||||
Events are fired throughout the lifecycle of the mapping process.
|
||||
This is described in the xref:mongodb/lifecycle-events.adoc[Lifecycle Events] section.
|
||||
|
||||
Declaring these beans in your Spring ApplicationContext causes them to be invoked whenever the event is dispatched.
|
||||
|
||||
include::unwrapping-entities.adoc[]
|
||||
|
||||
include::mongo-custom-conversions.adoc[]
|
||||
include::mongo-property-converters.adoc[]
|
||||
@@ -1,7 +1,7 @@
|
||||
[[mongo.property-converters]]
|
||||
== Property Converters - Mapping specific fields
|
||||
= Property Converters
|
||||
|
||||
While <<mongo.custom-converters, type-based conversion>> already offers ways to influence the conversion and representation of certain types within the target store, it has limitations when only certain values or properties of a particular type should be considered for conversion.
|
||||
While xref:mongodb/mapping/custom-conversions.adoc[type-based conversion] already offers ways to influence the conversion and representation of certain types within the target store, it has limitations when only certain values or properties of a particular type should be considered for conversion.
|
||||
Property-based converters allow configuring conversion rules on a per-property basis, either declaratively (via `@ValueConverter`) or programmatically (by registering a `PropertyValueConverter` for a specific property).
|
||||
|
||||
A `PropertyValueConverter` can transform a given value into its store representation (write) and back (read) as the following listing shows.
|
||||
@@ -35,7 +35,7 @@ You can use `PropertyValueConverterFactory.beanFactoryAware(…)` to obtain a `P
|
||||
You can change the default behavior through `ConverterConfiguration`.
|
||||
|
||||
[[mongo.property-converters.declarative]]
|
||||
=== Declarative Value Converter
|
||||
== Declarative Value Converter
|
||||
|
||||
The most straight forward usage of a `PropertyValueConverter` is by annotating properties with the `@ValueConverter` annotation that defines the converter type:
|
||||
|
||||
@@ -52,7 +52,7 @@ class Person {
|
||||
====
|
||||
|
||||
[[mongo.property-converters.programmatic]]
|
||||
=== Programmatic Value Converter Registration
|
||||
== Programmatic Value Converter Registration
|
||||
|
||||
Programmatic registration registers `PropertyValueConverter` instances for properties within an entity model by using a `PropertyValueConverterRegistrar`, as the following example shows.
|
||||
The difference between declarative registration and programmatic registration is that programmatic registration happens entirely outside of the entity model.
|
||||
@@ -78,17 +78,10 @@ registrar.registerConverter(Person.class, Person::getSsn())
|
||||
|
||||
WARNING: Dot notation (such as `registerConverter(Person.class, "address.street", …)`) for navigating across properties into subdocuments is *not* supported when registering converters.
|
||||
|
||||
[[mongo.property-converters.value-conversions]]
|
||||
=== MongoDB property value conversions
|
||||
TIP: `MongoValueConverter` offers a pre-typed `PropertyValueConverter` interface that uses `MongoConversionContext`.
|
||||
|
||||
The preceding sections outlined the purpose and overall structure of `PropertyValueConverters`.
|
||||
This section focuses on MongoDB specific aspects.
|
||||
|
||||
==== MongoValueConverter and MongoConversionContext
|
||||
|
||||
`MongoValueConverter` offers a pre-typed `PropertyValueConverter` interface that uses `MongoConversionContext`.
|
||||
|
||||
==== MongoCustomConversions configuration
|
||||
[[mongocustomconversions-configuration]]
|
||||
== MongoCustomConversions configuration
|
||||
|
||||
By default, `MongoCustomConversions` can handle declarative value converters, depending on the configured `PropertyValueConverterFactory`.
|
||||
`MongoConverterConfigurationAdapter` helps to set up programmatic value conversions or define the `PropertyValueConverterFactory` to be used.
|
||||
@@ -1,10 +1,10 @@
|
||||
[[unwrapped-entities]]
|
||||
== Unwrapping Types
|
||||
= Unwrapping Types
|
||||
|
||||
Unwrapped entities are used to design value objects in your Java domain model whose properties are flattened out into the parent's MongoDB Document.
|
||||
|
||||
[[unwrapped-entities.mapping]]
|
||||
=== Unwrapped Types Mapping
|
||||
== Unwrapped Types Mapping
|
||||
|
||||
Consider the following domain model where `User.name` is annotated with `@Unwrapped`.
|
||||
The `@Unwrapped` annotation signals that all properties of `UserName` should be flattened out into the `user` document that owns the `name` property.
|
||||
@@ -53,7 +53,7 @@ However, those must not be, nor contain unwrapped fields themselves.
|
||||
====
|
||||
|
||||
[[unwrapped-entities.mapping.field-names]]
|
||||
=== Unwrapped Types field names
|
||||
== Unwrapped Types field names
|
||||
|
||||
A value object can be unwrapped multiple times by using the optional `prefix` attribute of the `@Unwrapped` annotation.
|
||||
By doing so, the chosen prefix is prepended to each property or `@Field("…")` name in the unwrapped object.
|
||||
@@ -136,7 +136,7 @@ public class UserName {
|
||||
====
|
||||
|
||||
[[unwrapped-entities.queries]]
|
||||
=== Query on Unwrapped Objects
|
||||
== Query on Unwrapped Objects
|
||||
|
||||
Defining queries on unwrapped properties is possible on type- as well as field-level as the provided `Criteria` is matched against the domain type.
|
||||
Prefixes and potential custom field names will be considered when rendering the actual query.
|
||||
@@ -179,7 +179,7 @@ db.collection.find({
|
||||
====
|
||||
|
||||
[[unwrapped-entities.queries.sort]]
|
||||
==== Sort by unwrapped field.
|
||||
=== Sort by unwrapped field.
|
||||
|
||||
Fields of unwrapped objects can be used for sorting via their property path as shown in the sample below.
|
||||
|
||||
@@ -205,7 +205,7 @@ Though possible, using the unwrapped object itself as sort criteria includes all
|
||||
====
|
||||
|
||||
[[unwrapped-entities.queries.project]]
|
||||
==== Field projection on unwrapped objects
|
||||
=== Field projection on unwrapped objects
|
||||
|
||||
Fields of unwrapped objects can be subject for projection either as a whole or via single fields as shown in the samples below.
|
||||
|
||||
@@ -253,13 +253,13 @@ db.collection.find({
|
||||
====
|
||||
|
||||
[[unwrapped-entities.queries.by-example]]
|
||||
==== Query By Example on unwrapped object.
|
||||
=== Query By Example on unwrapped object.
|
||||
|
||||
Unwrapped objects can be used within an `Example` probe just as any other type.
|
||||
Please review the <<query-by-example.running,Query By Example>> section, to learn more about this feature.
|
||||
Please review the xref:mongodb/template-query-operations.adoc#mongo.query-by-example[Query By Example] section, to learn more about this feature.
|
||||
|
||||
[[unwrapped-entities.queries.repository]]
|
||||
==== Repository Queries on unwrapped objects.
|
||||
=== Repository Queries on unwrapped objects.
|
||||
|
||||
The `Repository` abstraction allows deriving queries on fields of unwrapped objects as well as the entire object.
|
||||
|
||||
@@ -284,7 +284,7 @@ Index creation for unwrapped objects is suspended even if the repository `create
|
||||
====
|
||||
|
||||
[[unwrapped-entities.update]]
|
||||
=== Update on Unwrapped Objects
|
||||
== Update on Unwrapped Objects
|
||||
|
||||
Unwrapped objects can be updated as any other object that is part of the domain model.
|
||||
The mapping layer takes care of flattening structures into their surroundings.
|
||||
@@ -338,14 +338,14 @@ db.collection.update({
|
||||
====
|
||||
|
||||
[[unwrapped-entities.aggregations]]
|
||||
=== Aggregations on Unwrapped Objects
|
||||
== Aggregations on Unwrapped Objects
|
||||
|
||||
The <<mongo.aggregation,Aggregation Framework>> will attempt to map unwrapped values of typed aggregations.
|
||||
The xref:mongodb/aggregation-framework.adoc[Aggregation Framework] will attempt to map unwrapped values of typed aggregations.
|
||||
Please make sure to work with the property path including the wrapper object when referencing one of its values.
|
||||
Other than that no special action is required.
|
||||
|
||||
[[unwrapped-entities.indexes]]
|
||||
=== Index on Unwrapped Objects
|
||||
== Index on Unwrapped Objects
|
||||
|
||||
It is possible to attach the `@Indexed` annotation to properties of an unwrapped type just as it is done with regular objects.
|
||||
It is not possible to use `@Indexed` along with the `@Unwrapped` annotation on the owning property.
|
||||
@@ -1,5 +1,5 @@
|
||||
[[mongo.encryption]]
|
||||
= Client Side Field Level Encryption (CSFLE)
|
||||
= Encryption (CSFLE)
|
||||
|
||||
Client Side Encryption is a feature that encrypts data in your application before it is sent to MongoDB.
|
||||
We recommend you get familiar with the concepts, ideally from the https://www.mongodb.com/docs/manual/core/csfle/[MongoDB Documentation] to learn more about its capabilities and restrictions before you continue applying Encryption through Spring Data.
|
||||
@@ -15,9 +15,9 @@ Specific data types require deterministic encryption to preserve equality compar
|
||||
== Automatic Encryption
|
||||
|
||||
MongoDB supports https://www.mongodb.com/docs/manual/core/csfle/[Client-Side Field Level Encryption] out of the box using the MongoDB driver with its Automatic Encryption feature.
|
||||
Automatic Encryption requires a <<mongo.jsonSchema,JSON Schema>> that allows to perform encrypted read and write operations without the need to provide an explicit en-/decryption step.
|
||||
Automatic Encryption requires a xref:mongodb/mapping/mapping-schema.adoc[JSON Schema] that allows to perform encrypted read and write operations without the need to provide an explicit en-/decryption step.
|
||||
|
||||
Please refer to the <<mongo.jsonSchema.encrypted-fields,JSON Schema>> section for more information on defining a JSON Schema that holds encryption information.
|
||||
Please refer to the xref:mongodb/mapping/mapping-schema.adoc#mongo.jsonSchema.encrypted-fields[JSON Schema] section for more information on defining a JSON Schema that holds encryption information.
|
||||
|
||||
To make use of a the `MongoJsonSchema` it needs to be combined with `AutoEncryptionSettings` which can be done eg. via a `MongoClientSettingsBuilderCustomizer`.
|
||||
|
||||
@@ -50,7 +50,7 @@ MongoClientSettingsBuilderCustomizer customizer(MappingContext mappingContext) {
|
||||
== Explicit Encryption
|
||||
|
||||
Explicit encryption uses the MongoDB driver's encryption library (`org.mongodb:mongodb-crypt`) to perform encryption and decryption tasks.
|
||||
The `@ExplicitEncrypted` annotation is a combination of the `@Encrypted` annotation used for <<mongo.jsonSchema.encrypted-fields,JSON Schema creation>> and a <<mongo.property-converters, Property Converter>>.
|
||||
The `@ExplicitEncrypted` annotation is a combination of the `@Encrypted` annotation used for xref:mongodb/mapping/mapping-schema.adoc#mongo.jsonSchema.encrypted-fields[JSON Schema creation] and a xref:mongodb/mapping/property-converters.adoc[Property Converter].
|
||||
In other words, `@ExplicitEncrypted` uses existing building blocks to combine them for simplified explicit encryption support.
|
||||
|
||||
[NOTE]
|
||||
@@ -112,7 +112,7 @@ Fields cannot be used in queries/aggregations.
|
||||
|
||||
By default, the `@ExplicitEncrypted(value=…)` attribute references a `MongoEncryptionConverter`.
|
||||
It is possible to change the default implementation and exchange it with any `PropertyValueConverter` implementation by providing the according type reference.
|
||||
To learn more about custom `PropertyValueConverters` and the required configuration, please refer to the <<mongo.property-converters>> section.
|
||||
To learn more about custom `PropertyValueConverters` and the required configuration, please refer to the xref:mongodb/mapping/property-converters.adoc[Property Converters - Mapping specific fields] section.
|
||||
|
||||
[[mongo.encryption.explicit-setup]]
|
||||
=== MongoEncryptionConverter Setup
|
||||
97
src/main/antora/modules/ROOT/pages/mongodb/mongo-group.adoc
Normal file
97
src/main/antora/modules/ROOT/pages/mongodb/mongo-group.adoc
Normal file
@@ -0,0 +1,97 @@
|
||||
[[mongo.group]]
|
||||
= Group Operations
|
||||
|
||||
As an alternative to using Map-Reduce to perform data aggregation, you can use the https://www.mongodb.org/display/DOCS/Aggregation#Aggregation-Group[`group` operation] which feels similar to using SQL's group by query style, so it may feel more approachable vs. using Map-Reduce. Using the group operations does have some limitations, for example it is not supported in a sharded environment and it returns the full result set in a single BSON object, so the result should be small, less than 10,000 keys.
|
||||
|
||||
Spring provides integration with MongoDB's group operation by providing methods on MongoOperations to simplify the creation and running of group operations. It can convert the results of the group operation to a POJO and also integrates with Spring's https://docs.spring.io/spring-framework/docs/{springVersion}/reference/html/core.html#resources[Resource abstraction]. This will let you place your JavaScript files on the file system, classpath, http server or any other Spring Resource implementation and then reference the JavaScript resources via an easy URI style syntax, e.g. 'classpath:reduce.js'. Externalizing JavaScript code in files is often preferable to embedding them as Java strings in your code. Note that you can still pass JavaScript code as Java strings if you prefer.
|
||||
|
||||
[[mongo.group.example]]
|
||||
== Example Usage
|
||||
|
||||
In order to understand how group operations work the following example is used, which is somewhat artificial. For a more realistic example consult the book 'MongoDB - The definitive guide'. A collection named `group_test_collection` is created with the following rows.
|
||||
|
||||
[source]
|
||||
----
|
||||
{ "_id" : ObjectId("4ec1d25d41421e2015da64f1"), "x" : 1 }
|
||||
{ "_id" : ObjectId("4ec1d25d41421e2015da64f2"), "x" : 1 }
|
||||
{ "_id" : ObjectId("4ec1d25d41421e2015da64f3"), "x" : 2 }
|
||||
{ "_id" : ObjectId("4ec1d25d41421e2015da64f4"), "x" : 3 }
|
||||
{ "_id" : ObjectId("4ec1d25d41421e2015da64f5"), "x" : 3 }
|
||||
{ "_id" : ObjectId("4ec1d25d41421e2015da64f6"), "x" : 3 }
|
||||
----
|
||||
|
||||
We would like to group by the only field in each row, the `x` field and aggregate the number of times each specific value of `x` occurs. To do this we need to create an initial document that contains our count variable and also a reduce function which will increment it each time it is encountered. The Java code to run the group operation is shown below
|
||||
|
||||
[source,java]
|
||||
----
|
||||
GroupByResults<XObject> results = mongoTemplate.group("group_test_collection",
|
||||
GroupBy.key("x").initialDocument("{ count: 0 }").reduceFunction("function(doc, prev) { prev.count += 1 }"),
|
||||
XObject.class);
|
||||
----
|
||||
|
||||
The first argument is the name of the collection to run the group operation over, the second is a fluent API that specifies properties of the group operation via a `GroupBy` class. In this example we are using just the `initialDocument` and `reduceFunction` methods. You can also specify a key-function, as well as a finalizer as part of the fluent API. If you have multiple keys to group by, you can pass in a comma separated list of keys.
|
||||
|
||||
The raw results of the group operation is a JSON document that looks like this
|
||||
|
||||
[source]
|
||||
----
|
||||
{
|
||||
"retval" : [ { "x" : 1.0 , "count" : 2.0} ,
|
||||
{ "x" : 2.0 , "count" : 1.0} ,
|
||||
{ "x" : 3.0 , "count" : 3.0} ] ,
|
||||
"count" : 6.0 ,
|
||||
"keys" : 3 ,
|
||||
"ok" : 1.0
|
||||
}
|
||||
----
|
||||
|
||||
The document under the "retval" field is mapped onto the third argument in the group method, in this case XObject which is shown below.
|
||||
|
||||
[source,java]
|
||||
----
|
||||
public class XObject {
|
||||
|
||||
private float x;
|
||||
|
||||
private float count;
|
||||
|
||||
|
||||
public float getX() {
|
||||
return x;
|
||||
}
|
||||
|
||||
public void setX(float x) {
|
||||
this.x = x;
|
||||
}
|
||||
|
||||
public float getCount() {
|
||||
return count;
|
||||
}
|
||||
|
||||
public void setCount(float count) {
|
||||
this.count = count;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "XObject [x=" + x + " count = " + count + "]";
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
You can also obtain the raw result as a `Document` by calling the method `getRawResults` on the `GroupByResults` class.
|
||||
|
||||
There is an additional method overload of the group method on `MongoOperations` which lets you specify a `Criteria` object for selecting a subset of the rows. An example which uses a `Criteria` object, with some syntax sugar using static imports, as well as referencing key-function and reduce-function JavaScript files via Spring Resource strings, is shown below.
|
||||
|
||||
[source]
|
||||
----
|
||||
import static org.springframework.data.mongodb.core.mapreduce.GroupBy.keyFunction;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.where;
|
||||
|
||||
GroupByResults<XObject> results = mongoTemplate.group(where("x").gt(0),
|
||||
"group_test_collection",
|
||||
keyFunction("classpath:keyFunction.js").initialDocument("{ count: 0 }").reduceFunction("classpath:groupReduce.js"), XObject.class);
|
||||
----
|
||||
|
||||
include::aggregation-framework.adoc[]
|
||||
|
||||
127
src/main/antora/modules/ROOT/pages/mongodb/mongo-mapreduce.adoc
Normal file
127
src/main/antora/modules/ROOT/pages/mongodb/mongo-mapreduce.adoc
Normal file
@@ -0,0 +1,127 @@
|
||||
[[mongo.mapreduce]]
|
||||
= Map-Reduce Operations
|
||||
|
||||
You can query MongoDB by using Map-Reduce, which is useful for batch processing, for data aggregation, and for when the query language does not fulfill your needs.
|
||||
|
||||
Spring provides integration with MongoDB's Map-Reduce by providing methods on `MongoOperations` to simplify the creation and running of Map-Reduce operations. It can convert the results of a Map-Reduce operation to a POJO and integrates with Spring's link:{springDocsUrl}/core.html#resources[Resource abstraction]. This lets you place your JavaScript files on the file system, classpath, HTTP server, or any other Spring Resource implementation and then reference the JavaScript resources through an easy URI style syntax -- for example, `classpath:reduce.js`. Externalizing JavaScript code in files is often preferable to embedding them as Java strings in your code. Note that you can still pass JavaScript code as Java strings if you prefer.
|
||||
|
||||
[[mongo.mapreduce.example]]
|
||||
== Example Usage
|
||||
|
||||
To understand how to perform Map-Reduce operations, we use an example from the book, _MongoDB - The Definitive Guide_ footnote:[Kristina Chodorow. _MongoDB - The Definitive Guide_. O'Reilly Media, 2013]. In this example, we create three documents that have the values [a,b], [b,c], and [c,d], respectively. The values in each document are associated with the key, 'x', as the following example shows (assume these documents are in a collection named `jmr1`):
|
||||
|
||||
[source]
|
||||
----
|
||||
{ "_id" : ObjectId("4e5ff893c0277826074ec533"), "x" : [ "a", "b" ] }
|
||||
{ "_id" : ObjectId("4e5ff893c0277826074ec534"), "x" : [ "b", "c" ] }
|
||||
{ "_id" : ObjectId("4e5ff893c0277826074ec535"), "x" : [ "c", "d" ] }
|
||||
----
|
||||
|
||||
The following map function counts the occurrence of each letter in the array for each document:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
function () {
|
||||
for (var i = 0; i < this.x.length; i++) {
|
||||
emit(this.x[i], 1);
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
The following reduce function sums up the occurrence of each letter across all the documents:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
function (key, values) {
|
||||
var sum = 0;
|
||||
for (var i = 0; i < values.length; i++)
|
||||
sum += values[i];
|
||||
return sum;
|
||||
}
|
||||
----
|
||||
|
||||
Running the preceding functions results in the following collection:
|
||||
|
||||
[source]
|
||||
----
|
||||
{ "_id" : "a", "value" : 1 }
|
||||
{ "_id" : "b", "value" : 2 }
|
||||
{ "_id" : "c", "value" : 2 }
|
||||
{ "_id" : "d", "value" : 1 }
|
||||
----
|
||||
|
||||
Assuming that the map and reduce functions are located in `map.js` and `reduce.js` and bundled in your jar so they are available on the classpath, you can run a Map-Reduce operation as follows:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
MapReduceResults<ValueObject> results = mongoOperations.mapReduce("jmr1", "classpath:map.js", "classpath:reduce.js", ValueObject.class);
|
||||
for (ValueObject valueObject : results) {
|
||||
System.out.println(valueObject);
|
||||
}
|
||||
----
|
||||
|
||||
The preceding example produces the following output:
|
||||
|
||||
[source]
|
||||
----
|
||||
ValueObject [id=a, value=1.0]
|
||||
ValueObject [id=b, value=2.0]
|
||||
ValueObject [id=c, value=2.0]
|
||||
ValueObject [id=d, value=1.0]
|
||||
----
|
||||
|
||||
The `MapReduceResults` class implements `Iterable` and provides access to the raw output and timing and count statistics. The following listing shows the `ValueObject` class:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
public class ValueObject {
|
||||
|
||||
private String id;
|
||||
private float value;
|
||||
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public float getValue() {
|
||||
return value;
|
||||
}
|
||||
|
||||
public void setValue(float value) {
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ValueObject [id=" + id + ", value=" + value + "]";
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
By default, the output type of `INLINE` is used so that you need not specify an output collection. To specify additional Map-Reduce options, use an overloaded method that takes an additional `MapReduceOptions` argument. The class `MapReduceOptions` has a fluent API, so adding additional options can be done in a compact syntax. The following example sets the output collection to `jmr1_out` (note that setting only the output collection assumes a default output type of `REPLACE`):
|
||||
|
||||
[source,java]
|
||||
----
|
||||
MapReduceResults<ValueObject> results = mongoOperations.mapReduce("jmr1", "classpath:map.js", "classpath:reduce.js",
|
||||
new MapReduceOptions().outputCollection("jmr1_out"), ValueObject.class);
|
||||
----
|
||||
|
||||
There is also a static import (`import static org.springframework.data.mongodb.core.mapreduce.MapReduceOptions.options;`) that can be used to make the syntax slightly more compact, as the following example shows:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
MapReduceResults<ValueObject> results = mongoOperations.mapReduce("jmr1", "classpath:map.js", "classpath:reduce.js",
|
||||
options().outputCollection("jmr1_out"), ValueObject.class);
|
||||
----
|
||||
|
||||
You can also specify a query to reduce the set of data that is fed into the Map-Reduce operation. The following example removes the document that contains [a,b] from consideration for Map-Reduce operations:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
Query query = new Query(where("x").ne(new String[] { "a", "b" }));
|
||||
MapReduceResults<ValueObject> results = mongoOperations.mapReduce(query, "jmr1", "classpath:map.js", "classpath:reduce.js",
|
||||
options().outputCollection("jmr1_out"), ValueObject.class);
|
||||
----
|
||||
|
||||
Note that you can specify additional limit and sort values on the query, but you cannot skip values.
|
||||
|
||||
@@ -0,0 +1,28 @@
|
||||
[[mongo.server-side-scripts]]
|
||||
= Script Operations
|
||||
|
||||
[WARNING]
|
||||
====
|
||||
https://docs.mongodb.com/master/release-notes/4.2-compatibility/[MongoDB 4.2] removed support for the `eval` command used
|
||||
by `ScriptOperations`. +
|
||||
There is no replacement for the removed functionality.
|
||||
====
|
||||
|
||||
MongoDB allows running JavaScript functions on the server by either directly sending the script or calling a stored one. `ScriptOperations` can be accessed through `MongoTemplate` and provides basic abstraction for `JavaScript` usage. The following example shows how to use the `ScriptOperations` class:
|
||||
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
ScriptOperations scriptOps = template.scriptOps();
|
||||
|
||||
ExecutableMongoScript echoScript = new ExecutableMongoScript("function(x) { return x; }");
|
||||
scriptOps.execute(echoScript, "directly execute script"); <1>
|
||||
|
||||
scriptOps.register(new NamedMongoScript("echo", echoScript)); <2>
|
||||
scriptOps.call("echo", "execute script via name"); <3>
|
||||
----
|
||||
<1> Run the script directly without storing the function on server side.
|
||||
<2> Store the script using 'echo' as its name. The given name identifies the script and allows calling it later.
|
||||
<3> Run the script with name 'echo' using the provided parameters.
|
||||
====
|
||||
|
||||
@@ -0,0 +1,38 @@
|
||||
[[mongodb.repositories.misc.cdi-integration]]
|
||||
= CDI Integration
|
||||
|
||||
Instances of the repository interfaces are usually created by a container, and Spring is the most natural choice when working with Spring Data.
|
||||
As of version 1.3.0, Spring Data MongoDB ships with a custom CDI extension that lets you use the repository abstraction in CDI environments.
|
||||
The extension is part of the JAR.
|
||||
To activate it, drop the Spring Data MongoDB JAR into your classpath.
|
||||
You can now set up the infrastructure by implementing a CDI Producer for the `MongoTemplate`, as the following example shows:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
class MongoTemplateProducer {
|
||||
|
||||
@Produces
|
||||
@ApplicationScoped
|
||||
public MongoOperations createMongoTemplate() {
|
||||
|
||||
MongoDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(MongoClients.create(), "database");
|
||||
return new MongoTemplate(factory);
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
The Spring Data MongoDB CDI extension picks up the `MongoTemplate` available as a CDI bean and creates a proxy for a Spring Data repository whenever a bean of a repository type is requested by the container.
|
||||
Thus, obtaining an instance of a Spring Data repository is a matter of declaring an `@Inject`-ed property, as the following example shows:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
class RepositoryClient {
|
||||
|
||||
@Inject
|
||||
PersonRepository repository;
|
||||
|
||||
public void businessMethod() {
|
||||
List<Person> people = repository.findAll();
|
||||
}
|
||||
}
|
||||
----
|
||||
@@ -0,0 +1,875 @@
|
||||
[[mongodb.repositories.queries]]
|
||||
= MongoDB-specific Query Methods
|
||||
|
||||
Most of the data access operations you usually trigger on a repository result in a query being executed against the MongoDB databases.
|
||||
Defining such a query is a matter of declaring a method on the repository interface, as the following example shows:
|
||||
|
||||
.PersonRepository with query methods
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
public interface PersonRepository extends PagingAndSortingRepository<Person, String> {
|
||||
|
||||
List<Person> findByLastname(String lastname); <1>
|
||||
|
||||
Page<Person> findByFirstname(String firstname, Pageable pageable); <2>
|
||||
|
||||
Person findByShippingAddresses(Address address); <3>
|
||||
|
||||
Person findFirstByLastname(String lastname); <4>
|
||||
|
||||
Stream<Person> findAllBy(); <5>
|
||||
}
|
||||
----
|
||||
<1> The `findByLastname` method shows a query for all people with the given last name.
|
||||
The query is derived by parsing the method name for constraints that can be concatenated with `And` and `Or`.
|
||||
Thus, the method name results in a query expression of `{"lastname" : lastname}`.
|
||||
<2> Applies pagination to a query.
|
||||
You can equip your method signature with a `Pageable` parameter and let the method return a `Page` instance and Spring Data automatically pages the query accordingly.
|
||||
<3> Shows that you can query based on properties that are not primitive types.
|
||||
Throws `IncorrectResultSizeDataAccessException` if more than one match is found.
|
||||
<4> Uses the `First` keyword to restrict the query to only the first result.
|
||||
Unlike <3>, this method does not throw an exception if more than one match is found.
|
||||
<5> Uses a Java 8 `Stream` that reads and converts individual elements while iterating the stream.
|
||||
|
||||
Reactive::
|
||||
+
|
||||
====
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
public interface ReactivePersonRepository extends ReactiveSortingRepository<Person, String> {
|
||||
|
||||
Flux<Person> findByFirstname(String firstname); <1>
|
||||
|
||||
Flux<Person> findByFirstname(Publisher<String> firstname); <2>
|
||||
|
||||
Flux<Person> findByFirstnameOrderByLastname(String firstname, Pageable pageable); <3>
|
||||
|
||||
Mono<Person> findByFirstnameAndLastname(String firstname, String lastname); <4>
|
||||
|
||||
Mono<Person> findFirstByLastname(String lastname); <5>
|
||||
}
|
||||
----
|
||||
<1> The method shows a query for all people with the given `lastname`. The query is derived by parsing the method name for constraints that can be concatenated with `And` and `Or`. Thus, the method name results in a query expression of `{"lastname" : lastname}`.
|
||||
<2> The method shows a query for all people with the given `firstname` once the `firstname` is emitted by the given `Publisher`.
|
||||
<3> Use `Pageable` to pass offset and sorting parameters to the database.
|
||||
<4> Find a single entity for the given criteria. It completes with `IncorrectResultSizeDataAccessException` on non-unique results.
|
||||
<5> Unlike <4>, the first entity is always emitted even if the query yields more result documents.
|
||||
|
||||
WARNING: The `Page` return type (as in `Mono<Page>`) is not supported by reactive repositories.
|
||||
|
||||
It is possible to use `Pageable` in derived finder methods, to pass on `sort`, `limit` and `offset` parameters to the query to reduce load and network traffic.
|
||||
The returned `Flux` will only emit data within the declared range.
|
||||
|
||||
[source,java]
|
||||
----
|
||||
Pageable page = PageRequest.of(1, 10, Sort.by("lastname"));
|
||||
Flux<Person> persons = repository.findByFirstnameOrderByLastname("luke", page);
|
||||
----
|
||||
====
|
||||
======
|
||||
|
||||
NOTE: We do not support referring to parameters that are mapped as `DBRef` in the domain class.
|
||||
|
||||
The following table shows the keywords that are supported for query methods:
|
||||
|
||||
[cols="1,2,3",options="header"]
|
||||
.Supported keywords for query methods
|
||||
|===
|
||||
| Keyword
|
||||
| Sample
|
||||
| Logical result
|
||||
|
||||
| `After`
|
||||
| `findByBirthdateAfter(Date date)`
|
||||
| `{"birthdate" : {"$gt" : date}}`
|
||||
|
||||
| `GreaterThan`
|
||||
| `findByAgeGreaterThan(int age)`
|
||||
| `{"age" : {"$gt" : age}}`
|
||||
|
||||
| `GreaterThanEqual`
|
||||
| `findByAgeGreaterThanEqual(int age)`
|
||||
| `{"age" : {"$gte" : age}}`
|
||||
|
||||
| `Before`
|
||||
| `findByBirthdateBefore(Date date)`
|
||||
| `{"birthdate" : {"$lt" : date}}`
|
||||
|
||||
| `LessThan`
|
||||
| `findByAgeLessThan(int age)`
|
||||
| `{"age" : {"$lt" : age}}`
|
||||
|
||||
| `LessThanEqual`
|
||||
| `findByAgeLessThanEqual(int age)`
|
||||
| `{"age" : {"$lte" : age}}`
|
||||
|
||||
| `Between`
|
||||
| `findByAgeBetween(int from, int to)` +
|
||||
`findByAgeBetween(Range<Integer> range)`
|
||||
| `{"age" : {"$gt" : from, "$lt" : to}}` +
|
||||
lower / upper bounds (`$gt` / `$gte` & `$lt` / `$lte`) according to `Range`
|
||||
|
||||
| `In`
|
||||
| `findByAgeIn(Collection ages)`
|
||||
| `{"age" : {"$in" : [ages...]}}`
|
||||
|
||||
| `NotIn`
|
||||
| `findByAgeNotIn(Collection ages)`
|
||||
| `{"age" : {"$nin" : [ages...]}}`
|
||||
|
||||
| `IsNotNull`, `NotNull`
|
||||
| `findByFirstnameNotNull()`
|
||||
| `{"firstname" : {"$ne" : null}}`
|
||||
|
||||
| `IsNull`, `Null`
|
||||
| `findByFirstnameNull()`
|
||||
| `{"firstname" : null}`
|
||||
|
||||
| `Like`, `StartingWith`, `EndingWith`
|
||||
| `findByFirstnameLike(String name)`
|
||||
| `{"firstname" : name} (name as regex)`
|
||||
|
||||
| `NotLike`, `IsNotLike`
|
||||
| `findByFirstnameNotLike(String name)`
|
||||
| `{"firstname" : { "$not" : name }} (name as regex)`
|
||||
|
||||
| `Containing` on String
|
||||
| `findByFirstnameContaining(String name)`
|
||||
| `{"firstname" : name} (name as regex)`
|
||||
|
||||
| `NotContaining` on String
|
||||
| `findByFirstnameNotContaining(String name)`
|
||||
| `{"firstname" : { "$not" : name}} (name as regex)`
|
||||
|
||||
| `Containing` on Collection
|
||||
| `findByAddressesContaining(Address address)`
|
||||
| `{"addresses" : { "$in" : address}}`
|
||||
|
||||
| `NotContaining` on Collection
|
||||
| `findByAddressesNotContaining(Address address)`
|
||||
| `{"addresses" : { "$not" : { "$in" : address}}}`
|
||||
|
||||
| `Regex`
|
||||
| `findByFirstnameRegex(String firstname)`
|
||||
| `{"firstname" : {"$regex" : firstname }}`
|
||||
|
||||
| `(No keyword)`
|
||||
| `findByFirstname(String name)`
|
||||
| `{"firstname" : name}`
|
||||
|
||||
| `Not`
|
||||
| `findByFirstnameNot(String name)`
|
||||
| `{"firstname" : {"$ne" : name}}`
|
||||
|
||||
| `Near`
|
||||
| `findByLocationNear(Point point)`
|
||||
| `{"location" : {"$near" : [x,y]}}`
|
||||
|
||||
| `Near`
|
||||
| `findByLocationNear(Point point, Distance max)`
|
||||
| `{"location" : {"$near" : [x,y], "$maxDistance" : max}}`
|
||||
|
||||
| `Near`
|
||||
| `findByLocationNear(Point point, Distance min, Distance max)`
|
||||
| `{"location" : {"$near" : [x,y], "$minDistance" : min, "$maxDistance" : max}}`
|
||||
|
||||
| `Within`
|
||||
| `findByLocationWithin(Circle circle)`
|
||||
| `{"location" : {"$geoWithin" : {"$center" : [ [x, y], distance]}}}`
|
||||
|
||||
| `Within`
|
||||
| `findByLocationWithin(Box box)`
|
||||
| `{"location" : {"$geoWithin" : {"$box" : [ [x1, y1], x2, y2]}}}`
|
||||
|
||||
| `IsTrue`, `True`
|
||||
| `findByActiveIsTrue()`
|
||||
| `{"active" : true}`
|
||||
|
||||
| `IsFalse`, `False`
|
||||
| `findByActiveIsFalse()`
|
||||
| `{"active" : false}`
|
||||
|
||||
| `Exists`
|
||||
| `findByLocationExists(boolean exists)`
|
||||
| `{"location" : {"$exists" : exists }}`
|
||||
|
||||
| `IgnoreCase`
|
||||
| `findByUsernameIgnoreCase(String username)`
|
||||
| `{"username" : {"$regex" : "^username$", "$options" : "i" }}`
|
||||
|===
|
||||
|
||||
NOTE: If the property criterion compares a document, the order of the fields and exact equality in the document matters.
|
||||
|
||||
[[mongodb.repositories.queries.geo-spatial]]
|
||||
== Geo-spatial Queries
|
||||
|
||||
As you saw in the preceding table of keywords, a few keywords trigger geo-spatial operations within a MongoDB query.
|
||||
The `Near` keyword allows some further modification, as the next few examples show.
|
||||
|
||||
The following example shows how to define a `near` query that finds all persons with a given distance of a given point:
|
||||
|
||||
.Advanced `Near` queries
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
public interface PersonRepository extends MongoRepository<Person, String> {
|
||||
|
||||
// { 'location' : { '$near' : [point.x, point.y], '$maxDistance' : distance}}
|
||||
List<Person> findByLocationNear(Point location, Distance distance);
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
interface PersonRepository extends ReactiveMongoRepository<Person, String> {
|
||||
|
||||
// { 'location' : { '$near' : [point.x, point.y], '$maxDistance' : distance}}
|
||||
Flux<Person> findByLocationNear(Point location, Distance distance);
|
||||
}
|
||||
----
|
||||
======
|
||||
|
||||
Adding a `Distance` parameter to the query method allows restricting results to those within the given distance.
|
||||
If the `Distance` was set up containing a `Metric`, we transparently use `$nearSphere` instead of `$near`, as the following example shows:
|
||||
|
||||
.Using `Distance` with `Metrics`
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
Point point = new Point(43.7, 48.8);
|
||||
Distance distance = new Distance(200, Metrics.KILOMETERS);
|
||||
… = repository.findByLocationNear(point, distance);
|
||||
// {'location' : {'$nearSphere' : [43.7, 48.8], '$maxDistance' : 0.03135711885774796}}
|
||||
----
|
||||
====
|
||||
|
||||
NOTE: Reactive Geo-spatial repository queries support the domain type and `GeoResult<T>` results within a reactive wrapper type. `GeoPage` and `GeoResults` are not supported as they contradict the deferred result approach with pre-calculating the average distance. However, you can still pass in a `Pageable` argument to page results yourself.
|
||||
|
||||
Using a `Distance` with a `Metric` causes a `$nearSphere` (instead of a plain `$near`) clause to be added.
|
||||
Beyond that, the actual distance gets calculated according to the `Metrics` used.
|
||||
|
||||
(Note that `Metric` does not refer to metric units of measure.
|
||||
It could be miles rather than kilometers.
|
||||
Rather, `metric` refers to the concept of a system of measurement, regardless of which system you use.)
|
||||
|
||||
NOTE: Using `@GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2DSPHERE)` on the target property forces usage of the `$nearSphere` operator.
|
||||
|
||||
[[geo-near-queries]]
|
||||
=== Geo-near Queries
|
||||
|
||||
Spring Data MongoDB supports geo-near queries, as the following example shows:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
public interface PersonRepository extends MongoRepository<Person, String> {
|
||||
|
||||
// {'geoNear' : 'location', 'near' : [x, y] }
|
||||
GeoResults<Person> findByLocationNear(Point location);
|
||||
|
||||
// No metric: {'geoNear' : 'person', 'near' : [x, y], maxDistance : distance }
|
||||
// Metric: {'geoNear' : 'person', 'near' : [x, y], 'maxDistance' : distance,
|
||||
// 'distanceMultiplier' : metric.multiplier, 'spherical' : true }
|
||||
GeoResults<Person> findByLocationNear(Point location, Distance distance);
|
||||
|
||||
// Metric: {'geoNear' : 'person', 'near' : [x, y], 'minDistance' : min,
|
||||
// 'maxDistance' : max, 'distanceMultiplier' : metric.multiplier,
|
||||
// 'spherical' : true }
|
||||
GeoResults<Person> findByLocationNear(Point location, Distance min, Distance max);
|
||||
|
||||
// {'geoNear' : 'location', 'near' : [x, y] }
|
||||
GeoResults<Person> findByLocationNear(Point location);
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
interface PersonRepository extends ReactiveMongoRepository<Person, String> {
|
||||
|
||||
// {'geoNear' : 'location', 'near' : [x, y] }
|
||||
Flux<GeoResult<Person>> findByLocationNear(Point location);
|
||||
|
||||
// No metric: {'geoNear' : 'person', 'near' : [x, y], maxDistance : distance }
|
||||
// Metric: {'geoNear' : 'person', 'near' : [x, y], 'maxDistance' : distance,
|
||||
// 'distanceMultiplier' : metric.multiplier, 'spherical' : true }
|
||||
Flux<GeoResult<Person>> findByLocationNear(Point location, Distance distance);
|
||||
|
||||
// Metric: {'geoNear' : 'person', 'near' : [x, y], 'minDistance' : min,
|
||||
// 'maxDistance' : max, 'distanceMultiplier' : metric.multiplier,
|
||||
// 'spherical' : true }
|
||||
Flux<GeoResult<Person>> findByLocationNear(Point location, Distance min, Distance max);
|
||||
|
||||
// {'geoNear' : 'location', 'near' : [x, y] }
|
||||
Flux<GeoResult<Person>> findByLocationNear(Point location);
|
||||
}
|
||||
----
|
||||
======
|
||||
|
||||
[[mongodb.repositories.queries.json-based]]
|
||||
== JSON-based Query Methods and Field Restriction
|
||||
|
||||
By adding the `org.springframework.data.mongodb.repository.Query` annotation to your repository query methods, you can specify a MongoDB JSON query string to use instead of having the query be derived from the method name, as the following example shows:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
public interface PersonRepository extends MongoRepository<Person, String> {
|
||||
|
||||
@Query("{ 'firstname' : ?0 }")
|
||||
List<Person> findByThePersonsFirstname(String firstname);
|
||||
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
public interface PersonRepository extends ReactiveMongoRepository<Person, String> {
|
||||
|
||||
@Query("{ 'firstname' : ?0 }")
|
||||
Flux<Person> findByThePersonsFirstname(String firstname);
|
||||
|
||||
}
|
||||
----
|
||||
======
|
||||
|
||||
The `?0` placeholder lets you substitute the value from the method arguments into the JSON query string.
|
||||
|
||||
NOTE: `String` parameter values are escaped during the binding process, which means that it is not possible to add MongoDB specific operators through the argument.
|
||||
|
||||
You can also use the filter property to restrict the set of properties that is mapped into the Java object, as the following example shows:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
public interface PersonRepository extends MongoRepository<Person, String> {
|
||||
|
||||
@Query(value="{ 'firstname' : ?0 }", fields="{ 'firstname' : 1, 'lastname' : 1}")
|
||||
List<Person> findByThePersonsFirstname(String firstname);
|
||||
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
public interface PersonRepository extends ReactiveMongoRepository<Person, String> {
|
||||
|
||||
@Query(value="{ 'firstname' : ?0 }", fields="{ 'firstname' : 1, 'lastname' : 1}")
|
||||
Flux<Person> findByThePersonsFirstname(String firstname);
|
||||
|
||||
}
|
||||
----
|
||||
======
|
||||
|
||||
The query in the preceding example returns only the `firstname`, `lastname` and `Id` properties of the `Person` objects.
|
||||
The `age` property, a `java.lang.Integer`, is not set and its value is therefore null.
|
||||
|
||||
[[mongodb.repositories.queries.sort]]
|
||||
== Sorting Results
|
||||
|
||||
MongoDB repositories allow various approaches to define sorting order.
|
||||
Let's take a look at the following example:
|
||||
|
||||
.Sorting Query Results
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
public interface PersonRepository extends MongoRepository<Person, String> {
|
||||
|
||||
List<Person> findByFirstnameSortByAgeDesc(String firstname); <1>
|
||||
|
||||
List<Person> findByFirstname(String firstname, Sort sort); <2>
|
||||
|
||||
@Query(sort = "{ age : -1 }")
|
||||
List<Person> findByFirstname(String firstname); <3>
|
||||
|
||||
@Query(sort = "{ age : -1 }")
|
||||
List<Person> findByLastname(String lastname, Sort sort); <4>
|
||||
}
|
||||
----
|
||||
<1> Static sorting derived from method name. `SortByAgeDesc` results in `{ age : -1 }` for the sort parameter.
|
||||
<2> Dynamic sorting using a method argument.
|
||||
`Sort.by(DESC, "age")` creates `{ age : -1 }` for the sort parameter.
|
||||
<3> Static sorting via `Query` annotation.
|
||||
Sort parameter applied as stated in the `sort` attribute.
|
||||
<4> Default sorting via `Query` annotation combined with dynamic one via a method argument. `Sort.unsorted()`
|
||||
results in `{ age : -1 }`.
|
||||
Using `Sort.by(ASC, "age")` overrides the defaults and creates `{ age : 1 }`.
|
||||
`Sort.by
|
||||
(ASC, "firstname")` alters the default and results in `{ age : -1, firstname : 1 }`.
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
public interface PersonRepository extends ReactiveMongoRepository<Person, String> {
|
||||
|
||||
Flux<Person> findByFirstnameSortByAgeDesc(String firstname);
|
||||
|
||||
Flux<Person> findByFirstname(String firstname, Sort sort);
|
||||
|
||||
@Query(sort = "{ age : -1 }")
|
||||
Flux<Person> findByFirstname(String firstname);
|
||||
|
||||
@Query(sort = "{ age : -1 }")
|
||||
Flux<Person> findByLastname(String lastname, Sort sort);
|
||||
}
|
||||
----
|
||||
======
|
||||
|
||||
[[mongodb.repositories.queries.json-spel]]
|
||||
== JSON-based Queries with SpEL Expressions
|
||||
|
||||
Query strings and field definitions can be used together with SpEL expressions to create dynamic queries at runtime.
|
||||
SpEL expressions can provide predicate values and can be used to extend predicates with subdocuments.
|
||||
|
||||
Expressions expose method arguments through an array that contains all the arguments.
|
||||
The following query uses `[0]`
|
||||
to declare the predicate value for `lastname` (which is equivalent to the `?0` parameter binding):
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
public interface PersonRepository extends MongoRepository<Person, String> {
|
||||
|
||||
@Query("{'lastname': ?#{[0]} }")
|
||||
List<Person> findByQueryWithExpression(String param0);
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
public interface PersonRepository extends ReactiveMongoRepository<Person, String> {
|
||||
|
||||
@Query("{'lastname': ?#{[0]} }")
|
||||
Flux<Person> findByQueryWithExpression(String param0);
|
||||
}
|
||||
----
|
||||
======
|
||||
|
||||
Expressions can be used to invoke functions, evaluate conditionals, and construct values.
|
||||
SpEL expressions used in conjunction with JSON reveal a side-effect, because Map-like declarations inside of SpEL read like JSON, as the following example shows:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
public interface PersonRepository extends MongoRepository<Person, String> {
|
||||
|
||||
@Query("{'id': ?#{ [0] ? {$exists :true} : [1] }}")
|
||||
List<Person> findByQueryWithExpressionAndNestedObject(boolean param0, String param1);
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
public interface PersonRepository extends ReactiveMongoRepository<Person, String> {
|
||||
|
||||
@Query("{'id': ?#{ [0] ? {$exists :true} : [1] }}")
|
||||
Flux<Person> findByQueryWithExpressionAndNestedObject(boolean param0, String param1);
|
||||
}
|
||||
----
|
||||
======
|
||||
|
||||
WARNING: SpEL in query strings can be a powerful way to enhance queries.
|
||||
However, they can also accept a broad range of unwanted arguments.
|
||||
Make sure to sanitize strings before passing them to the query to avoid creation of vulnerabilities or unwanted changes to your query.
|
||||
|
||||
Expression support is extensible through the Query SPI: `EvaluationContextExtension` & `ReactiveEvaluationContextExtension`
|
||||
The Query SPI can contribute properties and functions and can customize the root object.
|
||||
Extensions are retrieved from the application context at the time of SpEL evaluation when the query is built.
|
||||
The following example shows how to use an evaluation context extension:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
public class SampleEvaluationContextExtension extends EvaluationContextExtensionSupport {
|
||||
|
||||
@Override
|
||||
public String getExtensionId() {
|
||||
return "security";
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, Object> getProperties() {
|
||||
return Collections.singletonMap("principal", SecurityContextHolder.getCurrent().getPrincipal());
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
public class SampleEvaluationContextExtension implements ReactiveEvaluationContextExtension {
|
||||
|
||||
@Override
|
||||
public String getExtensionId() {
|
||||
return "security";
|
||||
}
|
||||
|
||||
@Override
|
||||
public Mono<? extends EvaluationContextExtension> getExtension() {
|
||||
return Mono.just(new EvaluationContextExtensionSupport() { ... });
|
||||
}
|
||||
}
|
||||
----
|
||||
======
|
||||
|
||||
NOTE: Bootstrapping `MongoRepositoryFactory` yourself is not application context-aware and requires further configuration to pick up Query SPI extensions.
|
||||
|
||||
NOTE: Reactive query methods can make use of `org.springframework.data.spel.spi.ReactiveEvaluationContextExtension`.
|
||||
|
||||
[[mongodb.repositories.queries.update]]
|
||||
== Update Methods
|
||||
|
||||
You can also use the keywords in the preceding table to create queries that identify matching documents for running updates on them.
|
||||
The actual update action is defined by the `@Update` annotation on the method itself, as the following listing shows.
|
||||
Note that the naming schema for derived queries starts with `find`.
|
||||
Using `update` (as in `updateAllByLastname(...)`) is allowed only in combination with `@Query`.
|
||||
|
||||
The update is applied to *all* matching documents and it is *not* possible to limit the scope by passing in a `Page` or by using any of the <<repositories.limit-query-result,limiting keywords>>.
|
||||
The return type can be either `void` or a _numeric_ type, such as `long`, to hold the number of modified documents.
|
||||
|
||||
.Update Methods
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
public interface PersonRepository extends CrudRepository<Person, String> {
|
||||
|
||||
@Update("{ '$inc' : { 'visits' : 1 } }")
|
||||
long findAndIncrementVisitsByLastname(String lastname); <1>
|
||||
|
||||
@Update("{ '$inc' : { 'visits' : ?1 } }")
|
||||
void findAndIncrementVisitsByLastname(String lastname, int increment); <2>
|
||||
|
||||
@Update("{ '$inc' : { 'visits' : ?#{[1]} } }")
|
||||
long findAndIncrementVisitsUsingSpELByLastname(String lastname, int increment); <3>
|
||||
|
||||
@Update(pipeline = {"{ '$set' : { 'visits' : { '$add' : [ '$visits', ?1 ] } } }"})
|
||||
void findAndIncrementVisitsViaPipelineByLastname(String lastname, int increment); <4>
|
||||
|
||||
@Update("{ '$push' : { 'shippingAddresses' : ?1 } }")
|
||||
long findAndPushShippingAddressByEmail(String email, Address address); <5>
|
||||
|
||||
@Query("{ 'lastname' : ?0 }")
|
||||
@Update("{ '$inc' : { 'visits' : ?1 } }")
|
||||
void updateAllByLastname(String lastname, int increment); <6>
|
||||
}
|
||||
----
|
||||
|
||||
<1> The filter query for the update is derived from the method name.
|
||||
The update is "`as is`" and does not bind any parameters.
|
||||
<2> The actual increment value is defined by the `increment` method argument that is bound to the `?1` placeholder.
|
||||
<3> Use the Spring Expression Language (SpEL) for parameter binding.
|
||||
<4> Use the `pipeline` attribute to issue xref:mongodb/template-crud-operations.adoc#mongo-template.aggregation-update[aggregation pipeline updates].
|
||||
<5> The update may contain complex objects.
|
||||
<6> Combine a xref:mongodb/repositories/repositories.adoc#mongodb.repositories.queries.json-based[string based query] with an update.
|
||||
====
|
||||
|
||||
WARNING: Repository updates do not emit persistence nor mapping lifecycle events.
|
||||
|
||||
[[mongodb.repositories.queries.delete]]
|
||||
== Delete Methods
|
||||
|
||||
The keywords in the preceding table can be used in conjunction with `delete…By` or `remove…By` to create queries that delete matching documents.
|
||||
|
||||
.`Delete…By` Query
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
public interface PersonRepository extends MongoRepository<Person, String> {
|
||||
|
||||
List <Person> deleteByLastname(String lastname); <1>
|
||||
|
||||
Long deletePersonByLastname(String lastname); <2>
|
||||
|
||||
@Nullable
|
||||
Person deleteSingleByLastname(String lastname); <3>
|
||||
|
||||
Optional<Person> deleteByBirthdate(Date birthdate); <4>
|
||||
}
|
||||
----
|
||||
<1> Using a return type of `List` retrieves and returns all matching documents before actually deleting them.
|
||||
<2> A numeric return type directly removes the matching documents, returning the total number of documents removed.
|
||||
<3> A single domain type result retrieves and removes the first matching document.
|
||||
<4> Same as in 3 but wrapped in an `Optional` type.
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
public interface PersonRepository extends ReactiveMongoRepository<Person, String> {
|
||||
|
||||
Flux<Person> deleteByLastname(String lastname); <1>
|
||||
|
||||
Mono<Long> deletePersonByLastname(String lastname); <2>
|
||||
|
||||
Mono<Person> deleteSingleByLastname(String lastname); <3>
|
||||
}
|
||||
----
|
||||
<1> Using a return type of `Flux` retrieves and returns all matching documents before actually deleting them.
|
||||
<2> A numeric return type directly removes the matching documents, returning the total number of documents removed.
|
||||
<3> A single domain type result retrieves and removes the first matching document.
|
||||
======
|
||||
|
||||
[[mongodb.repositories.queries.aggregation]]
|
||||
== Aggregation Methods
|
||||
|
||||
The repository layer offers means to interact with xref:mongodb/aggregation-framework.adoc[the aggregation framework] via annotated repository query methods.
|
||||
Similar to the xref:mongodb/repositories/repositories.adoc#mongodb.repositories.queries.json-based[JSON based queries], you can define a pipeline using the `org.springframework.data.mongodb.repository.Aggregation` annotation.
|
||||
The definition may contain simple placeholders like `?0` as well as link:{springDocsUrl}/core.html#expressions[SpEL expressions] `?#{ … }`.
|
||||
|
||||
.Aggregating Repository Method
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
public interface PersonRepository extends CrudRepository<Person, String> {
|
||||
|
||||
@Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }")
|
||||
List<PersonAggregate> groupByLastnameAndFirstnames(); <1>
|
||||
|
||||
@Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }")
|
||||
List<PersonAggregate> groupByLastnameAndFirstnames(Sort sort); <2>
|
||||
|
||||
@Aggregation("{ $group: { _id : $lastname, names : { $addToSet : ?0 } } }")
|
||||
List<PersonAggregate> groupByLastnameAnd(String property); <3>
|
||||
|
||||
@Aggregation("{ $group: { _id : $lastname, names : { $addToSet : ?0 } } }")
|
||||
Slice<PersonAggregate> groupByLastnameAnd(String property, Pageable page); <4>
|
||||
|
||||
@Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }")
|
||||
Stream<PersonAggregate> groupByLastnameAndFirstnamesAsStream(); <5>
|
||||
|
||||
@Aggregation("{ $group : { _id : null, total : { $sum : $age } } }")
|
||||
SumValue sumAgeUsingValueWrapper(); <6>
|
||||
|
||||
@Aggregation("{ $group : { _id : null, total : { $sum : $age } } }")
|
||||
Long sumAge(); <7>
|
||||
|
||||
@Aggregation("{ $group : { _id : null, total : { $sum : $age } } }")
|
||||
AggregationResults<SumValue> sumAgeRaw(); <8>
|
||||
|
||||
@Aggregation("{ '$project': { '_id' : '$lastname' } }")
|
||||
List<String> findAllLastnames(); <9>
|
||||
|
||||
@Aggregation(pipeline = {
|
||||
"{ $group : { _id : '$author', books: { $push: '$title' } } }",
|
||||
"{ $out : 'authors' }"
|
||||
})
|
||||
void groupAndOutSkippingOutput(); <10>
|
||||
}
|
||||
----
|
||||
[source,java]
|
||||
----
|
||||
public class PersonAggregate {
|
||||
|
||||
private @Id String lastname; <2>
|
||||
private List<String> names;
|
||||
|
||||
public PersonAggregate(String lastname, List<String> names) {
|
||||
// ...
|
||||
}
|
||||
|
||||
// Getter / Setter omitted
|
||||
}
|
||||
|
||||
public class SumValue {
|
||||
|
||||
private final Long total; <6> <8>
|
||||
|
||||
public SumValue(Long total) {
|
||||
// ...
|
||||
}
|
||||
|
||||
// Getter omitted
|
||||
}
|
||||
----
|
||||
<1> Aggregation pipeline to group first names by `lastname` in the `Person` collection returning these as `PersonAggregate`.
|
||||
<2> If `Sort` argument is present, `$sort` is appended after the declared pipeline stages so that it only affects the order of the final results after having passed all other aggregation stages.
|
||||
Therefore, the `Sort` properties are mapped against the methods return type `PersonAggregate` which turns `Sort.by("lastname")` into `{ $sort : { '_id', 1 } }` because `PersonAggregate.lastname` is annotated with `@Id`.
|
||||
<3> Replaces `?0` with the given value for `property` for a dynamic aggregation pipeline.
|
||||
<4> `$skip`, `$limit` and `$sort` can be passed on via a `Pageable` argument. Same as in <2>, the operators are appended to the pipeline definition. Methods accepting `Pageable` can return `Slice` for easier pagination.
|
||||
<5> Aggregation methods can return `Stream` to consume results directly from an underlying cursor. Make sure to close the stream after consuming it to release the server-side cursor by either calling `close()` or through `try-with-resources`.
|
||||
<6> Map the result of an aggregation returning a single `Document` to an instance of a desired `SumValue` target type.
|
||||
<7> Aggregations resulting in single document holding just an accumulation result like e.g. `$sum` can be extracted directly from the result `Document`.
|
||||
To gain more control, you might consider `AggregationResults` as the method return type, as shown in <8>.
|
||||
<8> Obtain the raw `AggregationResults` mapped to the generic target wrapper type `SumValue` or `org.bson.Document`.
|
||||
<9> Like in <6>, a single value can be directly obtained from multiple result ``Document``s.
|
||||
<10> Skips the output of the `$out` stage when return type is `void`.
|
||||
====
|
||||
|
||||
In some scenarios, aggregations might require additional options, such as a maximum run time, additional log comments, or the permission to temporarily write data to disk.
|
||||
Use the `@Meta` annotation to set those options via `maxExecutionTimeMs`, `comment` or `allowDiskUse`.
|
||||
|
||||
[source,java]
|
||||
----
|
||||
interface PersonRepository extends CrudRepository<Person, String> {
|
||||
|
||||
@Meta(allowDiskUse = true)
|
||||
@Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }")
|
||||
List<PersonAggregate> groupByLastnameAndFirstnames();
|
||||
}
|
||||
----
|
||||
|
||||
Or use `@Meta` to create your own annotation as shown in the sample below.
|
||||
|
||||
[source,java]
|
||||
----
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Target({ ElementType.METHOD })
|
||||
@Meta(allowDiskUse = true)
|
||||
@interface AllowDiskUse { }
|
||||
|
||||
interface PersonRepository extends CrudRepository<Person, String> {
|
||||
|
||||
@AllowDiskUse
|
||||
@Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }")
|
||||
List<PersonAggregate> groupByLastnameAndFirstnames();
|
||||
}
|
||||
----
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
Simple-type single-result inspects the returned `Document` and checks for the following:
|
||||
|
||||
. Only one entry in the document, return it.
|
||||
. Two entries, one is the `_id` value. Return the other.
|
||||
. Otherwise, return the first value assignable to the return type.
|
||||
. Throw an exception if none of the above is applicable.
|
||||
====
|
||||
|
||||
WARNING: The `Page` return type is not supported for repository methods using `@Aggregation`. However, you can use a
|
||||
`Pageable` argument to add `$skip`, `$limit` and `$sort` to the pipeline and let the method return `Slice`.
|
||||
|
||||
[[mongodb.repositories.index-hint]]
|
||||
== Index Hints
|
||||
|
||||
The `@Hint` annotation allows you to override MongoDB's default index selection and forces the database to use the specified index instead.
|
||||
|
||||
.Example of index hints
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Hint("lastname-idx") <1>
|
||||
List<Person> findByLastname(String lastname);
|
||||
|
||||
@Query(value = "{ 'firstname' : ?0 }", hint = "firstname-idx") <2>
|
||||
List<Person> findByFirstname(String firstname);
|
||||
----
|
||||
<1> Use the index with name `lastname-idx`.
|
||||
<2> The `@Query` annotation defines the `hint` alias which is equivalent to adding the `@Hint` annotation.
|
||||
====
|
||||
|
||||
For more information about index creation please refer to the xref:mongodb/template-collection-management.adoc[Collection Management] section.
|
||||
|
||||
[[mongo.repositories.collation]]
|
||||
== Repository Collation Support
|
||||
|
||||
Next to the xref:mongodb/collation.adoc[general Collation Support], repositories allow you to define the collation for various operations.
|
||||
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
public interface PersonRepository extends MongoRepository<Person, String> {
|
||||
|
||||
@Query(collation = "en_US") <1>
|
||||
List<Person> findByFirstname(String firstname);
|
||||
|
||||
@Query(collation = "{ 'locale' : 'en_US' }") <2>
|
||||
List<Person> findPersonByFirstname(String firstname);
|
||||
|
||||
@Query(collation = "?1") <3>
|
||||
List<Person> findByFirstname(String firstname, Object collation);
|
||||
|
||||
@Query(collation = "{ 'locale' : '?1' }") <4>
|
||||
List<Person> findByFirstname(String firstname, String collation);
|
||||
|
||||
List<Person> findByFirstname(String firstname, Collation collation); <5>
|
||||
|
||||
@Query(collation = "{ 'locale' : 'en_US' }")
|
||||
List<Person> findByFirstname(String firstname, @Nullable Collation collation); <6>
|
||||
}
|
||||
----
|
||||
<1> Static collation definition resulting in `{ 'locale' : 'en_US' }`.
|
||||
<2> Static collation definition resulting in `{ 'locale' : 'en_US' }`.
|
||||
<3> Dynamic collation depending on 2nd method argument. Allowed types include `String` (e.g. 'en_US'), `Locale` (e.g. Locale.US)
|
||||
and `Document` (eg. new Document("locale", "en_US"))
|
||||
<4> Dynamic collation depending on 2nd method argument.
|
||||
<5> Apply the `Collation` method parameter to the query.
|
||||
<6> The `Collation` method parameter overrides the default `collation` from `@Query` if not null.
|
||||
|
||||
NOTE: In case you enabled the automatic index creation for repository finder methods a potential static collation definition,
|
||||
as shown in (1) and (2), will be included when creating the index.
|
||||
|
||||
TIP: The most specific `Collation` takes precedence over any others that may be defined. That is, a method argument overrides the query method annotation, which in turn overrides the domain type annotation.
|
||||
====
|
||||
|
||||
To streamline usage of collation attributes throughout the codebase it is also possible to use the `@Collation` annotation, which serves as a meta annotation for the ones mentioned above.
|
||||
The same rules and locations apply, plus, direct usage of `@Collation` supersedes any collation values defined on `@Query` and other annotations.
|
||||
Which means, if a collation is declared via `@Query` and additionally via `@Collation`, then the one from `@Collation` is picked.
|
||||
|
||||
.Using `@Collation`
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Collation("en_US") <1>
|
||||
class Game {
|
||||
// ...
|
||||
}
|
||||
|
||||
interface GameRepository extends Repository<Game, String> {
|
||||
|
||||
@Collation("en_GB") <2>
|
||||
List<Game> findByTitle(String title);
|
||||
|
||||
@Collation("de_AT") <3>
|
||||
@Query(collation="en_GB")
|
||||
List<Game> findByDescriptionContaining(String keyword);
|
||||
}
|
||||
----
|
||||
<1> Instead of `@Document(collation=...)`.
|
||||
<2> Instead of `@Query(collation=...)`.
|
||||
<3> Favors `@Collation` over meta usage.
|
||||
====
|
||||
@@ -0,0 +1,386 @@
|
||||
[[mongo.repositories]]
|
||||
= MongoDB Repositories
|
||||
|
||||
[[mongo-repo-intro]]
|
||||
This chapter points out the specialties for repository support for MongoDB.
|
||||
This chapter builds on the core repository support explained in xref:repositories/core-concepts.adoc[core concepts].
|
||||
You should have a sound understanding of the basic concepts explained there.
|
||||
|
||||
[[mongo-repo-usage]]
|
||||
== Usage
|
||||
|
||||
To access domain entities stored in a MongoDB, you can use our sophisticated repository support that eases implementation quite significantly.
|
||||
To do so, create an interface for your repository, as the following example shows:
|
||||
|
||||
.Sample Person entity
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
public class Person {
|
||||
|
||||
@Id
|
||||
private String id;
|
||||
private String firstname;
|
||||
private String lastname;
|
||||
private Address address;
|
||||
|
||||
// … getters and setters omitted
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
Note that the domain type shown in the preceding example has a property named `id` of type `String`. The default serialization mechanism used in `MongoTemplate` (which backs the repository support) regards properties named `id` as the document ID.
|
||||
Currently, we support `String`, `ObjectId`, and `BigInteger` as ID types.
|
||||
Please see xref:mongodb/template-crud-operations.adoc#mongo-template.id-handling[ID mapping] for more information about how the `id` field is handled in the mapping layer.
|
||||
|
||||
Now that we have a domain object, we can define an interface that uses it, as follows:
|
||||
|
||||
.Basic repository interface to persist Person entities
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
public interface PersonRepository extends PagingAndSortingRepository<Person, String> {
|
||||
|
||||
// additional custom query methods go here
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
public interface PersonRepository extends ReactiveSortingRepository<Person, String> {
|
||||
|
||||
// additional custom query methods go here
|
||||
}
|
||||
----
|
||||
======
|
||||
|
||||
To start using the repository, use the `@EnableMongoRepositories` annotation.
|
||||
That annotation carries the same attributes as the namespace element.
|
||||
If no base package is configured, the infrastructure scans the package of the annotated configuration class.
|
||||
The following example shows how to configure your application to use MongoDB repositories:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
@Configuration
|
||||
@EnableMongoRepositories("com.acme.*.repositories")
|
||||
class ApplicationConfig extends AbstractMongoClientConfiguration {
|
||||
|
||||
@Override
|
||||
protected String getDatabaseName() {
|
||||
return "e-store";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMappingBasePackage() {
|
||||
return "com.acme.*.repositories";
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
====
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
@Configuration
|
||||
@EnableReactiveMongoRepositories("com.acme.*.repositories")
|
||||
class ApplicationConfig extends AbstractReactiveMongoConfiguration {
|
||||
|
||||
@Override
|
||||
protected String getDatabaseName() {
|
||||
return "e-store";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMappingBasePackage() {
|
||||
return "com.acme.*.repositories";
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
NOTE: MongoDB uses two different drivers for imperative (synchronous/blocking) and reactive (non-blocking) data access. You must create a connection by using the Reactive Streams driver to provide the required infrastructure for Spring Data's Reactive MongoDB support. Consequently, you must provide a separate configuration for MongoDB's Reactive Streams driver. Note that your application operates on two different connections if you use reactive and blocking Spring Data MongoDB templates and repositories.
|
||||
====
|
||||
|
||||
XML::
|
||||
+
|
||||
[source,xml,indent=0,subs="verbatim,quotes",role="third"]
|
||||
----
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<beans xmlns="http://www.springframework.org/schema/beans"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:mongo="http://www.springframework.org/schema/data/mongo"
|
||||
xsi:schemaLocation="http://www.springframework.org/schema/beans
|
||||
https://www.springframework.org/schema/beans/spring-beans-3.0.xsd
|
||||
http://www.springframework.org/schema/data/mongo
|
||||
https://www.springframework.org/schema/data/mongo/spring-mongo-1.0.xsd">
|
||||
|
||||
<mongo:mongo-client id="mongoClient" />
|
||||
|
||||
<bean id="mongoTemplate" class="org.springframework.data.mongodb.core.MongoTemplate">
|
||||
<constructor-arg ref="mongoClient" />
|
||||
<constructor-arg value="databaseName" />
|
||||
</bean>
|
||||
|
||||
<mongo:repositories base-package="com.acme.*.repositories" />
|
||||
|
||||
</beans>
|
||||
----
|
||||
======
|
||||
|
||||
This namespace element causes the base packages to be scanned for interfaces that extend `MongoRepository` and create Spring beans for each one found.
|
||||
By default, the repositories get a `MongoTemplate` Spring bean wired that is called `mongoTemplate`, so you only need to configure `mongo-template-ref` explicitly if you deviate from this convention.
|
||||
|
||||
Because our domain repository extends `PagingAndSortingRepository`, it provides you with methods for paginated and sorted access to the entities.
|
||||
In the case of reactive repositories only `ReactiveSortingRepository` is available since the notion of a `Page` is not applicable.
|
||||
However, finder methods still accept a `Sort` and `Limit` parameter.
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
The reactive space offers various reactive composition libraries. The most common libraries are https://github.com/ReactiveX/RxJava[RxJava] and https://projectreactor.io/[Project Reactor].
|
||||
|
||||
Spring Data MongoDB is built on top of the https://mongodb.github.io/mongo-java-driver-reactivestreams/[MongoDB Reactive Streams] driver, to provide maximal interoperability by relying on the https://www.reactive-streams.org/[Reactive Streams] initiative. Static APIs, such as `ReactiveMongoOperations`, are provided by using Project Reactor's `Flux` and `Mono` types. Project Reactor offers various adapters to convert reactive wrapper types (`Flux` to `Observable` and vice versa), but conversion can easily clutter your code.
|
||||
|
||||
Spring Data's Reactive Repository abstraction is a dynamic API, mostly defined by you and your requirements as you declare query methods. Reactive MongoDB repositories can be implemented by using either RxJava or Project Reactor wrapper types by extending from one of the following library-specific repository interfaces:
|
||||
|
||||
* `ReactiveCrudRepository`
|
||||
* `ReactiveSortingRepository`
|
||||
* `RxJava3CrudRepository`
|
||||
* `RxJava3SortingRepository`
|
||||
|
||||
Spring Data converts reactive wrapper types behind the scenes so that you can stick to your favorite composition library.
|
||||
====
|
||||
|
||||
In case you want to obtain methods for basic CRUD operations also add the `CrudRepository` interface.
|
||||
Working with the repository instance is just a matter of dependency injecting it into a client.
|
||||
Consequently, accessing the second page of `Person` objects at a page size of 10 would resemble the following code:
|
||||
|
||||
.Paging access to Person entities
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
@ExtendWith(SpringExtension.class)
|
||||
@ContextConfiguration
|
||||
class PersonRepositoryTests {
|
||||
|
||||
@Autowired PersonRepository repository;
|
||||
|
||||
@Test
|
||||
void readsFirstPageCorrectly() {
|
||||
|
||||
Page<Person> persons = repository.findAll(PageRequest.of(0, 10));
|
||||
assertThat(persons.isFirstPage()).isTrue();
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
@ExtendWith(SpringExtension.class)
|
||||
@ContextConfiguration
|
||||
class PersonRepositoryTests {
|
||||
|
||||
@Autowired PersonRepository repository;
|
||||
|
||||
@Test
|
||||
void readsFirstPageCorrectly() {
|
||||
|
||||
Flux<Person> persons = repository.findAll(Sort.unsorted(), Limit.of(10));
|
||||
|
||||
    persons.as(StepVerifier::create)
|
||||
.expectNextCount(10)
|
||||
.verifyComplete();
|
||||
}
|
||||
}
|
||||
----
|
||||
======
|
||||
|
||||
The preceding example creates an application context with Spring's unit test support, which performs annotation-based dependency injection into test cases.
|
||||
Inside the test method, we use the repository to query the datastore.
|
||||
We hand the repository a `PageRequest` instance that requests the first page of `Person` objects at a page size of 10.
|
||||
|
||||
[[mongodb.repositories.queries.type-safe]]
|
||||
== Type-safe Query Methods
|
||||
|
||||
The MongoDB repository and its reactive counterpart integrate with the http://www.querydsl.com/[Querydsl] project, which provides a way to perform type-safe queries.
|
||||
|
||||
[quote, Querydsl Team]
|
||||
Instead of writing queries as inline strings or externalizing them into XML files they are constructed via a fluent API.
|
||||
|
||||
It provides the following features:
|
||||
|
||||
* Code completion in the IDE (all properties, methods, and operations can be expanded in your favorite Java IDE).
|
||||
* Almost no syntactically invalid queries allowed (type-safe on all levels).
|
||||
* Domain types and properties can be referenced safely -- no strings involved!
|
||||
* Adapts better to refactoring changes in domain types.
|
||||
* Incremental query definition is easier.
|
||||
|
||||
See the http://www.querydsl.com/static/querydsl/latest/reference/html/[QueryDSL documentation] for how to bootstrap your environment for APT-based code generation using Maven or Ant.
|
||||
|
||||
QueryDSL lets you write queries such as the following:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
QPerson person = new QPerson("person");
|
||||
List<Person> result = repository.findAll(person.address.zipCode.eq("C0123"));
|
||||
|
||||
Page<Person> page = repository.findAll(person.lastname.contains("a"),
|
||||
PageRequest.of(0, 2, Direction.ASC, "lastname"));
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
QPerson person = QPerson.person;
|
||||
|
||||
Flux<Person> result = repository.findAll(person.address.zipCode.eq("C0123"));
|
||||
----
|
||||
======
|
||||
|
||||
`QPerson` is a class that is generated by the Java annotation post-processing tool.
|
||||
It is a `Predicate` that lets you write type-safe queries.
|
||||
Notice that there are no strings in the query other than the `C0123` value.
|
||||
|
||||
You can use the generated `Predicate` class by using the `QuerydslPredicateExecutor` / `ReactiveQuerydslPredicateExecutor` interface, which the following listing shows:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
public interface QuerydslPredicateExecutor<T> {
|
||||
|
||||
T findOne(Predicate predicate);
|
||||
|
||||
List<T> findAll(Predicate predicate);
|
||||
|
||||
List<T> findAll(Predicate predicate, Sort sort);
|
||||
|
||||
List<T> findAll(Predicate predicate, OrderSpecifier<?>... orders);
|
||||
|
||||
Page<T> findAll(Predicate predicate, Pageable pageable);
|
||||
|
||||
List<T> findAll(OrderSpecifier<?>... orders);
|
||||
|
||||
Long count(Predicate predicate);
|
||||
|
||||
Boolean exists(Predicate predicate);
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
interface ReactiveQuerydslPredicateExecutor<T> {
|
||||
|
||||
Mono<T> findOne(Predicate predicate);
|
||||
|
||||
Flux<T> findAll(Predicate predicate);
|
||||
|
||||
Flux<T> findAll(Predicate predicate, Sort sort);
|
||||
|
||||
Flux<T> findAll(Predicate predicate, OrderSpecifier<?>... orders);
|
||||
|
||||
Flux<T> findAll(OrderSpecifier<?>... orders);
|
||||
|
||||
Mono<Long> count(Predicate predicate);
|
||||
|
||||
Mono<Boolean> exists(Predicate predicate);
|
||||
}
|
||||
----
|
||||
======
|
||||
|
||||
To use this in your repository implementation, add it to the list of repository interfaces from which your interface inherits, as the following example shows:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
interface PersonRepository extends MongoRepository<Person, String>, QuerydslPredicateExecutor<Person> {
|
||||
|
||||
// additional query methods go here
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
====
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
|
||||
interface PersonRepository extends ReactiveMongoRepository<Person, String>, ReactiveQuerydslPredicateExecutor<Person> {
|
||||
|
||||
// additional query methods go here
|
||||
}
|
||||
----
|
||||
|
||||
NOTE: Please note that joins (DBRef's) are not supported with Reactive MongoDB support.
|
||||
====
|
||||
======
|
||||
|
||||
[[mongodb.repositories.queries.full-text]]
|
||||
== Full-text Search Queries
|
||||
|
||||
MongoDB's full-text search feature is store-specific and, therefore, can be found on `MongoRepository` rather than on the more general `CrudRepository`.
|
||||
We need a document with a full-text index (see "`xref:mongodb/mapping/mapping.adoc#mapping-usage-indexes.text-index[Text Indexes]`" to learn how to create a full-text index).
|
||||
|
||||
Additional methods on `MongoRepository` take `TextCriteria` as an input parameter.
|
||||
In addition to those explicit methods, it is also possible to add a `TextCriteria`-derived repository method.
|
||||
The criteria are added as an additional `AND` criteria.
|
||||
Once the entity contains a `@TextScore`-annotated property, the document's full-text score can be retrieved.
|
||||
Furthermore, the `@TextScore`-annotated property also makes it possible to sort by the document's score, as the following example shows:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
@Document
|
||||
class FullTextDocument {
|
||||
|
||||
@Id String id;
|
||||
@TextIndexed String title;
|
||||
@TextIndexed String content;
|
||||
@TextScore Float score;
|
||||
}
|
||||
|
||||
interface FullTextRepository extends Repository<FullTextDocument, String> {
|
||||
|
||||
// Execute a full-text search and define sorting dynamically
|
||||
List<FullTextDocument> findAllBy(TextCriteria criteria, Sort sort);
|
||||
|
||||
// Paginate over a full-text search result
|
||||
Page<FullTextDocument> findAllBy(TextCriteria criteria, Pageable pageable);
|
||||
|
||||
// Combine a derived query with a full-text search
|
||||
List<FullTextDocument> findByTitleOrderByScoreDesc(String title, TextCriteria criteria);
|
||||
}
|
||||
|
||||
|
||||
Sort sort = Sort.by("score");
|
||||
TextCriteria criteria = TextCriteria.forDefaultLanguage().matchingAny("spring", "data");
|
||||
List<FullTextDocument> result = repository.findAllBy(criteria, sort);
|
||||
|
||||
criteria = TextCriteria.forDefaultLanguage().matching("film");
|
||||
Page<FullTextDocument> page = repository.findAllBy(criteria, PageRequest.of(1, 1, sort));
|
||||
List<FullTextDocument> result = repository.findByTitleOrderByScoreDesc("mongodb", criteria);
|
||||
----
|
||||
@@ -1,6 +1,6 @@
|
||||
// carry over the old bookmarks to prevent external links from failing
|
||||
[[tailable-cursors]]
|
||||
== [[mongo.reactive.repositories.infinite-streams]] Infinite Streams with Tailable Cursors
|
||||
= Tailable Cursors
|
||||
|
||||
By default, MongoDB automatically closes a cursor when the client exhausts all results supplied by the cursor.
|
||||
Closing a cursor on exhaustion turns a stream into a finite stream. For https://docs.mongodb.com/manual/core/capped-collections/[capped collections],
|
||||
@@ -14,7 +14,7 @@ reactive variant, as it is less resource-intensive. However, if you cannot use t
|
||||
concept that is already prevalent in the Spring ecosystem.
|
||||
|
||||
[[tailable-cursors.sync]]
|
||||
=== Tailable Cursors with `MessageListener`
|
||||
== Tailable Cursors with `MessageListener`
|
||||
|
||||
Listening to a capped collection using a Sync Driver creates a long running, blocking task that needs to be delegated to
|
||||
a separate component. In this case, we need to first create a `MessageListenerContainer`, which will be the main entry point
|
||||
@@ -50,11 +50,11 @@ container.stop();
|
||||
<4> Provide an optional filter for documents to receive.
|
||||
<5> Set the message listener to publish incoming ``Message``s to.
|
||||
<6> Register the request. The returned `Subscription` can be used to check the current `Task` state and cancel it to free resources.
|
||||
<5> Do not forget to stop the container once you are sure you no longer need it. Doing so stops all running `Task` instances within the container.
|
||||
<7> Do not forget to stop the container once you are sure you no longer need it. Doing so stops all running `Task` instances within the container.
|
||||
====
|
||||
|
||||
[[tailable-cursors.reactive]]
|
||||
=== Reactive Tailable Cursors
|
||||
== Reactive Tailable Cursors
|
||||
|
||||
Using tailable cursors with reactive data types allows construction of infinite streams. A tailable cursor remains open until it is closed externally. It emits data as new documents arrive in a capped collection.
|
||||
|
||||
164
src/main/antora/modules/ROOT/pages/mongodb/template-api.adoc
Normal file
164
src/main/antora/modules/ROOT/pages/mongodb/template-api.adoc
Normal file
@@ -0,0 +1,164 @@
|
||||
[[mongo-template]]
|
||||
= Template API
|
||||
|
||||
The `MongoTemplate` and its reactive counterpart class, located in the `org.springframework.data.mongodb.core` package, is the central class of Spring's MongoDB support and provides a rich feature set for interacting with the database.
|
||||
The template offers convenience operations to create, update, delete, and query MongoDB documents and provides a mapping between your domain objects and MongoDB documents.
|
||||
|
||||
NOTE: Once configured, `MongoTemplate` is thread-safe and can be reused across multiple instances.
|
||||
|
||||
[[mongo-template.convenience-methods]]
|
||||
== Convenience Methods
|
||||
|
||||
The `MongoTemplate` class implements the interface `MongoOperations`.
|
||||
In as much as possible, the methods on `MongoOperations` are named after methods available on the MongoDB driver `Collection` object, to make the API familiar to existing MongoDB developers who are used to the driver API.
|
||||
For example, you can find methods such as `find`, `findAndModify`, `findAndReplace`, `findOne`, `insert`, `remove`, `save`, `update`, and `updateMulti`.
|
||||
The design goal was to make it as easy as possible to transition between the use of the base MongoDB driver and `MongoOperations`.
|
||||
A major difference between the two APIs is that `MongoOperations` can be passed domain objects instead of `Document`.
|
||||
Also, `MongoOperations` has fluent APIs for `Query`, `Criteria`, and `Update` operations instead of populating a `Document` to specify the parameters for those operations.
|
||||
|
||||
For more information please refer to the xref:mongodb/template-crud-operations.adoc[CRUD] and xref:mongodb/template-query-operations.adoc[Query] sections of the documentation.
|
||||
|
||||
NOTE: The preferred way to reference the operations on `MongoTemplate` instance is through its interface, `MongoOperations`.
|
||||
|
||||
[[mongo-template.execute-callbacks]]
|
||||
== Execute Callbacks
|
||||
|
||||
`MongoTemplate` offers many convenience methods to help you easily perform common tasks.
|
||||
However, if you need to directly access the MongoDB driver API, you can use one of several `Execute` callback methods.
|
||||
The `execute` callbacks gives you a reference to either a `MongoCollection` or a `MongoDatabase` object.
|
||||
|
||||
* `<T> T` *execute* `(Class<?> entityClass, CollectionCallback<T> action)`: Runs the given `CollectionCallback` for the entity collection of the specified class.
|
||||
|
||||
* `<T> T` *execute* `(String collectionName, CollectionCallback<T> action)`: Runs the given `CollectionCallback` on the collection of the given name.
|
||||
|
||||
* `<T> T` *execute* `(DbCallback<T> action)`: Runs a DbCallback, translating any exceptions as necessary. Spring Data MongoDB provides support for the Aggregation Framework introduced to MongoDB in version 2.2.
|
||||
|
||||
* `<T> T` *execute* `(String collectionName, DbCallback<T> action)`: Runs a `DbCallback` on the collection of the given name translating any exceptions as necessary.
|
||||
|
||||
* `<T> T` *executeInSession* `(DbCallback<T> action)`: Runs the given `DbCallback` within the same connection to the database so as to ensure consistency in a write-heavy environment where you may read the data that you wrote.
|
||||
|
||||
The following example uses the `CollectionCallback` to return information about an index:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
boolean hasIndex = template.execute("geolocation", collection ->
|
||||
Streamable.of(collection.listIndexes(org.bson.Document.class))
|
||||
.stream()
|
||||
.map(document -> document.get("name"))
|
||||
.anyMatch("location_2d"::equals)
|
||||
);
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
Mono<Boolean> hasIndex = template.execute("geolocation", collection ->
|
||||
Flux.from(collection.listIndexes(org.bson.Document.class))
|
||||
.map(document -> document.get("name"))
|
||||
.filterWhen(name -> Mono.just("location_2d".equals(name)))
|
||||
.map(it -> Boolean.TRUE)
|
||||
.single(Boolean.FALSE)
|
||||
).next();
|
||||
----
|
||||
======
|
||||
|
||||
[[mongo-template.fluent-api]]
|
||||
== Fluent API
|
||||
|
||||
Being the central component when it comes to more low-level interaction with MongoDB `MongoTemplate` offers a wide range of methods covering needs from collection creation, index creation, and CRUD operations to more advanced functionality, such as Map-Reduce and aggregations.
|
||||
You can find multiple overloads for each method.
|
||||
Most of them cover optional or nullable parts of the API.
|
||||
|
||||
`FluentMongoOperations` provides a more narrow interface for the common methods of `MongoOperations` and provides a more readable, fluent API.
|
||||
The entry points (`insert(…)`, `find(…)`, `update(…)`, and others) follow a natural naming schema based on the operation to be run.
|
||||
Moving on from the entry point, the API is designed to offer only context-dependent methods that lead to a terminating method that invokes the actual `MongoOperations` counterpart -- the `all` method in the case of the following example:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
List<Jedi> all = template.query(SWCharacter.class) <1>
|
||||
.inCollection("star-wars") <2>
|
||||
.as(Jedi.class) <3>
|
||||
.matching(query(where("jedi").is(true))) <4>
|
||||
.all();
|
||||
----
|
||||
<1> The type used to map fields used in the query to.
|
||||
<2> The collection name to use if not defined on the domain type.
|
||||
<3> Result type if not using the original domain type.
|
||||
<4> The lookup query.
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
Flux<Jedi> all = template.query(SWCharacter.class)
|
||||
.inCollection("star-wars")
|
||||
.as(Jedi.class)
|
||||
.matching(query(where("jedi").is(true)))
|
||||
.all();
|
||||
----
|
||||
======
|
||||
|
||||
NOTE: Using projections allows `MongoTemplate` to optimize result mapping by limiting the actual response to fields required
|
||||
by the projection target type. This applies as long as the `Query` itself does not contain any field restriction and the
|
||||
target type is a closed interface or DTO projection.
|
||||
|
||||
WARNING: Projections must not be applied to xref:mongodb/mapping/document-references.adoc[DBRefs].
|
||||
|
||||
You can switch between retrieving a single entity and retrieving multiple entities as a `List` or a `Stream` through the terminating methods: `first()`, `one()`, `all()`, or `stream()`.
|
||||
|
||||
When writing a geo-spatial query with `near(NearQuery)`, the number of terminating methods is altered to include only the methods that are valid for running a `geoNear` command in MongoDB (fetching entities as a `GeoResult` within `GeoResults`), as the following example shows:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
GeoResults<Jedi> results = template.query(SWCharacter.class)
|
||||
.as(Jedi.class)
|
||||
.near(alderaan) // NearQuery.near(-73.9667, 40.78).maxDis…
|
||||
.all();
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
Flux<GeoResult<Jedi>> results = template.query(SWCharacter.class)
|
||||
.as(Jedi.class)
|
||||
.near(alderaan) // NearQuery.near(-73.9667, 40.78).maxDis…
|
||||
.all();
|
||||
----
|
||||
======
|
||||
|
||||
[[mongo-template.exception-translation]]
|
||||
== Exception Translation
|
||||
|
||||
The Spring framework provides exception translation for a wide variety of database and mapping technologies.
|
||||
This has traditionally been for JDBC and JPA.
|
||||
The Spring support for MongoDB extends this feature to the MongoDB Database by providing an implementation of the `org.springframework.dao.support.PersistenceExceptionTranslator` interface.
|
||||
|
||||
The motivation behind mapping to Spring's link:{springDocsUrl}/data-access.html#dao-exceptions[consistent data access exception hierarchy] is that you are then able to write portable and descriptive exception handling code without resorting to coding against MongoDB error codes.
|
||||
All of Spring's data access exceptions are inherited from the root `DataAccessException` class so that you can be sure to catch all database related exception within a single try-catch block.
|
||||
Note that not all exceptions thrown by the MongoDB driver inherit from the `MongoException` class.
|
||||
The inner exception and message are preserved so that no information is lost.
|
||||
|
||||
Some of the mappings performed by the `MongoExceptionTranslator` are `com.mongodb.Network to DataAccessResourceFailureException` and `MongoException` error codes 1003, 12001, 12010, 12011, and 12012 to `InvalidDataAccessApiUsageException`.
|
||||
Look into the implementation for more details on the mapping.
|
||||
|
||||
[[mongo-template.type-mapping]]
|
||||
== Domain Type Mapping
|
||||
|
||||
The mapping between MongoDB documents and domain classes is done by delegating to an implementation of the `MongoConverter` interface.
|
||||
Spring provides `MappingMongoConverter`, but you can also write your own converter.
|
||||
While the `MappingMongoConverter` can use additional metadata to specify the mapping of objects to documents, it can also convert objects that contain no additional metadata by using some conventions for the mapping of IDs and collection names.
|
||||
These conventions, as well as the use of mapping annotations, are explained in the xref:mongodb/mapping/mapping.adoc[Mapping] chapter.
|
||||
@@ -0,0 +1,205 @@
|
||||
[[mongo-template.index-and-collections]]
|
||||
= Index and Collection Management
|
||||
|
||||
`MongoTemplate` and `ReactiveMongoTemplate` provide methods for managing indexes and collections.
|
||||
These methods are collected into a helper interface called `IndexOperations` (or `ReactiveIndexOperations` for the reactive variant).
|
||||
You can access these operations by calling the `indexOps` method and passing in either the collection name or the `java.lang.Class` of your entity (the collection name is derived from the `.class`, either by name or from annotation metadata).
|
||||
|
||||
The following listing shows the `IndexOperations` interface:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
public interface IndexOperations {
|
||||
|
||||
void ensureIndex(IndexDefinition indexDefinition);
|
||||
|
||||
void alterIndex(String name);
|
||||
|
||||
void dropIndex(String name);
|
||||
|
||||
void dropAllIndexes();
|
||||
|
||||
void resetIndexCache();
|
||||
|
||||
List<IndexInfo> getIndexInfo();
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
public interface ReactiveIndexOperations {
|
||||
|
||||
Mono<String> ensureIndex(IndexDefinition indexDefinition);
|
||||
|
||||
Mono<Void> alterIndex(String name, IndexOptions options);
|
||||
|
||||
Mono<Void> dropIndex(String name);
|
||||
|
||||
Mono<Void> dropAllIndexes();
|
||||
|
||||
Flux<IndexInfo> getIndexInfo();
|
||||
----
|
||||
======
|
||||
|
||||
[[mongo-template.index-and-collections.index]]
|
||||
== Methods for Creating an Index
|
||||
|
||||
You can create an index on a collection to improve query performance by using the MongoTemplate class, as the following example shows:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
template.indexOps(Person.class)
|
||||
.ensureIndex(new Index().on("name",Order.ASCENDING));
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
Mono<String> createIndex = template.indexOps(Person.class)
|
||||
.ensureIndex(new Index().on("name",Order.ASCENDING));
|
||||
----
|
||||
======
|
||||
|
||||
`ensureIndex` makes sure that an index for the provided `IndexDefinition` exists for the collection.
|
||||
|
||||
You can create standard, geospatial, and text indexes by using the `IndexDefinition`, `GeoSpatialIndex` and `TextIndexDefinition` classes.
|
||||
For example, given the `Venue` class defined in a previous section, you could declare a geospatial query, as the following example shows:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
template.indexOps(Venue.class)
|
||||
.ensureIndex(new GeospatialIndex("location"));
|
||||
----
|
||||
|
||||
NOTE: `Index` and `GeospatialIndex` support configuration of xref:mongodb/template-query-operations.adoc#mongo.query.collation[collations].
|
||||
|
||||
[[mongo-template.index-and-collections.access]]
|
||||
== Accessing Index Information
|
||||
|
||||
The `IndexOperations` interface has the `getIndexInfo` method that returns a list of `IndexInfo` objects.
|
||||
This list contains all the indexes defined on the collection. The following example defines an index on the `Person` class that has an `age` property:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
template.indexOps(Person.class)
|
||||
.ensureIndex(new Index().on("age", Order.DESCENDING).unique());
|
||||
|
||||
List<IndexInfo> indexInfoList = template.indexOps(Person.class)
|
||||
.getIndexInfo();
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
Mono<String> ageIndex = template.indexOps(Person.class)
|
||||
.ensureIndex(new Index().on("age", Order.DESCENDING).unique());
|
||||
|
||||
Flux<IndexInfo> indexInfo = ageIndex.then(template.indexOps(Person.class)
|
||||
.getIndexInfo());
|
||||
----
|
||||
======
|
||||
|
||||
[[mongo-template.index-and-collections.collection]]
|
||||
== Methods for Working with a Collection
|
||||
|
||||
The following example shows how to create a collection:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
MongoCollection<Document> collection = null;
|
||||
if (!template.getCollectionNames().contains("MyNewCollection")) {
|
||||
collection = mongoTemplate.createCollection("MyNewCollection");
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
MongoCollection<Document> collection = template.getCollectionNames().collectList()
|
||||
.flatMap(collectionNames -> {
|
||||
if(!collectionNames.contains("MyNewCollection")) {
|
||||
return template.createCollection("MyNewCollection");
|
||||
}
|
||||
return template.getMongoDatabase().map(db -> db.getCollection("MyNewCollection"));
|
||||
});
|
||||
----
|
||||
======
|
||||
|
||||
NOTE: Collection creation allows customization with `CollectionOptions` and supports xref:mongodb/collation.adoc[collations].
|
||||
|
||||
.Methods to interact with MongoCollections
|
||||
[%collapsible]
|
||||
====
|
||||
* *getCollectionNames*: Returns a set of collection names.
|
||||
* *collectionExists*: Checks to see if a collection with a given name exists.
|
||||
* *createCollection*: Creates an uncapped collection.
|
||||
* *dropCollection*: Drops the collection.
|
||||
* *getCollection*: Gets a collection by name, creating it if it does not exist.
|
||||
====
|
||||
|
||||
[[time-series]]
|
||||
== Time Series
|
||||
|
||||
MongoDB 5.0 introduced https://docs.mongodb.com/manual/core/timeseries-collections/[Time Series] collections that are optimized to efficiently store documents over time such as measurements or events.
|
||||
Those collections need to be created as such before inserting any data.
|
||||
Collections can be created by either running the `createCollection` command, defining time series collection options or extracting options from a `@TimeSeries` annotation as shown in the examples below.
|
||||
|
||||
.Create a Time Series Collection
|
||||
====
|
||||
.Create a Time Series via the MongoDB Driver
|
||||
[source,java]
|
||||
----
|
||||
template.execute(db -> {
|
||||
|
||||
com.mongodb.client.model.CreateCollectionOptions options = new CreateCollectionOptions();
|
||||
options.timeSeriesOptions(new TimeSeriesOptions("timestamp"));
|
||||
|
||||
db.createCollection("weather", options);
|
||||
return "OK";
|
||||
});
|
||||
----
|
||||
|
||||
.Create a Time Series Collection with `CollectionOptions`
|
||||
[source,java]
|
||||
----
|
||||
template.createCollection("weather", CollectionOptions.timeSeries("timestamp"));
|
||||
----
|
||||
|
||||
.Create a Time Series Collection derived from an Annotation
|
||||
[source,java]
|
||||
----
|
||||
@TimeSeries(collection="weather", timeField = "timestamp")
|
||||
public class Measurement {
|
||||
|
||||
String id;
|
||||
Instant timestamp;
|
||||
// ...
|
||||
}
|
||||
|
||||
template.createCollection(Measurement.class);
|
||||
----
|
||||
====
|
||||
|
||||
The snippets above can easily be transferred to the reactive API offering the very same methods.
|
||||
Make sure to properly _subscribe_ to the returned publishers.
|
||||
167
src/main/antora/modules/ROOT/pages/mongodb/template-config.adoc
Normal file
167
src/main/antora/modules/ROOT/pages/mongodb/template-config.adoc
Normal file
@@ -0,0 +1,167 @@
|
||||
[[mongo-template.instantiating]]
|
||||
= Configuration
|
||||
|
||||
You can use the following configuration to create and register an instance of `MongoTemplate`, as the following example shows:
|
||||
|
||||
.Registering a `MongoClient` object and enabling Spring's exception translation support
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
@Configuration
|
||||
class ApplicationConfiguration {
|
||||
|
||||
@Bean
|
||||
MongoClient mongoClient() {
|
||||
return MongoClients.create("mongodb://localhost:27017");
|
||||
}
|
||||
|
||||
@Bean
|
||||
MongoOperations mongoTemplate(MongoClient mongoClient) {
|
||||
return new MongoTemplate(mongoClient, "geospatial");
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
@Configuration
|
||||
class ReactiveApplicationConfiguration {
|
||||
|
||||
@Bean
|
||||
MongoClient mongoClient() {
|
||||
return MongoClients.create("mongodb://localhost:27017");
|
||||
}
|
||||
|
||||
@Bean
|
||||
ReactiveMongoOperations mongoTemplate(MongoClient mongoClient) {
|
||||
return new ReactiveMongoTemplate(mongoClient, "geospatial");
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
XML::
|
||||
+
|
||||
[source,xml,indent=0,subs="verbatim,quotes",role="third"]
|
||||
----
|
||||
<mongo:mongo-client host="localhost" port="27017" />
|
||||
|
||||
<bean id="mongoTemplate" class="org.springframework.data.mongodb.core.MongoTemplate">
|
||||
<constructor-arg ref="mongoClient" />
|
||||
<constructor-arg name="databaseName" value="geospatial" />
|
||||
</bean>
|
||||
----
|
||||
======
|
||||
|
||||
There are several overloaded constructors of `MongoTemplate` and `ReactiveMongoTemplate`:
|
||||
|
||||
* `MongoTemplate(MongoClient mongo, String databaseName)`: Takes the `MongoClient` object and the default database name to operate against.
|
||||
* `MongoTemplate(MongoDatabaseFactory mongoDbFactory)`: Takes a `MongoDatabaseFactory` object that encapsulates the `MongoClient` object, database name, and username and password.
|
||||
* `MongoTemplate(MongoDatabaseFactory mongoDbFactory, MongoConverter mongoConverter)`: Adds a `MongoConverter` to use for mapping.
|
||||
|
||||
Other optional properties that you might like to set when creating a `MongoTemplate` / `ReactiveMongoTemplate` are the default `WriteResultCheckingPolicy`, `WriteConcern`, `ReadPreference` and others listed below.
|
||||
|
||||
[[mongo-template.read-preference]]
|
||||
== Default Read Preference
|
||||
|
||||
The default read preference applied to read operations if no other preference was defined via the xref:mongodb/template-query-operations.adoc#mongo.query.read-preference[Query].
|
||||
|
||||
[[mongo-template.writeresultchecking]]
|
||||
== WriteResultChecking Policy
|
||||
|
||||
When in development, it is handy to either log or throw an exception if the `com.mongodb.WriteResult` returned from any MongoDB operation contains an error. It is quite common to forget to do this during development and then end up with an application that looks like it runs successfully when, in fact, the database was not modified according to your expectations. You can set the `WriteResultChecking` property of `MongoTemplate` to one of the following values: `EXCEPTION` or `NONE`, to either throw an `Exception` or do nothing, respectively. The default is to use a `WriteResultChecking` value of `NONE`.
|
||||
|
||||
[[mongo-template.writeconcern]]
|
||||
== Default WriteConcern
|
||||
|
||||
If it has not yet been specified through the driver at a higher level (such as `com.mongodb.client.MongoClient`), you can set the `com.mongodb.WriteConcern` property that the `MongoTemplate` uses for write operations. If the `WriteConcern` property is not set, it defaults to the one set in the MongoDB driver's DB or Collection setting.
|
||||
|
||||
[[mongo-template.writeconcernresolver]]
|
||||
== WriteConcernResolver
|
||||
|
||||
For more advanced cases where you want to set different `WriteConcern` values on a per-operation basis (for remove, update, insert, and save operations), a strategy interface called `WriteConcernResolver` can be configured on `MongoTemplate`. Since `MongoTemplate` is used to persist POJOs, the `WriteConcernResolver` lets you create a policy that can map a specific POJO class to a `WriteConcern` value. The following listing shows the `WriteConcernResolver` interface:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
public interface WriteConcernResolver {
|
||||
WriteConcern resolve(MongoAction action);
|
||||
}
|
||||
----
|
||||
|
||||
You can use the `MongoAction` argument to determine the `WriteConcern` value or use the value of the Template itself as a default.
|
||||
`MongoAction` contains the collection name being written to, the `java.lang.Class` of the POJO, the converted `Document`, the operation (`REMOVE`, `UPDATE`, `INSERT`, `INSERT_LIST`, or `SAVE`), and a few other pieces of contextual information.
|
||||
The following example shows two sets of classes getting different `WriteConcern` settings:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
public class MyAppWriteConcernResolver implements WriteConcernResolver {
|
||||
|
||||
@Override
|
||||
public WriteConcern resolve(MongoAction action) {
|
||||
if (action.getEntityType().getSimpleName().contains("Audit")) {
|
||||
return WriteConcern.ACKNOWLEDGED;
|
||||
} else if (action.getEntityType().getSimpleName().contains("Metadata")) {
|
||||
return WriteConcern.JOURNALED;
|
||||
}
|
||||
return action.getDefaultWriteConcern();
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
[[mongo-template.entity-lifecycle-events]]
|
||||
== Publish entity lifecycle events
|
||||
|
||||
The template publishes xref:mongodb/lifecycle-events.adoc#mongodb.mapping-usage.events[lifecycle events].
|
||||
In case there are no listeners present, this feature can be disabled.
|
||||
|
||||
[source,java]
|
||||
----
|
||||
@Bean
|
||||
MongoOperations mongoTemplate(MongoClient mongoClient) {
|
||||
MongoTemplate template = new MongoTemplate(mongoClient, "geospatial");
|
||||
template.setEntityLifecycleEventsEnabled(false);
|
||||
// ...
|
||||
}
|
||||
----
|
||||
|
||||
[[mongo-template.entity-callbacks-config]]
|
||||
== Configure EntityCallbacks
|
||||
|
||||
Next to lifecycle events the template invokes xref:mongodb/lifecycle-events.adoc#mongo.entity-callbacks[EntityCallbacks] which can be (if not auto configured) set via the template API.
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
@Bean
|
||||
MongoOperations mongoTemplate(MongoClient mongoClient) {
|
||||
MongoTemplate template = new MongoTemplate(mongoClient, "...");
|
||||
template.setEntityCallbacks(EntityCallbacks.create(...));
|
||||
// ...
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
@Bean
|
||||
ReactiveMongoOperations mongoTemplate(MongoClient mongoClient) {
|
||||
ReactiveMongoTemplate template = new ReactiveMongoTemplate(mongoClient, "...");
|
||||
template.setEntityCallbacks(ReactiveEntityCallbacks.create(...));
|
||||
// ...
|
||||
}
|
||||
----
|
||||
======
|
||||
|
||||
[[mongo-template.count-documents-config]]
|
||||
== Document count configuration
|
||||
|
||||
By setting `MongoTemplate#useEstimatedCount(...)` to `true` _MongoTemplate#count(...)_ operations, that use an empty filter query, will be delegated to `estimatedCount`, as long as there is no transaction active and the template is not bound to a xref:mongodb/client-session-transactions.adoc[session].
|
||||
Please refer to the xref:mongodb/template-document-count.adoc#mongo.query.count[Counting Documents] section for more information.
|
||||
@@ -0,0 +1,632 @@
|
||||
[[mongo-template.save-update-remove]]
|
||||
= Saving, Updating, and Removing Documents
|
||||
|
||||
`MongoTemplate` / `ReactiveMongoTemplate` let you save, update, and delete your domain objects and map those objects to documents stored in MongoDB.
|
||||
The API signatures of the imperative and reactive API are mainly the same only differing in their return types.
|
||||
While the synchronous API uses `void`, single `Object` and `List` the reactive counterpart consists of `Mono<Void>`, `Mono<Object>` and `Flux`.
|
||||
|
||||
Consider the following class:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
include::example$example/Person.java[tags=class]
|
||||
----
|
||||
|
||||
Given the `Person` class in the preceding example, you can save, update and delete the object, as the following example shows:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
====
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
public class MongoApplication {
|
||||
|
||||
private static final Log log = LogFactory.getLog(MongoApplication.class);
|
||||
|
||||
public static void main(String[] args) {
|
||||
|
||||
MongoOperations template = new MongoTemplate(new SimpleMongoClientDbFactory(MongoClients.create(), "database"));
|
||||
|
||||
Person p = new Person("Joe", 34);
|
||||
|
||||
// Insert is used to initially store the object into the database.
|
||||
template.insert(p);
|
||||
log.info("Insert: " + p);
|
||||
|
||||
// Find
|
||||
p = template.findById(p.getId(), Person.class);
|
||||
log.info("Found: " + p);
|
||||
|
||||
// Update
|
||||
template.updateFirst(query(where("name").is("Joe")), update("age", 35), Person.class);
|
||||
p = template.findOne(query(where("name").is("Joe")), Person.class);
|
||||
log.info("Updated: " + p);
|
||||
|
||||
// Delete
|
||||
template.remove(p);
|
||||
|
||||
// Check that deletion worked
|
||||
List<Person> people = template.findAll(Person.class);
|
||||
log.info("Number of people = : " + people.size());
|
||||
|
||||
|
||||
template.dropCollection(Person.class);
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
The preceding example would produce the following log output (including debug messages from `MongoTemplate`):
|
||||
|
||||
[source]
|
||||
----
|
||||
DEBUG apping.MongoPersistentEntityIndexCreator: 80 - Analyzing class class org.spring.example.Person for index information.
|
||||
DEBUG work.data.mongodb.core.MongoTemplate: 632 - insert Document containing fields: [_class, age, name] in collection: person
|
||||
INFO org.spring.example.MongoApp: 30 - Insert: Person [id=4ddc6e784ce5b1eba3ceaf5c, name=Joe, age=34]
|
||||
DEBUG work.data.mongodb.core.MongoTemplate:1246 - findOne using query: { "_id" : { "$oid" : "4ddc6e784ce5b1eba3ceaf5c"}} in db.collection: database.person
|
||||
INFO org.spring.example.MongoApp: 34 - Found: Person [id=4ddc6e784ce5b1eba3ceaf5c, name=Joe, age=34]
|
||||
DEBUG work.data.mongodb.core.MongoTemplate: 778 - calling update using query: { "name" : "Joe"} and update: { "$set" : { "age" : 35}} in collection: person
|
||||
DEBUG work.data.mongodb.core.MongoTemplate:1246 - findOne using query: { "name" : "Joe"} in db.collection: database.person
|
||||
INFO org.spring.example.MongoApp: 39 - Updated: Person [id=4ddc6e784ce5b1eba3ceaf5c, name=Joe, age=35]
|
||||
DEBUG work.data.mongodb.core.MongoTemplate: 823 - remove using query: { "id" : "4ddc6e784ce5b1eba3ceaf5c"} in collection: person
|
||||
INFO org.spring.example.MongoApp: 46 - Number of people = : 0
|
||||
DEBUG work.data.mongodb.core.MongoTemplate: 376 - Dropped collection [database.person]
|
||||
----
|
||||
====
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
public class ReactiveMongoApplication {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(ReactiveMongoApplication.class);
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
CountDownLatch latch = new CountDownLatch(1);
|
||||
|
||||
ReactiveMongoTemplate template = new ReactiveMongoTemplate(MongoClients.create(), "database");
|
||||
|
||||
template.insert(new Person("Joe", 34)).doOnNext(person -> log.info("Insert: " + person))
|
||||
.flatMap(person -> template.findById(person.getId(), Person.class))
|
||||
.doOnNext(person -> log.info("Found: " + person))
|
||||
.zipWith(person -> template.updateFirst(query(where("name").is("Joe")), update("age", 35), Person.class))
|
||||
.flatMap(tuple -> template.remove(tuple.getT1())).flatMap(deleteResult -> template.findAll(Person.class))
|
||||
.count().doOnSuccess(count -> {
|
||||
log.info("Number of people: " + count);
|
||||
latch.countDown();
|
||||
})
|
||||
|
||||
.subscribe();
|
||||
|
||||
latch.await();
|
||||
}
|
||||
}
|
||||
----
|
||||
======
|
||||
|
||||
`MongoConverter` caused implicit conversion between a `String` and an `ObjectId` stored in the database by recognizing (through convention) the `Id` property name.
|
||||
|
||||
The preceding example is meant to show the use of save, update, and remove operations on `MongoTemplate` / `ReactiveMongoTemplate` and not to show complex mapping functionality.
|
||||
The query syntax used in the preceding example is explained in more detail in the section "`xref:mongodb/template-query-operations.adoc[Querying Documents]`".
|
||||
|
||||
IMPORTANT: MongoDB requires that you have an `_id` field for all documents. Please refer to the xref:mongodb/template-crud-operations.adoc[ID handling] section for details on the special treatment of this field.
|
||||
|
||||
IMPORTANT: MongoDB collections can contain documents that represent instances of a variety of types. Please refer to the xref:mongodb/converters-type-mapping.adoc[type mapping] for details.
|
||||
|
||||
[[mongo-template.save-insert]]
|
||||
== Insert / Save
|
||||
|
||||
There are several convenient methods on `MongoTemplate` for saving and inserting your objects.
|
||||
To have more fine-grained control over the conversion process, you can register Spring converters with the `MappingMongoConverter` -- for example `Converter<Person, Document>` and `Converter<Document, Person>`.
|
||||
|
||||
NOTE: The difference between insert and save operations is that a save operation performs an insert if the object is not already present.
|
||||
|
||||
The simple case of using the save operation is to save a POJO.
|
||||
In this case, the collection name is determined by the name (not fully qualified) of the class.
|
||||
You may also call the save operation with a specific collection name. You can use mapping metadata to override the collection in which to store the object.
|
||||
|
||||
When inserting or saving, if the `Id` property is not set, the assumption is that its value will be auto-generated by the database.
|
||||
Consequently, for auto-generation of an `ObjectId` to succeed, the type of the `Id` property or field in your class must be a `String`, an `ObjectId`, or a `BigInteger`.
|
||||
|
||||
The following example shows how to save a document and retrieve its contents:
|
||||
|
||||
.Inserting and retrieving documents using the MongoTemplate
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.where;
|
||||
import static org.springframework.data.mongodb.core.query.Query.query;
|
||||
|
||||
//...
|
||||
|
||||
template.insert(new Person("Bob", 33));
|
||||
|
||||
Person person = template.query(Person.class)
|
||||
.matching(query(where("age").is(33)))
|
||||
.oneValue();
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.where;
|
||||
import static org.springframework.data.mongodb.core.query.Query.query;
|
||||
|
||||
//...
|
||||
|
||||
Mono<Person> person = mongoTemplate.insert(new Person("Bob", 33))
|
||||
.then(mongoTemplate.query(Person.class)
|
||||
.matching(query(where("age").is(33)))
|
||||
.one());
|
||||
----
|
||||
======
|
||||
|
||||
The following insert and save operations are available:
|
||||
|
||||
* `void` *save* `(Object objectToSave)`: Save the object to the default collection.
|
||||
* `void` *save* `(Object objectToSave, String collectionName)`: Save the object to the specified collection.
|
||||
|
||||
A similar set of insert operations is also available:
|
||||
|
||||
* `void` *insert* `(Object objectToSave)`: Insert the object to the default collection.
|
||||
* `void` *insert* `(Object objectToSave, String collectionName)`: Insert the object to the specified collection.
|
||||
|
||||
[[mongo-template.id-handling]]
|
||||
=== How the `_id` Field is Handled in the Mapping Layer
|
||||
|
||||
MongoDB requires that you have an `_id` field for all documents.
|
||||
If you do not provide one, the driver assigns an `ObjectId` with a generated value. When you use the `MappingMongoConverter`, certain rules govern how properties from the Java class are mapped to this `_id` field:
|
||||
|
||||
. A property or field annotated with `@Id` (`org.springframework.data.annotation.Id`) maps to the `_id` field.
|
||||
. A property or field without an annotation but named `id` maps to the `_id` field.
|
||||
|
||||
The following outlines what type conversion, if any, is done on the property mapped to the `_id` document field when using the `MappingMongoConverter` (the default for `MongoTemplate`).
|
||||
|
||||
. If possible, an `id` property or field declared as a `String` in the Java class is converted to and stored as an `ObjectId` by using a Spring `Converter<String, ObjectId>`. Valid conversion rules are delegated to the MongoDB Java driver. If it cannot be converted to an `ObjectId`, then the value is stored as a string in the database.
|
||||
. An `id` property or field declared as `BigInteger` in the Java class is converted to and stored as an `ObjectId` by using a Spring `Converter<BigInteger, ObjectId>`.
|
||||
|
||||
If no field or property specified in the previous sets of rules is present in the Java class, an implicit `_id` field is generated by the driver but not mapped to a property or field of the Java class.
|
||||
|
||||
When querying and updating, `MongoTemplate` uses the converter that corresponds to the preceding rules for saving documents so that field names and types used in your queries can match what is in your domain classes.
|
||||
|
||||
Some environments require a customized approach to map `Id` values such as data stored in MongoDB that did not run through the Spring Data mapping layer. Documents can contain `_id` values that can be represented either as `ObjectId` or as `String`.
|
||||
Reading documents from the store back to the domain type works just fine. Querying for documents via their `id` can be cumbersome due to the implicit `ObjectId` conversion. Therefore documents cannot be retrieved that way.
|
||||
For those cases `@MongoId` provides more control over the actual id mapping attempts.
|
||||
|
||||
.`@MongoId` mapping
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
public class PlainStringId {
|
||||
@MongoId String id; <1>
|
||||
}
|
||||
|
||||
public class PlainObjectId {
|
||||
@MongoId ObjectId id; <2>
|
||||
}
|
||||
|
||||
public class StringToObjectId {
|
||||
@MongoId(FieldType.OBJECT_ID) String id; <3>
|
||||
}
|
||||
----
|
||||
<1> The id is treated as `String` without further conversion.
|
||||
<2> The id is treated as `ObjectId`.
|
||||
<3> The id is treated as `ObjectId` if the given `String` is a valid `ObjectId` hex, otherwise as `String`. Corresponds to `@Id` usage.
|
||||
====
|
||||
|
||||
|
||||
[[mongo-template.save-insert.collection]]
|
||||
=== Into Which Collection Are My Documents Saved?
|
||||
|
||||
There are two ways to manage the collection name that is used for the documents.
|
||||
The default collection name that is used is the class name changed to start with a lower-case letter.
|
||||
So a `com.test.Person` class is stored in the `person` collection.
|
||||
You can customize this by providing a different collection name with the `@Document` annotation.
|
||||
You can also override the collection name by providing your own collection name as the last parameter for the selected `MongoTemplate` method calls.
|
||||
|
||||
[[mongo-template.save-insert.individual]]
|
||||
=== Inserting or Saving Individual Objects
|
||||
|
||||
The MongoDB driver supports inserting a collection of documents in a single operation.
|
||||
The following methods in the `MongoOperations` interface support this functionality:
|
||||
|
||||
* *insert*: Inserts an object. If there is an existing document with the same `id`, an error is generated.
|
||||
* *insertAll*: Takes a `Collection` of objects as the first parameter. This method inspects each object and inserts it into the appropriate collection, based on the rules specified earlier.
|
||||
* *save*: Saves the object, overwriting any object that might have the same `id`.
|
||||
|
||||
[[mongo-template.save-insert.batch]]
|
||||
=== Inserting Several Objects in a Batch
|
||||
|
||||
The MongoDB driver supports inserting a collection of documents in one operation.
|
||||
The following methods in the `MongoOperations` interface support this functionality via `insert` or a dedicated `BulkOperations` interface.
|
||||
|
||||
.Batch Insert
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
Collection<Person> inserted = template.insert(List.of(...), Person.class);
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
Flux<Person> inserted = template.insert(List.of(...), Person.class);
|
||||
----
|
||||
======
|
||||
|
||||
.Bulk Insert
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
BulkWriteResult result = template.bulkOps(BulkMode.ORDERED, Person.class)
|
||||
.insert(List.of(...))
|
||||
.execute();
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
Mono<BulkWriteResult> result = template.bulkOps(BulkMode.ORDERED, Person.class)
|
||||
.insert(List.of(...))
|
||||
.execute();
|
||||
----
|
||||
======
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
Server performance of batch and bulk is identical.
|
||||
However bulk operations do not publish xref:mongodb/lifecycle-events.adoc[lifecycle events].
|
||||
====
|
||||
|
||||
[[mongodb-template-update]]
|
||||
== Update
|
||||
|
||||
For updates, you can update the first document found by using `MongoOperation.updateFirst` or you can update all documents that were found to match the query by using the `MongoOperation.updateMulti` method or `all` on the fluent API.
|
||||
The following example shows an update of all `SAVINGS` accounts where we are adding a one-time $50.00 bonus to the balance by using the `$inc` operator:
|
||||
|
||||
.Updating documents by using the `MongoTemplate` / `ReactiveMongoTemplate`
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.where;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
|
||||
// ...
|
||||
|
||||
UpdateResult result = template.update(Account.class)
|
||||
.matching(where("accounts.accountType").is(Type.SAVINGS))
|
||||
.apply(new Update().inc("accounts.$.balance", 50.00))
|
||||
.all();
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.where;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
|
||||
// ...
|
||||
|
||||
Mono<UpdateResult> result = template.update(Account.class)
|
||||
.matching(where("accounts.accountType").is(Type.SAVINGS))
|
||||
.apply(new Update().inc("accounts.$.balance", 50.00))
|
||||
.all();
|
||||
----
|
||||
======
|
||||
|
||||
In addition to the `Query` discussed earlier, we provide the update definition by using an `Update` object.
|
||||
The `Update` class has methods that match the update modifiers available for MongoDB.
|
||||
Most methods return the `Update` object to provide a fluent style for the API.
|
||||
|
||||
[[mongodb-template-update.methods]]
|
||||
=== Methods for Running Updates for Documents
|
||||
|
||||
* *updateFirst*: Updates the first document that matches the query document criteria with the updated document.
|
||||
* *updateMulti*: Updates all objects that match the query document criteria with the updated document.
|
||||
|
||||
WARNING: `updateFirst` does not support ordering. Please use xref:mongodb/template-crud-operations.adoc#mongo-template.find-and-upsert[findAndModify] to apply `Sort`.
|
||||
|
||||
NOTE: Index hints for the update operation can be provided via `Query.withHint(...)`.
|
||||
|
||||
[[mongodb-template-update.update]]
|
||||
=== Methods in the `Update` Class
|
||||
|
||||
You can use a little "'syntax sugar'" with the `Update` class, as its methods are meant to be chained together.
|
||||
Also, you can kick-start the creation of a new `Update` instance by using `public static Update update(String key, Object value)` and using static imports.
|
||||
|
||||
The `Update` class contains the following methods:
|
||||
|
||||
* `Update` *addToSet* `(String key, Object value)` Update using the `$addToSet` update modifier
|
||||
* `Update` *currentDate* `(String key)` Update using the `$currentDate` update modifier
|
||||
* `Update` *currentTimestamp* `(String key)` Update using the `$currentDate` update modifier with `$type` `timestamp`
|
||||
* `Update` *inc* `(String key, Number inc)` Update using the `$inc` update modifier
|
||||
* `Update` *max* `(String key, Object max)` Update using the `$max` update modifier
|
||||
* `Update` *min* `(String key, Object min)` Update using the `$min` update modifier
|
||||
* `Update` *multiply* `(String key, Number multiplier)` Update using the `$mul` update modifier
|
||||
* `Update` *pop* `(String key, Update.Position pos)` Update using the `$pop` update modifier
|
||||
* `Update` *pull* `(String key, Object value)` Update using the `$pull` update modifier
|
||||
* `Update` *pullAll* `(String key, Object[] values)` Update using the `$pullAll` update modifier
|
||||
* `Update` *push* `(String key, Object value)` Update using the `$push` update modifier
|
||||
* `Update` *pushAll* `(String key, Object[] values)` Update using the `$pushAll` update modifier
|
||||
* `Update` *rename* `(String oldName, String newName)` Update using the `$rename` update modifier
|
||||
* `Update` *set* `(String key, Object value)` Update using the `$set` update modifier
|
||||
* `Update` *setOnInsert* `(String key, Object value)` Update using the `$setOnInsert` update modifier
|
||||
* `Update` *unset* `(String key)` Update using the `$unset` update modifier
|
||||
|
||||
Some update modifiers, such as `$push` and `$addToSet`, allow nesting of additional operators.
|
||||
|
||||
[source,java]
|
||||
----
|
||||
// { $push : { "category" : { "$each" : [ "spring" , "data" ] } } }
|
||||
new Update().push("category").each("spring", "data")
|
||||
|
||||
// { $push : { "key" : { "$position" : 0 , "$each" : [ "Arya" , "Arry" , "Weasel" ] } } }
|
||||
new Update().push("key").atPosition(Position.FIRST).each(Arrays.asList("Arya", "Arry", "Weasel"));
|
||||
|
||||
// { $push : { "key" : { "$slice" : 5 , "$each" : [ "Arya" , "Arry" , "Weasel" ] } } }
|
||||
new Update().push("key").slice(5).each(Arrays.asList("Arya", "Arry", "Weasel"));
|
||||
|
||||
// { $addToSet : { "values" : { "$each" : [ "spring" , "data" , "mongodb" ] } } }
|
||||
new Update().addToSet("values").each("spring", "data", "mongodb");
|
||||
----
|
||||
|
||||
[[mongo-template.aggregation-update]]
|
||||
=== Aggregation Pipeline Updates
|
||||
|
||||
Update methods exposed by `MongoOperations` and `ReactiveMongoOperations` also accept an xref:mongodb/aggregation-framework.adoc[Aggregation Pipeline] via `AggregationUpdate`.
|
||||
Using `AggregationUpdate` allows leveraging https://docs.mongodb.com/manual/reference/method/db.collection.update/#update-with-aggregation-pipeline[MongoDB 4.2 aggregations] in an update operation.
|
||||
Using aggregations in an update allows updating one or more fields by expressing multiple stages and multiple conditions with a single operation.
|
||||
|
||||
The update can consist of the following stages:
|
||||
|
||||
* `AggregationUpdate.set(...).toValue(...)` -> `$set : { ... }`
|
||||
* `AggregationUpdate.unset(...)` -> `$unset : [ ... ]`
|
||||
* `AggregationUpdate.replaceWith(...)` -> `$replaceWith : { ... }`
|
||||
|
||||
.Update Aggregation
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
AggregationUpdate update = Aggregation.newUpdate()
|
||||
.set("average").toValue(ArithmeticOperators.valueOf("tests").avg()) <1>
|
||||
.set("grade").toValue(ConditionalOperators.switchCases( <2>
|
||||
when(valueOf("average").greaterThanEqualToValue(90)).then("A"),
|
||||
when(valueOf("average").greaterThanEqualToValue(80)).then("B"),
|
||||
when(valueOf("average").greaterThanEqualToValue(70)).then("C"),
|
||||
when(valueOf("average").greaterThanEqualToValue(60)).then("D"))
|
||||
.defaultTo("F")
|
||||
);
|
||||
|
||||
template.update(Student.class) <3>
|
||||
.apply(update)
|
||||
.all(); <4>
|
||||
----
|
||||
[source,javascript]
|
||||
----
|
||||
db.students.update( <3>
|
||||
{ },
|
||||
[
|
||||
{ $set: { average : { $avg: "$tests" } } }, <1>
|
||||
{ $set: { grade: { $switch: { <2>
|
||||
branches: [
|
||||
{ case: { $gte: [ "$average", 90 ] }, then: "A" },
|
||||
{ case: { $gte: [ "$average", 80 ] }, then: "B" },
|
||||
{ case: { $gte: [ "$average", 70 ] }, then: "C" },
|
||||
{ case: { $gte: [ "$average", 60 ] }, then: "D" }
|
||||
],
|
||||
default: "F"
|
||||
} } } }
|
||||
],
|
||||
{ multi: true } <4>
|
||||
)
|
||||
----
|
||||
<1> The 1st `$set` stage calculates a new field _average_ based on the average of the _tests_ field.
|
||||
<2> The 2nd `$set` stage calculates a new field _grade_ based on the _average_ field calculated by the first aggregation stage.
|
||||
<3> The pipeline is run on the _students_ collection and uses `Student` for the aggregation field mapping.
|
||||
<4> Apply the update to all matching documents in the collection.
|
||||
====
|
||||
|
||||
[[mongo-template.upserts]]
|
||||
== Upsert
|
||||
|
||||
Related to performing an `updateFirst` operation, you can also perform an `upsert` operation, which will perform an insert if no document is found that matches the query.
|
||||
The document that is inserted is a combination of the query document and the update document.
|
||||
The following example shows how to use the `upsert` method:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
UpdateResult result = template.update(Person.class)
|
||||
    .matching(query(where("ssn").is(1111).and("firstName").is("Joe").and("Fraizer").is("Update")))
|
||||
.apply(update("address", addr))
|
||||
.upsert();
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
Mono<UpdateResult> result = template.update(Person.class)
|
||||
    .matching(query(where("ssn").is(1111).and("firstName").is("Joe").and("Fraizer").is("Update")))
|
||||
.apply(update("address", addr))
|
||||
.upsert();
|
||||
----
|
||||
======
|
||||
|
||||
WARNING: `upsert` does not support ordering. Please use xref:mongodb/template-crud-operations.adoc#mongo-template.find-and-upsert[findAndModify] to apply `Sort`.
|
||||
|
||||
[[mongo-template.find-and-upsert]]
|
||||
== Find and Modify
|
||||
|
||||
The `findAndModify(…)` method on `MongoCollection` can update a document and return either the old or newly updated document in a single operation.
|
||||
`MongoTemplate` provides four `findAndModify` overloaded methods that take `Query` and `Update` classes and converts from `Document` to your POJOs:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
<T> T findAndModify(Query query, Update update, Class<T> entityClass);
|
||||
|
||||
<T> T findAndModify(Query query, Update update, Class<T> entityClass, String collectionName);
|
||||
|
||||
<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
|
||||
<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass, String collectionName);
|
||||
----
|
||||
|
||||
The following example inserts a few `Person` objects into the container and performs a `findAndUpdate` operation:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
template.insert(new Person("Tom", 21));
|
||||
template.insert(new Person("Dick", 22));
|
||||
template.insert(new Person("Harry", 23));
|
||||
|
||||
Query query = new Query(Criteria.where("firstName").is("Harry"));
|
||||
Update update = new Update().inc("age", 1);
|
||||
|
||||
Person oldValue = template.update(Person.class)
|
||||
.matching(query)
|
||||
.apply(update)
|
||||
.findAndModifyValue(); // oldValue.age == 23
|
||||
|
||||
Person newValue = template.query(Person.class)
|
||||
.matching(query)
|
||||
    .findOneValue(); // newValue.age == 24
|
||||
|
||||
Person newestValue = template.update(Person.class)
|
||||
.matching(query)
|
||||
.apply(update)
|
||||
.withOptions(FindAndModifyOptions.options().returnNew(true)) // Now return the newly updated document when updating
|
||||
.findAndModifyValue(); // newestValue.age == 25
|
||||
----
|
||||
|
||||
The `FindAndModifyOptions` method lets you set the options of `returnNew`, `upsert`, and `remove`.
|
||||
An example extending from the previous code snippet follows:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
Person upserted = template.update(Person.class)
|
||||
.matching(new Query(Criteria.where("firstName").is("Mary")))
|
||||
.apply(update)
|
||||
.withOptions(FindAndModifyOptions.options().upsert(true).returnNew(true))
|
||||
.findAndModifyValue()
|
||||
----
|
||||
|
||||
[[mongo-template.find-and-replace]]
|
||||
== Find and Replace
|
||||
|
||||
The most straightforward method of replacing an entire `Document` is via its `id` using the `save` method.
|
||||
However this might not always be feasible.
|
||||
`findAndReplace` offers an alternative that allows to identify the document to replace via a simple query.
|
||||
|
||||
.Find and Replace Documents
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
Optional<User> result = template.update(Person.class) <1>
|
||||
    .matching(query(where("firstname").is("Tom"))) <2>
|
||||
.replaceWith(new Person("Dick"))
|
||||
.withOptions(FindAndReplaceOptions.options().upsert()) <3>
|
||||
.as(User.class) <4>
|
||||
.findAndReplace(); <5>
|
||||
----
|
||||
<1> Use the fluent update API with the domain type given for mapping the query and deriving the collection name or just use `MongoOperations#findAndReplace`.
|
||||
<2> The actual match query mapped against the given domain type. Provide `sort`, `fields` and `collation` settings via the query.
|
||||
<3> Additional optional hook to provide options other than the defaults, like `upsert`.
|
||||
<4> An optional projection type used for mapping the operation result. If none given the initial domain type is used.
|
||||
<5> Trigger the actual processing. Use `findAndReplaceValue` to obtain the nullable result instead of an `Optional`.
|
||||
====
|
||||
|
||||
IMPORTANT: Please note that the replacement must not hold an `id` itself as the `id` of the existing `Document` will be
|
||||
carried over to the replacement by the store itself. Also keep in mind that `findAndReplace` will only replace the first
|
||||
document matching the query criteria depending on a potentially given sort order.
|
||||
|
||||
[[mongo-template.delete]]
|
||||
== Delete
|
||||
|
||||
You can use one of five overloaded methods to remove an object from the database:
|
||||
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
template.remove(tywin, "GOT"); <1>
|
||||
|
||||
template.remove(query(where("lastname").is("lannister")), "GOT"); <2>
|
||||
|
||||
template.remove(new Query().limit(3), "GOT"); <3>
|
||||
|
||||
template.findAllAndRemove(query(where("lastname").is("lannister")), "GOT"); <4>
|
||||
|
||||
template.findAllAndRemove(new Query().limit(3), "GOT"); <5>
|
||||
----
|
||||
<1> Remove a single entity specified by its `_id` from the associated collection.
|
||||
<2> Remove all documents that match the criteria of the query from the `GOT` collection.
|
||||
<3> Remove the first three documents in the `GOT` collection. Unlike <2>, the documents to remove are identified by their `_id`, running the given query, applying `sort`, `limit`, and `skip` options first, and then removing all at once in a separate step.
|
||||
<4> Remove all documents matching the criteria of the query from the `GOT` collection. Unlike <3>, documents do not get deleted in a batch but one by one.
|
||||
<5> Remove the first three documents in the `GOT` collection. Unlike <3>, documents do not get deleted in a batch but one by one.
|
||||
====
|
||||
|
||||
[[mongo-template.optimistic-locking]]
|
||||
== Optimistic Locking
|
||||
|
||||
The `@Version` annotation provides syntax similar to that of JPA in the context of MongoDB and makes sure updates are only applied to documents with a matching version.
|
||||
Therefore, the actual value of the version property is added to the update query in such a way that the update does not have any effect if another operation altered the document in the meantime.
|
||||
In that case, an `OptimisticLockingFailureException` is thrown.
|
||||
The following example shows these features:
|
||||
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Document
|
||||
class Person {
|
||||
|
||||
@Id String id;
|
||||
String firstname;
|
||||
String lastname;
|
||||
@Version Long version;
|
||||
}
|
||||
|
||||
Person daenerys = template.insert(new Person("Daenerys")); <1>
|
||||
|
||||
Person tmp = template.findOne(query(where("id").is(daenerys.getId())), Person.class); <2>
|
||||
|
||||
daenerys.setLastname("Targaryen");
|
||||
template.save(daenerys); <3>
|
||||
|
||||
template.save(tmp); // throws OptimisticLockingFailureException <4>
|
||||
----
|
||||
<1> Initially insert document. `version` is set to `0`.
|
||||
<2> Load the just inserted document. `version` is still `0`.
|
||||
<3> Update the document with `version = 0`. Set the `lastname` and bump `version` to `1`.
|
||||
<4> Try to update the previously loaded document that still has `version = 0`. The operation fails with an `OptimisticLockingFailureException`, as the current `version` is `1`.
|
||||
====
|
||||
|
||||
IMPORTANT: Optimistic Locking requires to set the `WriteConcern` to `ACKNOWLEDGED`. Otherwise `OptimisticLockingFailureException` can be silently swallowed.
|
||||
|
||||
NOTE: As of Version 2.2 `MongoOperations` also includes the `@Version` property when removing an entity from the database.
|
||||
To remove a `Document` without version check use `MongoOperations#remove(Query,...)` instead of `MongoOperations#remove(Object)`.
|
||||
|
||||
NOTE: As of Version 2.2 repositories check for the outcome of acknowledged deletes when removing versioned entities.
|
||||
An `OptimisticLockingFailureException` is raised if a versioned entity cannot be deleted through `CrudRepository.delete(Object)`. In such case, the version was changed or the object was deleted in the meantime. Use `CrudRepository.deleteById(ID)` to bypass optimistic locking functionality and delete objects regardless of their version.
|
||||
|
||||
@@ -0,0 +1,48 @@
|
||||
[[mongo.query.count]]
|
||||
= Counting Documents
|
||||
|
||||
The template API offers various methods to count the number of documents matching a given criteria.
|
||||
One of them is outlined below.
|
||||
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
template.query(Person.class)
|
||||
.matching(query(where("firstname").is("luke")))
|
||||
.count();
|
||||
----
|
||||
====
|
||||
|
||||
In pre-3.x versions of Spring Data MongoDB, the count operation used MongoDB's internal collection statistics.
|
||||
With the introduction of xref:mongodb/client-session-transactions.adoc#mongo.transactions[MongoDB Transactions] this was no longer possible because statistics would not correctly reflect potential changes during a transaction requiring an aggregation-based count approach.
|
||||
So in version 2.x `MongoOperations.count()` would use the collection statistics if no transaction was in progress, and the aggregation variant if so.
|
||||
|
||||
As of Spring Data MongoDB 3.x, any `count` operation uses the aggregation-based count approach via MongoDB's `countDocuments`, regardless of the existence of filter criteria.
|
||||
If the application is fine with the limitations of working upon collection statistics `MongoOperations.estimatedCount()` offers an alternative.
|
||||
|
||||
[TIP]
|
||||
====
|
||||
By setting `MongoTemplate#useEstimatedCount(...)` to `true` _MongoTemplate#count(...)_ operations, that use an empty filter query, will be delegated to `estimatedCount`, as long as there is no transaction active and the template is not bound to a xref:mongodb/client-session-transactions.adoc[session].
|
||||
It will still be possible to obtain exact numbers via `MongoTemplate#exactCount`, but may speed up things.
|
||||
====
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
MongoDB's native `countDocuments` method and the `$match` aggregation do not support `$near` and `$nearSphere` but require `$geoWithin` along with `$center` or `$centerSphere`, which does not support `$minDistance` (see https://jira.mongodb.org/browse/SERVER-37043).
|
||||
|
||||
Therefore a given `Query` will be rewritten for `count` operations using `Reactive`-/`MongoTemplate` to bypass the issue like shown below.
|
||||
|
||||
[source,javascript]
|
||||
----
|
||||
{ location : { $near : [-73.99171, 40.738868], $maxDistance : 1.1 } } <1>
|
||||
{ location : { $geoWithin : { $center: [ [-73.99171, 40.738868], 1.1] } } } <2>
|
||||
|
||||
{ location : { $near : [-73.99171, 40.738868], $minDistance : 0.1, $maxDistance : 1.1 } } <3>
|
||||
{$and :[ { $nor :[ { location :{ $geoWithin :{ $center :[ [-73.99171, 40.738868 ], 0.01] } } } ]}, { location :{ $geoWithin :{ $center :[ [-73.99171, 40.738868 ], 1.1] } } } ] } <4>
|
||||
----
|
||||
<1> Count source query using `$near`.
|
||||
<2> Rewritten query now using `$geoWithin` with `$center`.
|
||||
<3> Count source query using `$near` with `$minDistance` and `$maxDistance`.
|
||||
<4> Rewritten query, now a combination of `$nor` and `$geoWithin` criteria, to work around the unsupported `$minDistance`.
|
||||
====
|
||||
|
||||
214
src/main/antora/modules/ROOT/pages/mongodb/template-gridfs.adoc
Normal file
214
src/main/antora/modules/ROOT/pages/mongodb/template-gridfs.adoc
Normal file
@@ -0,0 +1,214 @@
|
||||
[[gridfs]]
|
||||
= GridFS Support
|
||||
|
||||
MongoDB supports storing binary files inside its filesystem, GridFS.
|
||||
Spring Data MongoDB provides a `GridFsOperations` and `ReactiveGridFsOperations` interface as well as the corresponding implementation, `GridFsTemplate` and `ReactiveGridFsTemplate`, to let you interact with the filesystem.
|
||||
You can set up a template instance by handing it a `MongoDatabaseFactory`/`ReactiveMongoDatabaseFactory` as well as a `MongoConverter`, as the following example shows:
|
||||
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
class GridFsConfiguration extends AbstractMongoClientConfiguration {
|
||||
|
||||
// … further configuration omitted
|
||||
|
||||
@Bean
|
||||
public GridFsTemplate gridFsTemplate() {
|
||||
return new GridFsTemplate(mongoDbFactory(), mappingMongoConverter());
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
class ReactiveGridFsConfiguration extends AbstractReactiveMongoConfiguration {
|
||||
|
||||
// … further configuration omitted
|
||||
|
||||
@Bean
|
||||
public ReactiveGridFsTemplate reactiveGridFsTemplate() {
|
||||
return new ReactiveGridFsTemplate(reactiveMongoDbFactory(), mappingMongoConverter());
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
XML::
|
||||
+
|
||||
[source,xml,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<beans xmlns="http://www.springframework.org/schema/beans"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:mongo="http://www.springframework.org/schema/data/mongo"
|
||||
xsi:schemaLocation="http://www.springframework.org/schema/data/mongo
|
||||
https://www.springframework.org/schema/data/mongo/spring-mongo.xsd
|
||||
http://www.springframework.org/schema/beans
|
||||
https://www.springframework.org/schema/beans/spring-beans.xsd">
|
||||
|
||||
<mongo:db-factory id="mongoDbFactory" dbname="database" />
|
||||
<mongo:mapping-converter id="converter" />
|
||||
|
||||
<bean class="org.springframework.data.mongodb.gridfs.GridFsTemplate">
|
||||
<constructor-arg ref="mongoDbFactory" />
|
||||
<constructor-arg ref="converter" />
|
||||
</bean>
|
||||
|
||||
</beans>
|
||||
----
|
||||
======
|
||||
|
||||
The template can now be injected and used to perform storage and retrieval operations, as the following example shows:
|
||||
|
||||
.Using GridFS to store files
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
====
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
class GridFsClient {
|
||||
|
||||
@Autowired
|
||||
GridFsOperations operations;
|
||||
|
||||
@Test
|
||||
public void storeFileToGridFs() {
|
||||
|
||||
FileMetadata metadata = new FileMetadata();
|
||||
// populate metadata
|
||||
Resource file = … // lookup File or Resource
|
||||
|
||||
operations.store(file.getInputStream(), "filename.txt", metadata);
|
||||
}
|
||||
}
|
||||
----
|
||||
The `store(…)` operations take an `InputStream`, a filename, and (optionally) metadata information about the file to store.
|
||||
The metadata can be an arbitrary object, which will be marshaled by the `MongoConverter` configured with the `GridFsTemplate`.
|
||||
Alternatively, you can also provide a `Document`.
|
||||
====
|
||||
|
||||
Reactive::
|
||||
+
|
||||
====
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
class ReactiveGridFsClient {
|
||||
|
||||
@Autowired
|
||||
ReactiveGridFsTemplate operations;
|
||||
|
||||
@Test
|
||||
public Mono<ObjectId> storeFileToGridFs() {
|
||||
|
||||
FileMetadata metadata = new FileMetadata();
|
||||
// populate metadata
|
||||
Publisher<DataBuffer> file = … // lookup File or Resource
|
||||
|
||||
return operations.store(file, "filename.txt", metadata);
|
||||
}
|
||||
}
|
||||
----
|
||||
The `store(…)` operations take a `Publisher<DataBuffer>`, a filename, and (optionally) metadata information about the file to store.
|
||||
The metadata can be an arbitrary object, which will be marshaled by the `MongoConverter` configured with the `ReactiveGridFsTemplate`.
|
||||
Alternatively, you can also provide a `Document`.
|
||||
|
||||
The MongoDB's driver uses `AsyncInputStream` and `AsyncOutputStream` interfaces to exchange binary streams.
|
||||
Spring Data MongoDB adapts these interfaces to `Publisher<DataBuffer>`.
|
||||
Read more about `DataBuffer` in https://docs.spring.io/spring-framework/docs/{springVersion}/reference/html/core.html#databuffers[Spring's reference documentation].
|
||||
====
|
||||
======
|
||||
|
||||
You can read files from the filesystem through either the `find(…)` or the `getResources(…)` methods.
|
||||
Let's have a look at the `find(…)` methods first.
|
||||
You can either find a single file or multiple files that match a `Query`.
|
||||
You can use the `GridFsCriteria` helper class to define queries.
|
||||
It provides static factory methods to encapsulate default metadata fields (such as `whereFilename()` and `whereContentType()`) or a custom one through `whereMetaData()`.
|
||||
The following example shows how to use the template to query for files:
|
||||
|
||||
.Using GridFsTemplate to query for files
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
class GridFsClient {
|
||||
|
||||
@Autowired
|
||||
GridFsOperations operations;
|
||||
|
||||
@Test
|
||||
public void findFilesInGridFs() {
|
||||
GridFSFindIterable result = operations.find(query(whereFilename().is("filename.txt")));
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
class ReactiveGridFsClient {
|
||||
|
||||
@Autowired
|
||||
ReactiveGridFsTemplate operations;
|
||||
|
||||
@Test
|
||||
public Flux<GridFSFile> findFilesInGridFs() {
|
||||
return operations.find(query(whereFilename().is("filename.txt")))
|
||||
}
|
||||
}
|
||||
----
|
||||
======
|
||||
|
||||
NOTE: Currently, MongoDB does not support defining sort criteria when retrieving files from GridFS. For this reason, any sort criteria defined on the `Query` instance handed into the `find(…)` method are disregarded.
|
||||
|
||||
The other option to read files from the GridFs is to use the methods introduced by the `ResourcePatternResolver` interface.
|
||||
They allow handing an Ant path into the method and can thus retrieve files matching the given pattern.
|
||||
The following example shows how to use `GridFsTemplate` to read files:
|
||||
|
||||
.Using GridFsTemplate to read files
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
class GridFsClient {
|
||||
|
||||
@Autowired
|
||||
GridFsOperations operations;
|
||||
|
||||
public GridFsResources[] readFilesFromGridFs() {
|
||||
return operations.getResources("*.txt");
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
class ReactiveGridFsClient {
|
||||
|
||||
@Autowired
|
||||
ReactiveGridFsOperations operations;
|
||||
|
||||
public Flux<ReactiveGridFsResource> readFilesFromGridFs() {
|
||||
return operations.getResources("*.txt");
|
||||
}
|
||||
}
|
||||
----
|
||||
======
|
||||
|
||||
`GridFsOperations` extends `ResourcePatternResolver` and lets the `GridFsTemplate` (for example) be plugged into an `ApplicationContext` to read Spring Config files from a MongoDB database.
|
||||
|
||||
NOTE: By default, `GridFsTemplate` obtains `GridFSBucket` once upon the first GridFS interaction.
|
||||
After that, the template instance reuses the cached bucket.
|
||||
To use different buckets from the same Template instance, use the constructor accepting `Supplier<GridFSBucket>`.
|
||||
@@ -0,0 +1,945 @@
|
||||
[[mongo.query]]
|
||||
= Querying Documents
|
||||
:page-section-summary-toc: 1
|
||||
|
||||
You can use the `Query` and `Criteria` classes to express your queries.
|
||||
They have method names that mirror the native MongoDB operator names, such as `lt`, `lte`, `is`, and others.
|
||||
The `Query` and `Criteria` classes follow a fluent API style so that you can chain together multiple method criteria and queries while having easy-to-understand code.
|
||||
To improve readability, static imports let you avoid using the 'new' keyword for creating `Query` and `Criteria` instances.
|
||||
You can also use `BasicQuery` to create `Query` instances from plain JSON Strings, as shown in the following example:
|
||||
|
||||
.Creating a Query instance from a plain JSON String
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
BasicQuery query = new BasicQuery("{ age : { $lt : 50 }, accounts.balance : { $gt : 1000.00 }}");
|
||||
List<Person> result = mongoTemplate.find(query, Person.class);
|
||||
----
|
||||
====
|
||||
|
||||
[[mongodb-template-query]]
|
||||
== Querying Documents in a Collection
|
||||
|
||||
Earlier, we saw how to retrieve a single document by using the `findOne` and `findById` methods on `MongoTemplate`.
|
||||
These methods return a single domain object right away or, when using a reactive API, a `Mono` emitting a single element.
|
||||
We can also query for a collection of documents to be returned as a list of domain objects.
|
||||
Assuming that we have a number of `Person` objects with name and age stored as documents in a collection and that each person has an embedded account document with a balance, we can now run a query using the following code:
|
||||
|
||||
.Querying for documents using the MongoTemplate
|
||||
[tabs]
|
||||
======
|
||||
Imperative::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
|
||||
----
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.where;
|
||||
import static org.springframework.data.mongodb.core.query.Query.query;
|
||||
|
||||
// ...
|
||||
|
||||
List<Person> result = template.query(Person.class)
|
||||
.matching(query(where("age").lt(50).and("accounts.balance").gt(1000.00d)))
|
||||
.all();
|
||||
----
|
||||
|
||||
Reactive::
|
||||
+
|
||||
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
|
||||
----
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.where;
|
||||
import static org.springframework.data.mongodb.core.query.Query.query;
|
||||
|
||||
// ...
|
||||
|
||||
Flux<Person> result = template.query(Person.class)
|
||||
.matching(query(where("age").lt(50).and("accounts.balance").gt(1000.00d)))
|
||||
.all();
|
||||
----
|
||||
======
|
||||
|
||||
All find methods take a `Query` object as a parameter.
|
||||
This object defines the criteria and options used to perform the query.
|
||||
The criteria are specified by using a `Criteria` object that has a static factory method named `where` to instantiate a new `Criteria` object.
|
||||
We recommend using static imports for `org.springframework.data.mongodb.core.query.Criteria.where` and `Query.query` to make the query more readable.
|
||||
|
||||
The query should return a `List` or `Flux` of `Person` objects that meet the specified criteria.
|
||||
The rest of this section lists the methods of the `Criteria` and `Query` classes that correspond to the operators provided in MongoDB.
|
||||
Most methods return the `Criteria` object, to provide a fluent style for the API.
|
||||
|
||||
[[mongodb-template-query.criteria]]
|
||||
.Methods of the Criteria Class
|
||||
[%collapsible]
|
||||
====
|
||||
The `Criteria` class provides the following methods, all of which correspond to operators in MongoDB:
|
||||
|
||||
* `Criteria` *all* `(Object o)` Creates a criterion using the `$all` operator
|
||||
* `Criteria` *and* `(String key)` Adds a chained `Criteria` with the specified `key` to the current `Criteria` and returns the newly created one
|
||||
* `Criteria` *andOperator* `(Criteria... criteria)` Creates an and query using the `$and` operator for all of the provided criteria (requires MongoDB 2.0 or later)
|
||||
* `Criteria` *andOperator* `(Collection<Criteria> criteria)` Creates an and query using the `$and` operator for all of the provided criteria (requires MongoDB 2.0 or later)
|
||||
* `Criteria` *elemMatch* `(Criteria c)` Creates a criterion using the `$elemMatch` operator
|
||||
* `Criteria` *exists* `(boolean b)` Creates a criterion using the `$exists` operator
|
||||
* `Criteria` *gt* `(Object o)` Creates a criterion using the `$gt` operator
|
||||
* `Criteria` *gte* `(Object o)` Creates a criterion using the `$gte` operator
|
||||
* `Criteria` *in* `(Object... o)` Creates a criterion using the `$in` operator for a varargs argument.
|
||||
* `Criteria` *in* `(Collection<?> collection)` Creates a criterion using the `$in` operator using a collection
|
||||
* `Criteria` *is* `(Object o)` Creates a criterion using field matching (`{ key:value }`). If the specified value is a document, the order of the fields and exact equality in the document matters.
|
||||
* `Criteria` *lt* `(Object o)` Creates a criterion using the `$lt` operator
|
||||
* `Criteria` *lte* `(Object o)` Creates a criterion using the `$lte` operator
|
||||
* `Criteria` *mod* `(Number value, Number remainder)` Creates a criterion using the `$mod` operator
|
||||
* `Criteria` *ne* `(Object o)` Creates a criterion using the `$ne` operator
|
||||
* `Criteria` *nin* `(Object... o)` Creates a criterion using the `$nin` operator
|
||||
* `Criteria` *norOperator* `(Criteria... criteria)` Creates a nor query using the `$nor` operator for all of the provided criteria
|
||||
* `Criteria` *norOperator* `(Collection<Criteria> criteria)` Creates a nor query using the `$nor` operator for all of the provided criteria
|
||||
* `Criteria` *not* `()` Creates a criterion using the `$not` meta operator which affects the clause directly following
|
||||
* `Criteria` *orOperator* `(Criteria... criteria)` Creates an or query using the `$or` operator for all of the provided criteria
|
||||
* `Criteria` *orOperator* `(Collection<Criteria> criteria)` Creates an or query using the `$or` operator for all of the provided criteria
|
||||
* `Criteria` *regex* `(String re)` Creates a criterion using a `$regex`
|
||||
* `Criteria` *sampleRate* `(double sampleRate)` Creates a criterion using the `$sampleRate` operator
|
||||
* `Criteria` *size* `(int s)` Creates a criterion using the `$size` operator
|
||||
* `Criteria` *type* `(int t)` Creates a criterion using the `$type` operator
|
||||
* `Criteria` *matchingDocumentStructure* `(MongoJsonSchema schema)` Creates a criterion using the `$jsonSchema` operator for xref:mongodb/mapping/mapping-schema.adoc[JSON schema criteria]. `$jsonSchema` can only be applied on the top level of a query and not property specific. Use the `properties` attribute of the schema to match against nested fields.
|
||||
* `Criteria` *bits()* is the gateway to https://docs.mongodb.com/manual/reference/operator/query-bitwise/[MongoDB bitwise query operators] like `$bitsAllClear`.
|
||||
|
||||
The Criteria class also provides the following methods for geospatial queries.
|
||||
|
||||
* `Criteria` *within* `(Circle circle)` Creates a geospatial criterion using `$geoWithin $center` operators.
|
||||
* `Criteria` *within* `(Box box)` Creates a geospatial criterion using a `$geoWithin $box` operation.
|
||||
* `Criteria` *withinSphere* `(Circle circle)` Creates a geospatial criterion using `$geoWithin $center` operators.
|
||||
* `Criteria` *near* `(Point point)` Creates a geospatial criterion using a `$near` operation
|
||||
* `Criteria` *nearSphere* `(Point point)` Creates a geospatial criterion using `$nearSphere` and `$center` operations. This is only available for MongoDB 1.7 and higher.
|
||||
* `Criteria` *minDistance* `(double minDistance)` Creates a geospatial criterion using the `$minDistance` operation, for use with $near.
|
||||
* `Criteria` *maxDistance* `(double maxDistance)` Creates a geospatial criterion using the `$maxDistance` operation, for use with $near.
|
||||
====
|
||||
|
||||
The `Query` class has some additional methods that allow to select certain fields as well as to limit and sort the result.
|
||||
|
||||
[[mongodb-template-query.query]]
|
||||
.Methods of the Query class
|
||||
[%collapsible]
|
||||
====
|
||||
* `Query` *addCriteria* `(Criteria criteria)` used to add additional criteria to the query
|
||||
* `Field` *fields* `()` used to define fields to be included in the query results
|
||||
* `Query` *limit* `(int limit)` used to limit the size of the returned results to the provided limit (used for paging)
|
||||
* `Query` *skip* `(int skip)` used to skip the provided number of documents in the results (used for paging)
|
||||
* `Query` *with* `(Sort sort)` used to provide sort definition for the results
|
||||
* `Query` *with* `(ScrollPosition position)` used to provide a scroll position (Offset- or Keyset-based pagination) to start or resume a `Scroll`
|
||||
====
|
||||
|
||||
[[mongo-template.query.result-projection]]
|
||||
|
||||
The template API allows direct usage of result projections that enable you to map queries against a given domain type while projecting the operation result onto another one as outlined below.
|
||||
|
||||
[source,java]
|
||||
----
|
||||
class
|
||||
|
||||
template.query(SWCharacter.class)
|
||||
.as(Jedi.class)
|
||||
----
|
||||
|
||||
For more information on result projections please refer to the xref:repositories/projections.adoc[Projections] section of the documentation.
|
||||
|
||||
[[mongo-template.querying.field-selection]]
|
||||
== Selecting fields
|
||||
|
||||
MongoDB supports https://docs.mongodb.com/manual/tutorial/project-fields-from-query-results/[projecting fields] returned by a query.
|
||||
A projection can include and exclude fields (the `_id` field is always included unless explicitly excluded) based on their name.
|
||||
|
||||
.Selecting result fields
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
public class Person {
|
||||
|
||||
@Id String id;
|
||||
String firstname;
|
||||
|
||||
@Field("last_name")
|
||||
String lastname;
|
||||
|
||||
Address address;
|
||||
}
|
||||
|
||||
query.fields().include("lastname"); <1>
|
||||
|
||||
query.fields().exclude("id").include("lastname") <2>
|
||||
|
||||
query.fields().include("address") <3>
|
||||
|
||||
query.fields().include("address.city") <4>
|
||||
----
|
||||
<1> Result will contain both `_id` and `last_name` via `{ "last_name" : 1 }`.
|
||||
<2> Result will only contain the `last_name` via `{ "_id" : 0, "last_name" : 1 }`.
|
||||
<3> Result will contain the `_id` and entire `address` object via `{ "address" : 1 }`.
|
||||
<4> Result will contain the `_id` and an `address` object that only contains the `city` field via `{ "address.city" : 1 }`.
|
||||
====
|
||||
|
||||
Starting with MongoDB 4.4 you can use aggregation expressions for field projections as shown below:
|
||||
|
||||
.Computing result fields using expressions
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
query.fields()
|
||||
.project(MongoExpression.create("'$toUpper' : '$last_name'")) <1>
|
||||
.as("last_name"); <2>
|
||||
|
||||
query.fields()
|
||||
.project(StringOperators.valueOf("lastname").toUpper()) <3>
|
||||
.as("last_name");
|
||||
|
||||
query.fields()
|
||||
.project(AggregationSpELExpression.expressionOf("toUpper(lastname)")) <4>
|
||||
.as("last_name");
|
||||
----
|
||||
<1> Use a native expression. The used field name must refer to field names within the database document.
|
||||
<2> Assign the field name to which the expression result is projected. The resulting field name is not mapped against the domain model.
|
||||
<3> Use an `AggregationExpression`. Other than native `MongoExpression`, field names are mapped to the ones used in the domain model.
|
||||
<4> Use SpEL along with an `AggregationExpression` to invoke expression functions. Field names are mapped to the ones used in the domain model.
|
||||
====
|
||||
|
||||
`@Query(fields="…")` allows usage of expression field projections at `Repository` level as described in xref:mongodb/repositories/repositories.adoc#mongodb.repositories.queries.json-based[MongoDB JSON-based Query Methods and Field Restriction].
|
||||
|
||||
[[mongo.query.additional-query-options]]
|
||||
== Additional Query Options
|
||||
|
||||
MongoDB offers various ways of applying meta information, like a comment or a batch size, to a query. Using the `Query` API
|
||||
directly there are several methods for those options.
|
||||
|
||||
[[mongo.query.hints]]
|
||||
=== Hints
|
||||
|
||||
Index hints can be applied in two ways, using the index name or its field definition.
|
||||
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
template.query(Person.class)
|
||||
.matching(query("...").withHint("index-to-use"));
|
||||
|
||||
template.query(Person.class)
|
||||
.matching(query("...").withHint("{ firstname : 1 }"));
|
||||
----
|
||||
====
|
||||
|
||||
[[mongo.query.cursor-size]]
|
||||
=== Cursor Batch Size
|
||||
|
||||
The cursor batch size defines the number of documents to return in each response batch.
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
Query query = query(where("firstname").is("luke"))
|
||||
.cursorBatchSize(100)
|
||||
----
|
||||
====
|
||||
|
||||
[[mongo.query.collation]]
|
||||
=== Collations
|
||||
|
||||
Using collations with collection operations is a matter of specifying a `Collation` instance in your query or operation options, as the following two examples show:
|
||||
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
Collation collation = Collation.of("de");
|
||||
|
||||
Query query = new Query(Criteria.where("firstName").is("Amél"))
|
||||
.collation(collation);
|
||||
|
||||
List<Person> results = template.find(query, Person.class);
|
||||
----
|
||||
====
|
||||
|
||||
[[mongo.query.read-preference]]
|
||||
=== Read Preference
|
||||
|
||||
The `ReadPreference` to use can be set directly on the `Query` object to be run as outlined below.
|
||||
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
template.find(Person.class)
|
||||
.matching(query(where(...)).withReadPreference(ReadPreference.secondary()))
|
||||
.all();
|
||||
----
|
||||
====
|
||||
|
||||
NOTE: The preference set on the `Query` instance will supersede the default `ReadPreference` of `MongoTemplate`.
|
||||
|
||||
[[mongo.query.comment]]
|
||||
=== Comments
|
||||
|
||||
Queries can be equipped with comments which makes them easier to look up in server logs.
|
||||
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
template.find(Person.class)
|
||||
.matching(query(where(...)).comment("Use the force luke!"))
|
||||
.all();
|
||||
----
|
||||
====
|
||||
|
||||
[[mongo-template.query.distinct]]
|
||||
== Query Distinct Values
|
||||
|
||||
MongoDB provides an operation to obtain distinct values for a single field by using a query from the resulting documents.
|
||||
Resulting values are not required to have the same data type, nor is the feature limited to simple types.
|
||||
For retrieval, the actual result type does matter for the sake of conversion and typing. The following example shows how to query for distinct values:
|
||||
|
||||
.Retrieving distinct values
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
template.query(Person.class) <1>
|
||||
.distinct("lastname") <2>
|
||||
.all(); <3>
|
||||
----
|
||||
<1> Query the `Person` collection.
|
||||
<2> Select distinct values of the `lastname` field. The field name is mapped according to the domain type's property declaration, taking potential `@Field` annotations into account.
|
||||
<3> Retrieve all distinct values as a `List` of `Object` (due to no explicit result type being specified).
|
||||
====
|
||||
|
||||
Retrieving distinct values into a `Collection` of `Object` is the most flexible way, as it tries to determine the property value of the domain type and convert results to the desired type or mapping `Document` structures.
|
||||
|
||||
Sometimes, when all values of the desired field are fixed to a certain type, it is more convenient to directly obtain a correctly typed `Collection`, as shown in the following example:
|
||||
|
||||
.Retrieving strongly typed distinct values
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
template.query(Person.class) <1>
|
||||
.distinct("lastname") <2>
|
||||
.as(String.class) <3>
|
||||
.all(); <4>
|
||||
----
|
||||
<1> Query the collection of `Person`.
|
||||
<2> Select distinct values of the `lastname` field. The field name is mapped according to the domain type's property declaration, taking potential `@Field` annotations into account.
|
||||
<3> Retrieved values are converted into the desired target type -- in this case, `String`. It is also possible to map the values to a more complex type if the stored field contains a document.
|
||||
<4> Retrieve all distinct values as a `List` of `String`. If the type cannot be converted into the desired target type, this method throws a `DataAccessException`.
|
||||
====
|
||||
|
||||
[[mongo.geospatial]]
|
||||
== GeoSpatial Queries
|
||||
|
||||
MongoDB supports GeoSpatial queries through the use of operators such as `$near`, `$within`, `$geoWithin`, and `$nearSphere`. Methods specific to geospatial queries are available on the `Criteria` class. There are also a few shape classes (`Box`, `Circle`, and `Point`) that are used in conjunction with geospatial related `Criteria` methods.
|
||||
|
||||
NOTE: Using GeoSpatial queries requires attention when used within MongoDB transactions, see xref:mongodb/client-session-transactions.adoc#mongo.transactions.behavior[Special behavior inside transactions].
|
||||
|
||||
To understand how to perform GeoSpatial queries, consider the following `Venue` class (taken from the integration tests and relying on the rich `MappingMongoConverter`):
|
||||
|
||||
.Venue.java
|
||||
[%collapsible]
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Document(collection="newyork")
|
||||
public class Venue {
|
||||
|
||||
@Id
|
||||
private String id;
|
||||
private String name;
|
||||
private double[] location;
|
||||
|
||||
@PersistenceConstructor
|
||||
Venue(String name, double[] location) {
|
||||
super();
|
||||
this.name = name;
|
||||
this.location = location;
|
||||
}
|
||||
|
||||
public Venue(String name, double x, double y) {
|
||||
super();
|
||||
this.name = name;
|
||||
this.location = new double[] { x, y };
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public double[] getLocation() {
|
||||
return location;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Venue [id=" + id + ", name=" + name + ", location="
|
||||
+ Arrays.toString(location) + "]";
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
To find locations within a `Circle`, you can use the following query:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
Circle circle = new Circle(-73.99171, 40.738868, 0.01);
|
||||
List<Venue> venues =
|
||||
template.find(new Query(Criteria.where("location").within(circle)), Venue.class);
|
||||
----
|
||||
|
||||
To find venues within a `Circle` using spherical coordinates, you can use the following query:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
Circle circle = new Circle(-73.99171, 40.738868, 0.003712240453784);
|
||||
List<Venue> venues =
|
||||
template.find(new Query(Criteria.where("location").withinSphere(circle)), Venue.class);
|
||||
----
|
||||
|
||||
To find venues within a `Box`, you can use the following query:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
//lower-left then upper-right
|
||||
Box box = new Box(new Point(-73.99756, 40.73083), new Point(-73.988135, 40.741404));
|
||||
List<Venue> venues =
|
||||
template.find(new Query(Criteria.where("location").within(box)), Venue.class);
|
||||
----
|
||||
|
||||
To find venues near a `Point`, you can use the following queries:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
Point point = new Point(-73.99171, 40.738868);
|
||||
List<Venue> venues =
|
||||
template.find(new Query(Criteria.where("location").near(point).maxDistance(0.01)), Venue.class);
|
||||
----
|
||||
|
||||
[source,java]
|
||||
----
|
||||
Point point = new Point(-73.99171, 40.738868);
|
||||
List<Venue> venues =
|
||||
template.find(new Query(Criteria.where("location").near(point).minDistance(0.01).maxDistance(100)), Venue.class);
|
||||
----
|
||||
|
||||
To find venues near a `Point` using spherical coordinates, you can use the following query:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
Point point = new Point(-73.99171, 40.738868);
|
||||
List<Venue> venues =
|
||||
template.find(new Query(
|
||||
Criteria.where("location").nearSphere(point).maxDistance(0.003712240453784)),
|
||||
Venue.class);
|
||||
----
|
||||
|
||||
[[mongo.geo-near]]
|
||||
== Geo-near Queries
|
||||
|
||||
[WARNING]
|
||||
====
|
||||
*Changed in 2.2!* +
|
||||
https://docs.mongodb.com/master/release-notes/4.2-compatibility/[MongoDB 4.2] removed support for the
|
||||
`geoNear` command which had been previously used to run the `NearQuery`.
|
||||
|
||||
Spring Data MongoDB 2.2 `MongoOperations#geoNear` uses the `$geoNear` https://docs.mongodb.com/manual/reference/operator/aggregation/geoNear/[aggregation]
|
||||
instead of the `geoNear` command to run a `NearQuery`.
|
||||
|
||||
The calculated distance (the `dis` when using a geoNear command) previously returned within a wrapper type now is embedded
|
||||
into the resulting document.
|
||||
If the given domain type already contains a property with that name, the calculated distance
|
||||
is named `calculated-distance` with a potentially random postfix.
|
||||
|
||||
Target types may contain a property named after the returned distance to (additionally) read it back directly into the domain type as shown below.
|
||||
|
||||
[source,java]
|
||||
----
|
||||
GeoResults<VenueWithDistanceField> results = template.query(Venue.class) <1>
|
||||
.as(VenueWithDistanceField.class) <2>
|
||||
.near(NearQuery.near(new GeoJsonPoint(-73.99, 40.73), KILOMETERS))
|
||||
.all();
|
||||
----
|
||||
<1> Domain type used to identify the target collection and potential query mapping.
|
||||
<2> Target type containing a `dis` field of type `Number`.
|
||||
====
|
||||
|
||||
MongoDB supports querying the database for geo locations and calculating the distance from a given origin at the same time. With geo-near queries, you can express queries such as "find all restaurants in the surrounding 10 miles". To let you do so, `MongoOperations` provides `geoNear(…)` methods that take a `NearQuery` as an argument (as well as the already familiar entity type and collection), as shown in the following example:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
Point location = new Point(-73.99171, 40.738868);
|
||||
NearQuery query = NearQuery.near(location).maxDistance(new Distance(10, Metrics.MILES));
|
||||
|
||||
GeoResults<Restaurant> results = operations.geoNear(query, Restaurant.class);
|
||||
----
|
||||
|
||||
We use the `NearQuery` builder API to set up a query to return all `Restaurant` instances surrounding the given `Point` out to 10 miles.
|
||||
The `Metrics` enum used here actually implements an interface so that other metrics could be plugged into a distance as well.
|
||||
A `Metric` is backed by a multiplier to transform the distance value of the given metric into native distances.
|
||||
The sample shown here would consider the 10 to be miles. Using one of the built-in metrics (miles and kilometers) automatically triggers the spherical flag to be set on the query.
|
||||
If you want to avoid that, pass plain `double` values into `maxDistance(…)`.
|
||||
For more information, see the https://docs.spring.io/spring-data/mongodb/docs/{version}/api/index.html[JavaDoc] of `NearQuery` and `Distance`.
|
||||
|
||||
The geo-near operations return a `GeoResults` wrapper object that encapsulates `GeoResult` instances.
|
||||
Wrapping `GeoResults` allows accessing the average distance of all results.
|
||||
A single `GeoResult` object carries the entity found plus its distance from the origin.
|
||||
|
||||
[[mongo.geo-json]]
|
||||
== GeoJSON Support
|
||||
|
||||
MongoDB supports https://geojson.org/[GeoJSON] and simple (legacy) coordinate pairs for geospatial data. Those formats can both be used for storing as well as querying data. See the https://docs.mongodb.org/manual/core/2dsphere/#geospatial-indexes-store-geojson/[MongoDB manual on GeoJSON support] to learn about requirements and restrictions.
|
||||
|
||||
[[mongo.geo-json.domain.classes]]
|
||||
== GeoJSON Types in Domain Classes
|
||||
|
||||
Usage of https://geojson.org/[GeoJSON] types in domain classes is straightforward. The `org.springframework.data.mongodb.core.geo` package contains types such as `GeoJsonPoint`, `GeoJsonPolygon`, and others. These types extend the existing `org.springframework.data.geo` types. The following example uses a `GeoJsonPoint`:
|
||||
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
public class Store {
|
||||
|
||||
String id;
|
||||
|
||||
/**
|
||||
* { "type" : "Point", "coordinates" : [ x, y ] }
|
||||
*/
|
||||
GeoJsonPoint location;
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
[TIP]
|
||||
====
|
||||
If the `coordinates` of a GeoJSON object represent _latitude_ and _longitude_ pairs, the _longitude_ goes first followed by _latitude_. +
|
||||
`GeoJsonPoint` therefore treats `getX()` as _longitude_ and `getY()` as _latitude_.
|
||||
====
|
||||
|
||||
[[mongo.geo-json.query-methods]]
|
||||
== GeoJSON Types in Repository Query Methods
|
||||
|
||||
Using GeoJSON types as repository query parameters forces usage of the `$geometry` operator when creating the query, as the following example shows:
|
||||
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
public interface StoreRepository extends CrudRepository<Store, String> {
|
||||
|
||||
List<Store> findByLocationWithin(Polygon polygon); <1>
|
||||
|
||||
}
|
||||
|
||||
/*
|
||||
* {
|
||||
* "location": {
|
||||
* "$geoWithin": {
|
||||
* "$geometry": {
|
||||
* "type": "Polygon",
|
||||
* "coordinates": [
|
||||
* [
|
||||
* [-73.992514,40.758934],
|
||||
* [-73.961138,40.760348],
|
||||
* [-73.991658,40.730006],
|
||||
* [-73.992514,40.758934]
|
||||
* ]
|
||||
* ]
|
||||
* }
|
||||
* }
|
||||
* }
|
||||
* }
|
||||
*/
|
||||
repo.findByLocationWithin( <2>
|
||||
new GeoJsonPolygon(
|
||||
new Point(-73.992514, 40.758934),
|
||||
new Point(-73.961138, 40.760348),
|
||||
new Point(-73.991658, 40.730006),
|
||||
new Point(-73.992514, 40.758934))); <3>
|
||||
|
||||
/*
|
||||
* {
|
||||
* "location" : {
|
||||
* "$geoWithin" : {
|
||||
* "$polygon" : [ [-73.992514,40.758934] , [-73.961138,40.760348] , [-73.991658,40.730006] ]
|
||||
* }
|
||||
* }
|
||||
* }
|
||||
*/
|
||||
repo.findByLocationWithin( <4>
|
||||
new Polygon(
|
||||
new Point(-73.992514, 40.758934),
|
||||
new Point(-73.961138, 40.760348),
|
||||
new Point(-73.991658, 40.730006)));
|
||||
----
|
||||
<1> Repository method definition using the commons type allows calling it with both the GeoJSON and the legacy format.
|
||||
<2> Use GeoJSON type to make use of `$geometry` operator.
|
||||
<3> Note that GeoJSON polygons need to define a closed ring.
|
||||
<4> Use the legacy format `$polygon` operator.
|
||||
====
|
||||
|
||||
[[mongo.geo-json.metrics]]
|
||||
== Metrics and Distance calculation
|
||||
|
||||
The MongoDB `$geoNear` operator allows usage of a GeoJSON Point or legacy coordinate pairs.
|
||||
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
NearQuery.near(new Point(-73.99171, 40.738868))
|
||||
----
|
||||
[source,json]
|
||||
----
|
||||
{
|
||||
"$geoNear": {
|
||||
//...
|
||||
"near": [-73.99171, 40.738868]
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
NearQuery.near(new GeoJsonPoint(-73.99171, 40.738868))
|
||||
----
|
||||
[source,json]
|
||||
----
|
||||
{
|
||||
"$geoNear": {
|
||||
//...
|
||||
"near": { "type": "Point", "coordinates": [-73.99171, 40.738868] }
|
||||
}
|
||||
}
|
||||
|
||||
----
|
||||
====
|
||||
|
||||
Though syntactically different, the server is fine accepting both no matter what format the target Document within the collection
|
||||
is using.
|
||||
|
||||
WARNING: There is a huge difference in the distance calculation. Using the legacy format operates
|
||||
upon _Radians_ on an Earth like sphere, whereas the GeoJSON format uses _Meters_.
|
||||
|
||||
To avoid a serious headache make sure to set the `Metric` to the desired unit of measure which ensures the
|
||||
distance to be calculated correctly.
|
||||
|
||||
In other words:
|
||||
|
||||
====
|
||||
Assume you've got 5 Documents like the ones below:
|
||||
[source,json]
|
||||
----
|
||||
{
|
||||
"_id" : ObjectId("5c10f3735d38908db52796a5"),
|
||||
"name" : "Penn Station",
|
||||
"location" : { "type" : "Point", "coordinates" : [ -73.99408, 40.75057 ] }
|
||||
}
|
||||
{
|
||||
"_id" : ObjectId("5c10f3735d38908db52796a6"),
|
||||
"name" : "10gen Office",
|
||||
"location" : { "type" : "Point", "coordinates" : [ -73.99171, 40.738868 ] }
|
||||
}
|
||||
{
|
||||
"_id" : ObjectId("5c10f3735d38908db52796a9"),
|
||||
"name" : "City Bakery ",
|
||||
"location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] }
|
||||
}
|
||||
{
|
||||
"_id" : ObjectId("5c10f3735d38908db52796aa"),
|
||||
"name" : "Splash Bar",
|
||||
"location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] }
|
||||
}
|
||||
{
|
||||
"_id" : ObjectId("5c10f3735d38908db52796ab"),
|
||||
"name" : "Momofuku Milk Bar",
|
||||
"location" : { "type" : "Point", "coordinates" : [ -73.985839, 40.731698 ] }
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
Fetching all Documents within a 400 Meter radius from `[-73.99171, 40.738868]` would look like this using
|
||||
GeoJSON:
|
||||
|
||||
.GeoNear with GeoJSON
|
||||
====
|
||||
[source,json]
|
||||
----
|
||||
{
|
||||
"$geoNear": {
|
||||
"maxDistance": 400, <1>
|
||||
"num": 10,
|
||||
"near": { type: "Point", coordinates: [-73.99171, 40.738868] },
|
||||
"spherical":true, <2>
|
||||
"key": "location",
|
||||
"distanceField": "distance"
|
||||
}
|
||||
}
|
||||
----
|
||||
Returning the following 3 Documents:
|
||||
[source,json]
|
||||
----
|
||||
{
|
||||
"_id" : ObjectId("5c10f3735d38908db52796a6"),
|
||||
"name" : "10gen Office",
|
||||
"location" : { "type" : "Point", "coordinates" : [ -73.99171, 40.738868 ] }
|
||||
"distance" : 0.0 <3>
|
||||
}
|
||||
{
|
||||
"_id" : ObjectId("5c10f3735d38908db52796a9"),
|
||||
"name" : "City Bakery ",
|
||||
"location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] }
|
||||
"distance" : 69.3582262492474 <3>
|
||||
}
|
||||
{
|
||||
"_id" : ObjectId("5c10f3735d38908db52796aa"),
|
||||
"name" : "Splash Bar",
|
||||
"location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] }
|
||||
"distance" : 69.3582262492474 <3>
|
||||
}
|
||||
----
|
||||
<1> Maximum distance from center point in _Meters_.
|
||||
<2> GeoJSON always operates upon a sphere.
|
||||
<3> Distance from center point in _Meters_.
|
||||
====
|
||||
|
||||
Now, when using legacy coordinate pairs one operates upon _Radians_ as discussed before. So we use `Metrics#KILOMETERS`
|
||||
when constructing the `$geoNear` command. The `Metric` makes sure the distance multiplier is set correctly.
|
||||
|
||||
.GeoNear with Legacy Coordinate Pairs
|
||||
====
|
||||
[source,json]
|
||||
----
|
||||
{
|
||||
"$geoNear": {
|
||||
"maxDistance": 0.0000627142377, <1>
|
||||
"distanceMultiplier": 6378.137, <2>
|
||||
"num": 10,
|
||||
"near": [-73.99171, 40.738868],
|
||||
"spherical":true, <3>
|
||||
"key": "location",
|
||||
"distanceField": "distance"
|
||||
}
|
||||
}
|
||||
----
|
||||
Returning the 3 Documents just like the GeoJSON variant:
|
||||
[source,json]
|
||||
----
|
||||
{
|
||||
"_id" : ObjectId("5c10f3735d38908db52796a6"),
|
||||
"name" : "10gen Office",
|
||||
"location" : { "type" : "Point", "coordinates" : [ -73.99171, 40.738868 ] }
|
||||
"distance" : 0.0 <4>
|
||||
}
|
||||
{
|
||||
"_id" : ObjectId("5c10f3735d38908db52796a9"),
|
||||
"name" : "City Bakery ",
|
||||
"location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] }
|
||||
"distance" : 0.0693586286032982 <4>
|
||||
}
|
||||
{
|
||||
"_id" : ObjectId("5c10f3735d38908db52796aa"),
|
||||
"name" : "Splash Bar",
|
||||
"location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] }
|
||||
"distance" : 0.0693586286032982 <4>
|
||||
}
|
||||
----
|
||||
<1> Maximum distance from center point in _Radians_.
|
||||
<2> The distance multiplier so we get _Kilometers_ as resulting distance.
|
||||
<3> Make sure we operate on a 2dsphere index.
|
||||
<4> Distance from center point in _Kilometers_ - take it times 1000 to match _Meters_ of the GeoJSON variant.
|
||||
====
|
||||
|
||||
[[mongo.textsearch]]
|
||||
== Full-text Search
|
||||
|
||||
Since version 2.6 of MongoDB, you can run full-text queries by using the `$text` operator. Methods and operations specific to full-text queries are available in `TextQuery` and `TextCriteria`. When doing full text search, see the https://docs.mongodb.org/manual/reference/operator/query/text/#behavior[MongoDB reference] for its behavior and limitations.
|
||||
|
||||
Before you can actually use full-text search, you must set up the search index correctly.
|
||||
See xref:mongodb/mapping/mapping.adoc#mapping-usage-indexes.text-index[Text Index] for more detail on how to create index structures.
|
||||
The following example shows how to set up a full-text search:
|
||||
|
||||
[source,javascript]
|
||||
----
|
||||
db.foo.createIndex(
|
||||
{
|
||||
title : "text",
|
||||
content : "text"
|
||||
},
|
||||
{
|
||||
weights : {
|
||||
title : 3
|
||||
}
|
||||
}
|
||||
)
|
||||
----
|
||||
|
||||
A query searching for `coffee cake` can be defined and run as follows:
|
||||
|
||||
.Full Text Query
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
Query query = TextQuery
|
||||
.queryText(new TextCriteria().matchingAny("coffee", "cake"));
|
||||
|
||||
List<Document> page = template.find(query, Document.class);
|
||||
----
|
||||
====
|
||||
|
||||
To sort results by relevance according to the `weights` use `TextQuery.sortByScore`.
|
||||
|
||||
.Full Text Query - Sort by Score
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
Query query = TextQuery
|
||||
.queryText(new TextCriteria().matchingAny("coffee", "cake"))
|
||||
.sortByScore() <1>
|
||||
.includeScore(); <2>
|
||||
|
||||
List<Document> page = template.find(query, Document.class);
|
||||
----
|
||||
<1> Use the score property for sorting results by relevance which triggers `.sort({'score': {'$meta': 'textScore'}})`.
|
||||
<2> Use `TextQuery.includeScore()` to include the calculated relevance in the resulting `Document`.
|
||||
====
|
||||
|
||||
You can exclude search terms by prefixing the term with `-` or by using `notMatching`, as shown in the following example (note that the two lines have the same effect and are thus redundant):
|
||||
|
||||
[source,java]
|
||||
----
|
||||
// search for 'coffee' and not 'cake'
|
||||
TextQuery.queryText(new TextCriteria().matching("coffee").matching("-cake"));
|
||||
TextQuery.queryText(new TextCriteria().matching("coffee").notMatching("cake"));
|
||||
----
|
||||
|
||||
`TextCriteria.matching` takes the provided term as is.
|
||||
Therefore, you can define phrases by putting them between double quotation marks (for example, `\"coffee cake\"`) or by using `TextCriteria.phrase`.
|
||||
The following example shows both ways of defining a phrase:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
// search for phrase 'coffee cake'
|
||||
TextQuery.queryText(new TextCriteria().matching("\"coffee cake\""));
|
||||
TextQuery.queryText(new TextCriteria().phrase("coffee cake"));
|
||||
----
|
||||
|
||||
You can set flags for `$caseSensitive` and `$diacriticSensitive` by using the corresponding methods on `TextCriteria`.
|
||||
Note that these two optional flags have been introduced in MongoDB 3.2 and are not included in the query unless explicitly set.
|
||||
|
||||
[[mongo.query-by-example]]
|
||||
== Query by Example
|
||||
|
||||
xref:repositories/query-by-example.adoc[Query by Example] can be used on the Template API level to run example queries.
|
||||
|
||||
The following snippet shows how to query by example:
|
||||
|
||||
.Typed Example Query
|
||||
[source,java]
|
||||
----
|
||||
Person probe = new Person();
|
||||
probe.lastname = "stark";
|
||||
|
||||
Example example = Example.of(probe);
|
||||
|
||||
Query query = new Query(new Criteria().alike(example));
|
||||
List<Person> result = template.find(query, Person.class);
|
||||
----
|
||||
|
||||
By default `Example` is strictly typed. This means that the mapped query has an included type match, restricting it to probe assignable types.
|
||||
For example, when sticking with the default type key (`_class`), the query has restrictions such as (`_class : { $in : [ com.acme.Person] }`).
|
||||
|
||||
By using the `UntypedExampleMatcher`, it is possible to bypass the default behavior and skip the type restriction. So, as long as field names match, nearly any domain type can be used as the probe for creating the reference, as the following example shows:
|
||||
|
||||
.Untyped Example Query
|
||||
====
|
||||
[source, java]
|
||||
----
|
||||
|
||||
class JustAnArbitraryClassWithMatchingFieldName {
|
||||
@Field("lastname") String value;
|
||||
}
|
||||
|
||||
JustAnArbitraryClassWithMatchingFieldName probe = new JustAnArbitraryClassWithMatchingFieldName();
|
||||
probe.value = "stark";
|
||||
|
||||
Example example = Example.of(probe, UntypedExampleMatcher.matching());
|
||||
|
||||
Query query = new Query(new Criteria().alike(example));
|
||||
List<Person> result = template.find(query, Person.class);
|
||||
----
|
||||
====
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
When including `null` values in the `ExampleSpec`, Spring Data Mongo uses embedded document matching instead of dot notation property matching.
|
||||
Doing so forces exact document matching for all property values and the property order in the embedded document.
|
||||
====
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
`UntypedExampleMatcher` is likely the right choice for you if you are storing different entities within a single collection or opted out of writing type hints.
|
||||
|
||||
Also, keep in mind that using `@TypeAlias` requires eager initialization of the `MappingContext`. To do so, configure `initialEntitySet` to ensure proper alias resolution for read operations.
|
||||
====
|
||||
|
||||
Spring Data MongoDB provides support for different matching options:
|
||||
|
||||
.`StringMatcher` options
|
||||
[%collapsible]
|
||||
====
|
||||
[cols="1,2", options="header"]
|
||||
|===
|
||||
| Matching
|
||||
| Logical result
|
||||
|
||||
| `DEFAULT` (case-sensitive)
|
||||
| `{"firstname" : firstname}`
|
||||
|
||||
| `DEFAULT` (case-insensitive)
|
||||
| `{"firstname" : { $regex: firstname, $options: 'i'}}`
|
||||
|
||||
| `EXACT` (case-sensitive)
|
||||
| `{"firstname" : { $regex: /^firstname$/}}`
|
||||
|
||||
| `EXACT` (case-insensitive)
|
||||
| `{"firstname" : { $regex: /^firstname$/, $options: 'i'}}`
|
||||
|
||||
| `STARTING` (case-sensitive)
|
||||
| `{"firstname" : { $regex: /^firstname/}}`
|
||||
|
||||
| `STARTING` (case-insensitive)
|
||||
| `{"firstname" : { $regex: /^firstname/, $options: 'i'}}`
|
||||
|
||||
| `ENDING` (case-sensitive)
|
||||
| `{"firstname" : { $regex: /firstname$/}}`
|
||||
|
||||
| `ENDING` (case-insensitive)
|
||||
| `{"firstname" : { $regex: /firstname$/, $options: 'i'}}`
|
||||
|
||||
| `CONTAINING` (case-sensitive)
|
||||
| `{"firstname" : { $regex: /.\*firstname.*/}}`
|
||||
|
||||
| `CONTAINING` (case-insensitive)
|
||||
| `{"firstname" : { $regex: /.\*firstname.*/, $options: 'i'}}`
|
||||
|
||||
| `REGEX` (case-sensitive)
|
||||
| `{"firstname" : { $regex: /firstname/}}`
|
||||
|
||||
| `REGEX` (case-insensitive)
|
||||
| `{"firstname" : { $regex: /firstname/, $options: 'i'}}`
|
||||
|
||||
|===
|
||||
====
|
||||
|
||||
[[mongo.jsonSchema.query]]
|
||||
== Query a collection for matching JSON Schema
|
||||
|
||||
You can use a schema to query any collection for documents that match a given structure defined by a JSON schema, as the following example shows:
|
||||
|
||||
.Query for Documents matching a `$jsonSchema`
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
MongoJsonSchema schema = MongoJsonSchema.builder().required("firstname", "lastname").build();
|
||||
|
||||
template.find(query(matchingDocumentStructure(schema)), Person.class);
|
||||
----
|
||||
====
|
||||
|
||||
Please refer to the xref:mongodb/mapping/mapping-schema.adoc[JSON Schema] section to learn more about the schema support in Spring Data MongoDB.
|
||||
|
||||
|
||||
|
||||
@@ -0,0 +1,11 @@
|
||||
[[observability-conventions]]
|
||||
= Conventions
|
||||
|
||||
Below you can find a list of all `GlobalObservationConvention` and `ObservationConvention` declared by this project.
|
||||
|
||||
.ObservationConvention implementations
|
||||
|===
|
||||
|ObservationConvention Class Name | Applicable ObservationContext Class Name
|
||||
|`org.springframework.data.mongodb.observability.DefaultMongoHandlerObservationConvention`|`MongoHandlerContext`
|
||||
|`org.springframework.data.mongodb.observability.MongoHandlerObservationConvention`|`MongoHandlerContext`
|
||||
|===
|
||||
@@ -0,0 +1,39 @@
|
||||
[[observability-metrics]]
|
||||
= Metrics
|
||||
|
||||
Below you can find a list of all metrics declared by this project.
|
||||
|
||||
[[observability-metrics-mongodb-command-observation]]
|
||||
== Mongodb Command Observation
|
||||
|
||||
____
|
||||
Timer created around a MongoDB command execution.
|
||||
____
|
||||
|
||||
**Metric name** `spring.data.mongodb.command`. **Type** `timer`.
|
||||
|
||||
**Metric name** `spring.data.mongodb.command.active`. **Type** `long task timer`.
|
||||
|
||||
IMPORTANT: KeyValues that are added after starting the Observation might be missing from the *.active metrics.
|
||||
|
||||
IMPORTANT: Micrometer internally uses `nanoseconds` for the baseunit. However, each backend determines the actual baseunit. (i.e. Prometheus uses seconds)
|
||||
|
||||
Fully qualified name of the enclosing class `org.springframework.data.mongodb.observability.MongoObservation`.
|
||||
|
||||
.Low cardinality Keys
|
||||
[cols="a,a"]
|
||||
|===
|
||||
|Name | Description
|
||||
|`db.connection_string` _(required)_|MongoDB connection string.
|
||||
|`db.mongodb.collection` _(required)_|MongoDB collection name.
|
||||
|`db.name` _(required)_|MongoDB database name.
|
||||
|`db.operation` _(required)_|MongoDB command value.
|
||||
|`db.system` _(required)_|MongoDB database system.
|
||||
|`db.user` _(required)_|MongoDB user.
|
||||
|`net.peer.name` _(required)_|Name of the database host.
|
||||
|`net.peer.port` _(required)_|Logical remote port number.
|
||||
|`net.sock.peer.addr` _(required)_|Mongo peer address.
|
||||
|`net.sock.peer.port` _(required)_|Mongo peer port.
|
||||
|`net.transport` _(required)_|Network transport.
|
||||
|`spring.data.mongodb.cluster_id` _(required)_|MongoDB cluster identifier.
|
||||
|===
|
||||
@@ -1,7 +1,7 @@
|
||||
:root-target: ../../../../target/
|
||||
|
||||
[[mongodb.observability]]
|
||||
== Observability
|
||||
= Observability
|
||||
|
||||
Spring Data MongoDB currently has the most up-to-date code to support Observability in your MongoDB application.
|
||||
These changes, however, haven't been picked up by Spring Boot (yet).
|
||||
@@ -39,11 +39,4 @@ Be sure to add any other relevant settings needed to configure the tracer you ar
|
||||
====
|
||||
|
||||
This should do it! You are now running with Spring Data MongoDB's usage of Spring Observability's `Observation` API.
|
||||
|
||||
include::{root-target}_conventions.adoc[]
|
||||
|
||||
include::{root-target}_metrics.adoc[]
|
||||
|
||||
include::{root-target}_spans.adoc[]
|
||||
|
||||
See also https://opentelemetry.io/docs/reference/specification/trace/semantic_conventions/database/#mongodb[OpenTelemetry Semantic Conventions] for further reference.
|
||||
30
src/main/antora/modules/ROOT/pages/observability/spans.adoc
Normal file
30
src/main/antora/modules/ROOT/pages/observability/spans.adoc
Normal file
@@ -0,0 +1,30 @@
|
||||
[[observability-spans]]
|
||||
= Spans
|
||||
|
||||
Below you can find a list of all spans declared by this project.
|
||||
|
||||
[[observability-spans-mongodb-command-observation]]
|
||||
== Mongodb Command Observation Span
|
||||
|
||||
> Timer created around a MongoDB command execution.
|
||||
|
||||
**Span name** `spring.data.mongodb.command`.
|
||||
|
||||
Fully qualified name of the enclosing class `org.springframework.data.mongodb.observability.MongoObservation`.
|
||||
|
||||
.Tag Keys
|
||||
|===
|
||||
|Name | Description
|
||||
|`db.connection_string` _(required)_|MongoDB connection string.
|
||||
|`db.mongodb.collection` _(required)_|MongoDB collection name.
|
||||
|`db.name` _(required)_|MongoDB database name.
|
||||
|`db.operation` _(required)_|MongoDB command value.
|
||||
|`db.system` _(required)_|MongoDB database system.
|
||||
|`db.user` _(required)_|MongoDB user.
|
||||
|`net.peer.name` _(required)_|Name of the database host.
|
||||
|`net.peer.port` _(required)_|Logical remote port number.
|
||||
|`net.sock.peer.addr` _(required)_|Mongo peer address.
|
||||
|`net.sock.peer.port` _(required)_|Mongo peer port.
|
||||
|`net.transport` _(required)_|Network transport.
|
||||
|`spring.data.mongodb.cluster_id` _(required)_|MongoDB cluster identifier.
|
||||
|===
|
||||
@@ -48,7 +48,7 @@ In terms of document stores, you need at least version 3.6 of https://www.mongod
|
||||
|
||||
The following compatibility matrix summarizes Spring Data versions to MongoDB driver/database versions.
|
||||
Database versions show the highest supported server version that passes the Spring Data test suite.
|
||||
You can use newer server versions unless your application uses functionality that is affected by <<compatibility.changes,changes in the MongoDB server>>.
|
||||
You can use newer server versions unless your application uses functionality that is affected by xref:preface.adoc#compatibility.changes[changes in the MongoDB server].
|
||||
See also the https://www.mongodb.com/docs/drivers/java/sync/current/compatibility/[official MongoDB driver compatibility matrix] for driver- and server version compatibility.
|
||||
|
||||
[cols="h,m,m,m", options="header"]
|
||||
8
src/main/antora/modules/ROOT/pages/repositories.adoc
Normal file
8
src/main/antora/modules/ROOT/pages/repositories.adoc
Normal file
@@ -0,0 +1,8 @@
|
||||
[[mongodb.repositories]]
|
||||
= Repositories
|
||||
:page-section-summary-toc: 1
|
||||
|
||||
This chapter explains the basic foundations of Spring Data repositories and MongoDB specifics.
|
||||
Before continuing to the MongoDB specifics, make sure you have a sound understanding of the basic concepts.
|
||||
|
||||
The goal of the Spring Data repository abstraction is to significantly reduce the amount of boilerplate code required to implement data access layers for various persistence stores.
|
||||
@@ -0,0 +1,12 @@
|
||||
include::{commons}@data-commons::page$repositories/core-concepts.adoc[]
|
||||
|
||||
[[mongodb.entity-persistence.state-detection-strategies]]
|
||||
include::{commons}@data-commons::page$is-new-state-detection.adoc[leveloffset=+1]
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
Cassandra provides no means to generate identifiers upon inserting data.
|
||||
As consequence, entities must be associated with identifier values.
|
||||
Spring Data defaults to identifier inspection to determine whether an entity is new.
|
||||
If you want to use xref:mongodb/auditing.adoc[auditing] make sure to either use xref:mongodb/template-crud-operations.adoc#mongo-template.optimistic-locking[Optimistic Locking] or implement `Persistable` for proper entity state detection.
|
||||
====
|
||||
@@ -0,0 +1 @@
|
||||
include::{commons}@data-commons::page$repositories/core-domain-events.adoc[]
|
||||
@@ -0,0 +1 @@
|
||||
include::{commons}@data-commons::page$repositories/core-extensions.adoc[]
|
||||
@@ -0,0 +1 @@
|
||||
include::{commons}@data-commons::page$repositories/create-instances.adoc[]
|
||||
@@ -0,0 +1 @@
|
||||
include::{commons}@data-commons::page$repositories/custom-implementations.adoc[]
|
||||
@@ -0,0 +1 @@
|
||||
include::{commons}@data-commons::page$repositories/definition.adoc[]
|
||||
@@ -0,0 +1 @@
|
||||
include::{commons}@data-commons::page$repositories/null-handling.adoc[]
|
||||
@@ -0,0 +1,2 @@
|
||||
[[mongodb.projections]]
|
||||
include::{commons}@data-commons::page$repositories/projections.adoc[]
|
||||
@@ -0,0 +1,25 @@
|
||||
include::{commons}@data-commons::query-by-example.adoc[]
|
||||
|
||||
[[query-by-example.running]]
|
||||
== Running an Example
|
||||
|
||||
The following example shows how to query by example when using a repository (of `Person` objects, in this case):
|
||||
|
||||
.Query by Example using a repository
|
||||
====
|
||||
[source, java]
|
||||
----
|
||||
public interface PersonRepository extends QueryByExampleExecutor<Person> {
|
||||
|
||||
}
|
||||
|
||||
public class PersonService {
|
||||
|
||||
@Autowired PersonRepository personRepository;
|
||||
|
||||
public List<Person> findPeople(Person probe) {
|
||||
return personRepository.findAll(Example.of(probe));
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
@@ -0,0 +1 @@
|
||||
include::{commons}@data-commons::page$repositories/query-keywords-reference.adoc[]
|
||||
@@ -0,0 +1 @@
|
||||
include::{commons}@data-commons::page$repositories/query-methods-details.adoc[]
|
||||
@@ -0,0 +1 @@
|
||||
include::{commons}@data-commons::page$repositories/query-return-types-reference.adoc[]
|
||||
22
src/main/antora/resources/antora-resources/antora.yml
Normal file
22
src/main/antora/resources/antora-resources/antora.yml
Normal file
@@ -0,0 +1,22 @@
|
||||
version: ${antora-component.version}
|
||||
prerelease: ${antora-component.prerelease}
|
||||
|
||||
asciidoc:
|
||||
attributes:
|
||||
version: ${project.version}
|
||||
springversionshort: ${spring.short}
|
||||
springversion: ${spring}
|
||||
attribute-missing: 'warn'
|
||||
commons: ${springdata.commons.docs}
|
||||
include-xml-namespaces: false
|
||||
spring-data-commons-docs-url: https://docs.spring.io/spring-data-commons/reference
|
||||
spring-data-commons-javadoc-base: https://docs.spring.io/spring-data/commons/docs/${springdata.commons}/api/
|
||||
springdocsurl: https://docs.spring.io/spring-framework/reference/{springversionshort}
|
||||
springjavadocurl: https://docs.spring.io/spring-framework/docs/${spring}/javadoc-api
|
||||
spring-framework-docs: '{springdocsurl}'
|
||||
spring-framework-javadoc: '{springjavadocurl}'
|
||||
springhateoasversion: ${spring-hateoas}
|
||||
releasetrainversion: ${releasetrain}
|
||||
reactor: ${reactor}
|
||||
mongoversion: ${mongo}
|
||||
store: Mongo
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 64 KiB |
File diff suppressed because one or more lines are too long
|
Before Width: | Height: | Size: 8.9 KiB |
@@ -1,48 +0,0 @@
|
||||
= Spring Data MongoDB - Reference Documentation
|
||||
Mark Pollack; Thomas Risberg; Oliver Gierke; Costin Leau; Jon Brisbin; Thomas Darimont; Christoph Strobl; Mark Paluch; Jay Bryant
|
||||
:revnumber: {version}
|
||||
:revdate: {localdate}
|
||||
ifdef::backend-epub3[:front-cover-image: image:epub-cover.png[Front Cover,1050,1600]]
|
||||
:spring-data-commons-docs: ../../../../spring-data-commons/src/main/asciidoc
|
||||
:store: Mongo
|
||||
|
||||
:feature-scroll: true
|
||||
|
||||
(C) 2008-2022 The original authors.
|
||||
|
||||
NOTE: Copies of this document may be made for your own use and for distribution to others, provided that you do not charge any fee for such copies and further provided that each copy contains this Copyright Notice, whether distributed in print or electronically.
|
||||
|
||||
toc::[]
|
||||
|
||||
include::preface.adoc[]
|
||||
|
||||
include::upgrading.adoc[leveloffset=+1]
|
||||
include::{spring-data-commons-docs}/dependencies.adoc[leveloffset=+1]
|
||||
include::{spring-data-commons-docs}/repositories.adoc[leveloffset=+1]
|
||||
|
||||
[[reference]]
|
||||
= Reference Documentation
|
||||
|
||||
include::reference/introduction.adoc[leveloffset=+1]
|
||||
include::reference/mongodb.adoc[leveloffset=+1]
|
||||
include::reference/observability.adoc[leveloffset=+1]
|
||||
include::reference/client-session-transactions.adoc[leveloffset=+1]
|
||||
include::reference/reactive-mongodb.adoc[leveloffset=+1]
|
||||
include::reference/mongo-repositories.adoc[leveloffset=+1]
|
||||
include::reference/reactive-mongo-repositories.adoc[leveloffset=+1]
|
||||
include::{spring-data-commons-docs}/auditing.adoc[leveloffset=+1]
|
||||
include::reference/mongo-auditing.adoc[leveloffset=+1]
|
||||
include::reference/mapping.adoc[leveloffset=+1]
|
||||
include::reference/sharding.adoc[leveloffset=+1]
|
||||
include::reference/mongo-encryption.adoc[leveloffset=+1]
|
||||
include::reference/kotlin.adoc[leveloffset=+1]
|
||||
include::reference/jmx.adoc[leveloffset=+1]
|
||||
|
||||
[[appendix]]
|
||||
= Appendix
|
||||
|
||||
:numbered!:
|
||||
include::{spring-data-commons-docs}/repository-namespace-reference.adoc[leveloffset=+1]
|
||||
include::{spring-data-commons-docs}/repository-populator-namespace-reference.adoc[leveloffset=+1]
|
||||
include::{spring-data-commons-docs}/repository-query-keywords-reference.adoc[leveloffset=+1]
|
||||
include::{spring-data-commons-docs}/repository-query-return-types-reference.adoc[leveloffset=+1]
|
||||
@@ -1,115 +0,0 @@
|
||||
[[gridfs]]
|
||||
== GridFS Support
|
||||
|
||||
MongoDB supports storing binary files inside its filesystem, GridFS. Spring Data MongoDB provides a `GridFsOperations` interface as well as the corresponding implementation, `GridFsTemplate`, to let you interact with the filesystem. You can set up a `GridFsTemplate` instance by handing it a `MongoDatabaseFactory` as well as a `MongoConverter`, as the following example shows:
|
||||
|
||||
====
|
||||
.Java
|
||||
[source,java,role="primary"]
|
||||
----
|
||||
class GridFsConfiguration extends AbstractMongoClientConfiguration {
|
||||
|
||||
// … further configuration omitted
|
||||
|
||||
@Bean
|
||||
public GridFsTemplate gridFsTemplate() {
|
||||
return new GridFsTemplate(mongoDbFactory(), mappingMongoConverter());
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
.XML
|
||||
[source,xml,role="secondary"]
|
||||
----
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<beans xmlns="http://www.springframework.org/schema/beans"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:mongo="http://www.springframework.org/schema/data/mongo"
|
||||
xsi:schemaLocation="http://www.springframework.org/schema/data/mongo
|
||||
https://www.springframework.org/schema/data/mongo/spring-mongo.xsd
|
||||
http://www.springframework.org/schema/beans
|
||||
https://www.springframework.org/schema/beans/spring-beans.xsd">
|
||||
|
||||
<mongo:db-factory id="mongoDbFactory" dbname="database" />
|
||||
<mongo:mapping-converter id="converter" />
|
||||
|
||||
<bean class="org.springframework.data.mongodb.gridfs.GridFsTemplate">
|
||||
<constructor-arg ref="mongoDbFactory" />
|
||||
<constructor-arg ref="converter" />
|
||||
</bean>
|
||||
|
||||
</beans>
|
||||
----
|
||||
====
|
||||
|
||||
The template can now be injected and used to perform storage and retrieval operations, as the following example shows:
|
||||
|
||||
.Using GridFsTemplate to store files
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
class GridFsClient {
|
||||
|
||||
@Autowired
|
||||
GridFsOperations operations;
|
||||
|
||||
@Test
|
||||
public void storeFileToGridFs() {
|
||||
|
||||
FileMetadata metadata = new FileMetadata();
|
||||
// populate metadata
|
||||
Resource file = … // lookup File or Resource
|
||||
|
||||
operations.store(file.getInputStream(), "filename.txt", metadata);
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
The `store(…)` operations take an `InputStream`, a filename, and (optionally) metadata information about the file to store. The metadata can be an arbitrary object, which will be marshaled by the `MongoConverter` configured with the `GridFsTemplate`. Alternatively, you can also provide a `Document`.
|
||||
|
||||
You can read files from the filesystem through either the `find(…)` or the `getResources(…)` methods. Let's have a look at the `find(…)` methods first. You can either find a single file or multiple files that match a `Query`. You can use the `GridFsCriteria` helper class to define queries. It provides static factory methods to encapsulate default metadata fields (such as `whereFilename()` and `whereContentType()`) or a custom one through `whereMetaData()`. The following example shows how to use `GridFsTemplate` to query for files:
|
||||
|
||||
.Using GridFsTemplate to query for files
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
class GridFsClient {
|
||||
|
||||
@Autowired
|
||||
GridFsOperations operations;
|
||||
|
||||
@Test
|
||||
public void findFilesInGridFs() {
|
||||
GridFSFindIterable result = operations.find(query(whereFilename().is("filename.txt")));
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
NOTE: Currently, MongoDB does not support defining sort criteria when retrieving files from GridFS. For this reason, any sort criteria defined on the `Query` instance handed into the `find(…)` method are disregarded.
|
||||
|
||||
The other option to read files from the GridFs is to use the methods introduced by the `ResourcePatternResolver` interface. They allow handing an Ant path into the method and can thus retrieve files matching the given pattern. The following example shows how to use `GridFsTemplate` to read files:
|
||||
|
||||
.Using GridFsTemplate to read files
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
class GridFsClient {
|
||||
|
||||
@Autowired
|
||||
GridFsOperations operations;
|
||||
|
||||
@Test
|
||||
public void readFilesFromGridFs() {
|
||||
GridFsResource[] txtFiles = operations.getResources("*.txt");
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
`GridFsOperations` extends `ResourcePatternResolver` and lets the `GridFsTemplate` (for example) be plugged into an `ApplicationContext` to read Spring Config files from a MongoDB database.
|
||||
|
||||
NOTE: By default, `GridFsTemplate` obtains `GridFSBucket` once upon the first GridFS interaction.
|
||||
After that, the Template instance reuses the cached bucket.
|
||||
To use different buckets, from the same Template instance use the constructor accepting `Supplier<GridFSBucket>`.
|
||||
@@ -1,10 +0,0 @@
|
||||
[[introduction]]
|
||||
= Introduction
|
||||
|
||||
== Document Structure
|
||||
|
||||
This part of the reference documentation explains the core functionality offered by Spring Data MongoDB.
|
||||
|
||||
"`<<mongo.core>>`" introduces the MongoDB module feature set.
|
||||
|
||||
"`<<mongo.repositories>>`" introduces the repository support for MongoDB.
|
||||
@@ -1,29 +0,0 @@
|
||||
include::../{spring-data-commons-docs}/kotlin.adoc[]
|
||||
|
||||
include::../{spring-data-commons-docs}/kotlin-extensions.adoc[leveloffset=+1]
|
||||
|
||||
To retrieve a list of `SWCharacter` objects in Java, you would normally write the following:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
Flux<SWCharacter> characters = template.find(SWCharacter.class).inCollection("star-wars").all()
|
||||
----
|
||||
|
||||
With Kotlin and the Spring Data extensions, you can instead write the following:
|
||||
|
||||
[source,kotlin]
|
||||
----
|
||||
val characters = template.find<SWCharacter>().inCollection("star-wars").all()
|
||||
// or (both are equivalent)
|
||||
val characters : Flux<SWCharacter> = template.find().inCollection("star-wars").all()
|
||||
----
|
||||
|
||||
As in Java, `characters` in Kotlin is strongly typed, but Kotlin's clever type inference allows for shorter syntax.
|
||||
|
||||
Spring Data MongoDB provides the following extensions:
|
||||
|
||||
* Reified generics support for `MongoOperations`, `ReactiveMongoOperations`, `FluentMongoOperations`, `ReactiveFluentMongoOperations`, and `Criteria`.
|
||||
* <<mongo.query.kotlin-support>>
|
||||
* <<kotlin.coroutines>> extensions for `ReactiveFluentMongoOperations`.
|
||||
|
||||
include::../{spring-data-commons-docs}/kotlin-coroutines.adoc[leveloffset=+1]
|
||||
@@ -1,49 +0,0 @@
|
||||
[[mongo.auditing]]
|
||||
== General Auditing Configuration for MongoDB
|
||||
|
||||
Since Spring Data MongoDB 1.4, auditing can be enabled by annotating a configuration class with the `@EnableMongoAuditing` annotation, as the following example shows:
|
||||
|
||||
====
|
||||
.Java
|
||||
[source,java,role="primary"]
|
||||
----
|
||||
@Configuration
|
||||
@EnableMongoAuditing
|
||||
class Config {
|
||||
|
||||
@Bean
|
||||
public AuditorAware<AuditableUser> myAuditorProvider() {
|
||||
return new AuditorAwareImpl();
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
.XML
|
||||
[source,xml,role="secondary"]
|
||||
----
|
||||
<mongo:auditing mapping-context-ref="customMappingContext" auditor-aware-ref="yourAuditorAwareImpl"/>
|
||||
----
|
||||
====
|
||||
|
||||
If you expose a bean of type `AuditorAware` to the `ApplicationContext`, the auditing infrastructure picks it up automatically and uses it to determine the current user to be set on domain types. If you have multiple implementations registered in the `ApplicationContext`, you can select the one to be used by explicitly setting the `auditorAwareRef` attribute of `@EnableMongoAuditing`.
|
||||
|
||||
To enable auditing, leveraging a reactive programming model, use the `@EnableReactiveMongoAuditing` annotation. +
|
||||
If you expose a bean of type `ReactiveAuditorAware` to the `ApplicationContext`, the auditing infrastructure picks it up automatically and uses it to determine the current user to be set on domain types. If you have multiple implementations registered in the `ApplicationContext`, you can select the one to be used by explicitly setting the `auditorAwareRef` attribute of `@EnableReactiveMongoAuditing`.
|
||||
|
||||
.Activating reactive auditing using JavaConfig
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Configuration
|
||||
@EnableReactiveMongoAuditing
|
||||
class Config {
|
||||
|
||||
@Bean
|
||||
public ReactiveAuditorAware<AuditableUser> myAuditorProvider() {
|
||||
return new AuditorAwareImpl();
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
|
||||
@@ -1,43 +0,0 @@
|
||||
[[mongo.entity-callbacks]]
|
||||
= Store specific EntityCallbacks
|
||||
|
||||
Spring Data MongoDB uses the `EntityCallback` API for its auditing support and reacts on the following callbacks.
|
||||
|
||||
.Supported Entity Callbacks
|
||||
[%header,cols="4"]
|
||||
|===
|
||||
| Callback
|
||||
| Method
|
||||
| Description
|
||||
| Order
|
||||
|
||||
| Reactive/BeforeConvertCallback
|
||||
| `onBeforeConvert(T entity, String collection)`
|
||||
| Invoked before a domain object is converted to `org.bson.Document`.
|
||||
| `Ordered.LOWEST_PRECEDENCE`
|
||||
|
||||
| Reactive/AfterConvertCallback
|
||||
| `onAfterConvert(T entity, org.bson.Document target, String collection)`
|
||||
| Invoked after a domain object is loaded. +
|
||||
Can modify the domain object after reading it from a `org.bson.Document`.
|
||||
| `Ordered.LOWEST_PRECEDENCE`
|
||||
|
||||
| Reactive/AuditingEntityCallback
|
||||
| `onBeforeConvert(Object entity, String collection)`
|
||||
| Marks an auditable entity as _created_ or _modified_
|
||||
| 100
|
||||
|
||||
| Reactive/BeforeSaveCallback
|
||||
| `onBeforeSave(T entity, org.bson.Document target, String collection)`
|
||||
| Invoked before a domain object is saved. +
|
||||
Can modify the target, to be persisted, `Document` containing all mapped entity information.
|
||||
| `Ordered.LOWEST_PRECEDENCE`
|
||||
|
||||
| Reactive/AfterSaveCallback
|
||||
| `onAfterSave(T entity, org.bson.Document target, String collection)`
|
||||
| Invoked after a domain object is saved. +
|
||||
Can modify the domain object, to be returned after save, `Document` containing all mapped entity information.
|
||||
| `Ordered.LOWEST_PRECEDENCE`
|
||||
|
||||
|===
|
||||
|
||||
@@ -1,130 +0,0 @@
|
||||
[[mongodb.repositories.queries.aggregation]]
|
||||
=== Aggregation Repository Methods
|
||||
|
||||
The repository layer offers means to interact with <<mongo.aggregation, the aggregation framework>> via annotated repository query methods.
|
||||
Similar to the <<mongodb.repositories.queries.json-based, JSON based queries>>, you can define a pipeline using the `org.springframework.data.mongodb.repository.Aggregation` annotation.
|
||||
The definition may contain simple placeholders like `?0` as well as link:{springDocsUrl}/core.html#expressions[SpEL expressions] `?#{ … }`.
|
||||
|
||||
.Aggregating Repository Method
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
public interface PersonRepository extends CrudRepository<Person, String> {
|
||||
|
||||
@Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }")
|
||||
List<PersonAggregate> groupByLastnameAndFirstnames(); <1>
|
||||
|
||||
@Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }")
|
||||
List<PersonAggregate> groupByLastnameAndFirstnames(Sort sort); <2>
|
||||
|
||||
@Aggregation("{ $group: { _id : $lastname, names : { $addToSet : ?0 } } }")
|
||||
List<PersonAggregate> groupByLastnameAnd(String property); <3>
|
||||
|
||||
@Aggregation("{ $group: { _id : $lastname, names : { $addToSet : ?0 } } }")
|
||||
Slice<PersonAggregate> groupByLastnameAnd(String property, Pageable page); <4>
|
||||
|
||||
@Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }")
|
||||
Stream<PersonAggregate> groupByLastnameAndFirstnamesAsStream(); <5>
|
||||
|
||||
@Aggregation("{ $group : { _id : null, total : { $sum : $age } } }")
|
||||
SumValue sumAgeUsingValueWrapper(); <6>
|
||||
|
||||
@Aggregation("{ $group : { _id : null, total : { $sum : $age } } }")
|
||||
Long sumAge(); <7>
|
||||
|
||||
@Aggregation("{ $group : { _id : null, total : { $sum : $age } } }")
|
||||
AggregationResults<SumValue> sumAgeRaw(); <8>
|
||||
|
||||
@Aggregation("{ '$project': { '_id' : '$lastname' } }")
|
||||
List<String> findAllLastnames(); <9>
|
||||
|
||||
@Aggregation(pipeline = {
|
||||
"{ $group : { _id : '$author', books: { $push: '$title' } } }",
|
||||
"{ $out : 'authors' }"
|
||||
})
|
||||
void groupAndOutSkippingOutput(); <10>
|
||||
}
|
||||
----
|
||||
[source,java]
|
||||
----
|
||||
public class PersonAggregate {
|
||||
|
||||
private @Id String lastname; <2>
|
||||
private List<String> names;
|
||||
|
||||
public PersonAggregate(String lastname, List<String> names) {
|
||||
// ...
|
||||
}
|
||||
|
||||
// Getter / Setter omitted
|
||||
}
|
||||
|
||||
public class SumValue {
|
||||
|
||||
private final Long total; <6> <8>
|
||||
|
||||
public SumValue(Long total) {
|
||||
// ...
|
||||
}
|
||||
|
||||
// Getter omitted
|
||||
}
|
||||
----
|
||||
<1> Aggregation pipeline to group first names by `lastname` in the `Person` collection returning these as `PersonAggregate`.
|
||||
<2> If `Sort` argument is present, `$sort` is appended after the declared pipeline stages so that it only affects the order of the final results after having passed all other aggregation stages.
|
||||
Therefore, the `Sort` properties are mapped against the methods return type `PersonAggregate` which turns `Sort.by("lastname")` into `{ $sort : { '_id' : 1 } }` because `PersonAggregate.lastname` is annotated with `@Id`.
|
||||
<3> Replaces `?0` with the given value for `property` for a dynamic aggregation pipeline.
|
||||
<4> `$skip`, `$limit` and `$sort` can be passed on via a `Pageable` argument. Same as in <2>, the operators are appended to the pipeline definition. Methods accepting `Pageable` can return `Slice` for easier pagination.
|
||||
<5> Aggregation methods can return `Stream` to consume results directly from an underlying cursor. Make sure to close the stream after consuming it to release the server-side cursor by either calling `close()` or through `try-with-resources`.
|
||||
<6> Map the result of an aggregation returning a single `Document` to an instance of a desired `SumValue` target type.
|
||||
<7> Aggregations resulting in single document holding just an accumulation result like e.g. `$sum` can be extracted directly from the result `Document`.
|
||||
To gain more control, you might consider `AggregationResults` as method return type as shown in <8>.
|
||||
<8> Obtain the raw `AggregationResults` mapped to the generic target wrapper type `SumValue` or `org.bson.Document`.
|
||||
<9> Like in <6>, a single value can be directly obtained from multiple result ``Document``s.
|
||||
<10> Skips the output of the `$out` stage when return type is `void`.
|
||||
====
|
||||
|
||||
In some scenarios, aggregations might require additional options, such as a maximum run time, additional log comments, or the permission to temporarily write data to disk.
|
||||
Use the `@Meta` annotation to set those options via `maxExecutionTimeMs`, `comment` or `allowDiskUse`.
|
||||
|
||||
[source,java]
|
||||
----
|
||||
interface PersonRepository extends CrudRepository<Person, String> {
|
||||
|
||||
@Meta(allowDiskUse = true)
|
||||
@Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }")
|
||||
List<PersonAggregate> groupByLastnameAndFirstnames();
|
||||
}
|
||||
----
|
||||
|
||||
Or use `@Meta` to create your own annotation as shown in the sample below.
|
||||
|
||||
[source,java]
|
||||
----
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Target({ ElementType.METHOD })
|
||||
@Meta(allowDiskUse = true)
|
||||
@interface AllowDiskUse { }
|
||||
|
||||
interface PersonRepository extends CrudRepository<Person, String> {
|
||||
|
||||
@AllowDiskUse
|
||||
@Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }")
|
||||
List<PersonAggregate> groupByLastnameAndFirstnames();
|
||||
}
|
||||
----
|
||||
|
||||
TIP: You can use `@Aggregation` also with <<mongo.reactive.repositories, Reactive Repositories>>.
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
Simple-type single-result inspects the returned `Document` and checks for the following:
|
||||
|
||||
. Only one entry in the document, return it.
|
||||
. Two entries, one is the `_id` value. Return the other.
|
||||
. Return for the first value assignable to the return type.
|
||||
. Throw an exception if none of the above is applicable.
|
||||
====
|
||||
|
||||
WARNING: The `Page` return type is not supported for repository methods using `@Aggregation`. However, you can use a
|
||||
`Pageable` argument to add `$skip`, `$limit` and `$sort` to the pipeline and let the method return `Slice`.
|
||||
@@ -1,739 +0,0 @@
|
||||
[[mongo.repositories]]
|
||||
= MongoDB Repositories
|
||||
|
||||
[[mongo-repo-intro]]
|
||||
This chapter points out the specialties for repository support for MongoDB.
|
||||
This chapter builds on the core repository support explained in <<repositories>>.
|
||||
You should have a sound understanding of the basic concepts explained there.
|
||||
|
||||
[[mongo-repo-usage]]
|
||||
== Usage
|
||||
|
||||
To access domain entities stored in a MongoDB, you can use our sophisticated repository support that eases implementation quite significantly.
|
||||
To do so, create an interface for your repository, as the following example shows:
|
||||
|
||||
.Sample Person entity
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
public class Person {
|
||||
|
||||
@Id
|
||||
private String id;
|
||||
private String firstname;
|
||||
private String lastname;
|
||||
private Address address;
|
||||
|
||||
// … getters and setters omitted
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
Note that the domain type shown in the preceding example has a property named `id` of type `String`. The default serialization mechanism used in `MongoTemplate` (which backs the repository support) regards properties named `id` as the document ID.
|
||||
Currently, we support `String`, `ObjectId`, and `BigInteger` as ID types.
|
||||
Please see <<mongo-template.id-handling, ID mapping>> for more information on how the `id` field is handled in the mapping layer.
|
||||
|
||||
Now that we have a domain object, we can define an interface that uses it, as follows:
|
||||
|
||||
.Basic repository interface to persist Person entities
|
||||
====
|
||||
[source]
|
||||
----
|
||||
public interface PersonRepository extends PagingAndSortingRepository<Person, String> {
|
||||
|
||||
// additional custom query methods go here
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
Right now this interface serves only to provide type information, but we can add additional methods to it later.
|
||||
|
||||
To start using the repository, use the `@EnableMongoRepositories` annotation.
|
||||
That annotation carries the same attributes as the namespace element.
|
||||
If no base package is configured, the infrastructure scans the package of the annotated configuration class.
|
||||
The following example shows how to configure your application to use MongoDB repositories:
|
||||
|
||||
====
|
||||
.Java
|
||||
[source,java,role="primary"]
|
||||
----
|
||||
@Configuration
|
||||
@EnableMongoRepositories("com.acme.*.repositories")
|
||||
class ApplicationConfig extends AbstractMongoClientConfiguration {
|
||||
|
||||
@Override
|
||||
protected String getDatabaseName() {
|
||||
return "e-store";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMappingBasePackage() {
|
||||
return "com.acme.*.repositories";
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
.XML
|
||||
[source,xml,role="secondary"]
|
||||
----
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<beans xmlns="http://www.springframework.org/schema/beans"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:mongo="http://www.springframework.org/schema/data/mongo"
|
||||
xsi:schemaLocation="http://www.springframework.org/schema/beans
|
||||
https://www.springframework.org/schema/beans/spring-beans-3.0.xsd
|
||||
http://www.springframework.org/schema/data/mongo
|
||||
https://www.springframework.org/schema/data/mongo/spring-mongo-1.0.xsd">
|
||||
|
||||
<mongo:mongo-client id="mongoClient" />
|
||||
|
||||
<bean id="mongoTemplate" class="org.springframework.data.mongodb.core.MongoTemplate">
|
||||
<constructor-arg ref="mongoClient" />
|
||||
<constructor-arg value="databaseName" />
|
||||
</bean>
|
||||
|
||||
<mongo:repositories base-package="com.acme.*.repositories" />
|
||||
|
||||
</beans>
|
||||
----
|
||||
====
|
||||
|
||||
This namespace element causes the base packages to be scanned for interfaces that extend `MongoRepository` and create Spring beans for each one found.
|
||||
By default, the repositories get a `MongoTemplate` Spring bean wired that is called `mongoTemplate`, so you only need to configure `mongo-template-ref` explicitly if you deviate from this convention.
|
||||
|
||||
Because our domain repository extends `PagingAndSortingRepository`, it provides you with CRUD operations as well as methods for paginated and sorted access to the entities.
|
||||
Working with the repository instance is just a matter of dependency injecting it into a client.
|
||||
Consequently, accessing the second page of `Person` objects at a page size of 10 would resemble the following code:
|
||||
|
||||
.Paging access to Person entities
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@ExtendWith(SpringExtension.class)
|
||||
@ContextConfiguration
|
||||
class PersonRepositoryTests {
|
||||
|
||||
@Autowired PersonRepository repository;
|
||||
|
||||
@Test
|
||||
void readsFirstPageCorrectly() {
|
||||
|
||||
Page<Person> persons = repository.findAll(PageRequest.of(0, 10));
|
||||
assertThat(persons.isFirstPage()).isTrue();
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
The preceding example creates an application context with Spring's unit test support, which performs annotation-based dependency injection into test cases.
|
||||
Inside the test method, we use the repository to query the datastore.
|
||||
We hand the repository a `PageRequest` instance that requests the first page of `Person` objects at a page size of 10.
|
||||
|
||||
[[mongodb.repositories.queries]]
|
||||
== Query Methods
|
||||
|
||||
Most of the data access operations you usually trigger on a repository result in a query being executed against the MongoDB databases.
|
||||
Defining such a query is a matter of declaring a method on the repository interface, as the following example shows:
|
||||
|
||||
.PersonRepository with query methods
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
public interface PersonRepository extends PagingAndSortingRepository<Person, String> {
|
||||
|
||||
List<Person> findByLastname(String lastname); <1>
|
||||
|
||||
Page<Person> findByFirstname(String firstname, Pageable pageable); <2>
|
||||
|
||||
Person findByShippingAddresses(Address address); <3>
|
||||
|
||||
  Person findFirstByLastname(String lastname); <4>
|
||||
|
||||
Stream<Person> findAllBy(); <5>
|
||||
}
|
||||
----
|
||||
|
||||
<1> The `findByLastname` method shows a query for all people with the given last name.
|
||||
The query is derived by parsing the method name for constraints that can be concatenated with `And` and `Or`.
|
||||
Thus, the method name results in a query expression of `{"lastname" : lastname}`.
|
||||
<2> Applies pagination to a query.
|
||||
You can equip your method signature with a `Pageable` parameter and let the method return a `Page` instance and Spring Data automatically pages the query accordingly.
|
||||
<3> Shows that you can query based on properties that are not primitive types.
|
||||
Throws `IncorrectResultSizeDataAccessException` if more than one match is found.
|
||||
<4> Uses the `First` keyword to restrict the query to only the first result.
|
||||
Unlike <3>, this method does not throw an exception if more than one match is found.
|
||||
<5> Uses a Java 8 `Stream` that reads and converts individual elements while iterating the stream.
|
||||
====
|
||||
|
||||
NOTE: We do not support referring to parameters that are mapped as `DBRef` in the domain class.
|
||||
|
||||
The following table shows the keywords that are supported for query methods:
|
||||
|
||||
[cols="1,2,3",options="header"]
|
||||
.Supported keywords for query methods
|
||||
|===
|
||||
| Keyword
|
||||
| Sample
|
||||
| Logical result
|
||||
|
||||
| `After`
|
||||
| `findByBirthdateAfter(Date date)`
|
||||
| `{"birthdate" : {"$gt" : date}}`
|
||||
|
||||
| `GreaterThan`
|
||||
| `findByAgeGreaterThan(int age)`
|
||||
| `{"age" : {"$gt" : age}}`
|
||||
|
||||
| `GreaterThanEqual`
|
||||
| `findByAgeGreaterThanEqual(int age)`
|
||||
| `{"age" : {"$gte" : age}}`
|
||||
|
||||
| `Before`
|
||||
| `findByBirthdateBefore(Date date)`
|
||||
| `{"birthdate" : {"$lt" : date}}`
|
||||
|
||||
| `LessThan`
|
||||
| `findByAgeLessThan(int age)`
|
||||
| `{"age" : {"$lt" : age}}`
|
||||
|
||||
| `LessThanEqual`
|
||||
| `findByAgeLessThanEqual(int age)`
|
||||
| `{"age" : {"$lte" : age}}`
|
||||
|
||||
| `Between`
|
||||
| `findByAgeBetween(int from, int to)` +
|
||||
`findByAgeBetween(Range<Integer> range)`
|
||||
| `{"age" : {"$gt" : from, "$lt" : to}}` +
|
||||
lower / upper bounds (`$gt` / `$gte` & `$lt` / `$lte`) according to `Range`
|
||||
|
||||
| `In`
|
||||
| `findByAgeIn(Collection ages)`
|
||||
| `{"age" : {"$in" : [ages...]}}`
|
||||
|
||||
| `NotIn`
|
||||
| `findByAgeNotIn(Collection ages)`
|
||||
| `{"age" : {"$nin" : [ages...]}}`
|
||||
|
||||
| `IsNotNull`, `NotNull`
|
||||
| `findByFirstnameNotNull()`
|
||||
| `{"firstname" : {"$ne" : null}}`
|
||||
|
||||
| `IsNull`, `Null`
|
||||
| `findByFirstnameNull()`
|
||||
| `{"firstname" : null}`
|
||||
|
||||
| `Like`, `StartingWith`, `EndingWith`
|
||||
| `findByFirstnameLike(String name)`
|
||||
| `{"firstname" : name} (name as regex)`
|
||||
|
||||
| `NotLike`, `IsNotLike`
|
||||
| `findByFirstnameNotLike(String name)`
|
||||
| `{"firstname" : { "$not" : name }} (name as regex)`
|
||||
|
||||
| `Containing` on String
|
||||
| `findByFirstnameContaining(String name)`
|
||||
| `{"firstname" : name} (name as regex)`
|
||||
|
||||
| `NotContaining` on String
|
||||
| `findByFirstnameNotContaining(String name)`
|
||||
| `{"firstname" : { "$not" : name}} (name as regex)`
|
||||
|
||||
| `Containing` on Collection
|
||||
| `findByAddressesContaining(Address address)`
|
||||
| `{"addresses" : { "$in" : address}}`
|
||||
|
||||
| `NotContaining` on Collection
|
||||
| `findByAddressesNotContaining(Address address)`
|
||||
| `{"addresses" : { "$not" : { "$in" : address}}}`
|
||||
|
||||
| `Regex`
|
||||
| `findByFirstnameRegex(String firstname)`
|
||||
| `{"firstname" : {"$regex" : firstname }}`
|
||||
|
||||
| `(No keyword)`
|
||||
| `findByFirstname(String name)`
|
||||
| `{"firstname" : name}`
|
||||
|
||||
| `Not`
|
||||
| `findByFirstnameNot(String name)`
|
||||
| `{"firstname" : {"$ne" : name}}`
|
||||
|
||||
| `Near`
|
||||
| `findByLocationNear(Point point)`
|
||||
| `{"location" : {"$near" : [x,y]}}`
|
||||
|
||||
| `Near`
|
||||
| `findByLocationNear(Point point, Distance max)`
|
||||
| `{"location" : {"$near" : [x,y], "$maxDistance" : max}}`
|
||||
|
||||
| `Near`
|
||||
| `findByLocationNear(Point point, Distance min, Distance max)`
|
||||
| `{"location" : {"$near" : [x,y], "$minDistance" : min, "$maxDistance" : max}}`
|
||||
|
||||
| `Within`
|
||||
| `findByLocationWithin(Circle circle)`
|
||||
| `{"location" : {"$geoWithin" : {"$center" : [ [x, y], distance]}}}`
|
||||
|
||||
| `Within`
|
||||
| `findByLocationWithin(Box box)`
|
||||
| `{"location" : {"$geoWithin" : {"$box" : [ [x1, y1], x2, y2]}}}`
|
||||
|
||||
| `IsTrue`, `True`
|
||||
| `findByActiveIsTrue()`
|
||||
| `{"active" : true}`
|
||||
|
||||
| `IsFalse`, `False`
|
||||
| `findByActiveIsFalse()`
|
||||
| `{"active" : false}`
|
||||
|
||||
| `Exists`
|
||||
| `findByLocationExists(boolean exists)`
|
||||
| `{"location" : {"$exists" : exists }}`
|
||||
|
||||
| `IgnoreCase`
|
||||
| `findByUsernameIgnoreCase(String username)`
|
||||
| `{"username" : {"$regex" : "^username$", "$options" : "i" }}`
|
||||
|===
|
||||
|
||||
NOTE: If the property criterion compares a document, the order of the fields and exact equality in the document matters.
|
||||
|
||||
[[mongodb.repositories.queries.hint]]
|
||||
=== Repository Index Hints
|
||||
|
||||
The `@Hint` annotation allows to override MongoDB's default index selection and forces the database to use the specified index instead.
|
||||
|
||||
.Example of index hints
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Hint("lastname-idx") <1>
|
||||
List<Person> findByLastname(String lastname);
|
||||
|
||||
@Query(value = "{ 'firstname' : ?0 }", hint = "firstname-idx") <2>
|
||||
List<Person> findByFirstname(String firstname);
|
||||
----
|
||||
|
||||
<1> Use the index with name `lastname-idx`.
|
||||
<2> The `@Query` annotation defines the `hint` alias which is equivalent to adding the `@Hint` annotation.
|
||||
====
|
||||
|
||||
[[mongodb.repositories.queries.update]]
|
||||
=== Repository Update Methods
|
||||
|
||||
You can also use the keywords in the preceding table to create queries that identify matching documents for running updates on them.
|
||||
The actual update action is defined by the `@Update` annotation on the method itself, as the following listing shows.
|
||||
Note that the naming schema for derived queries starts with `find`.
|
||||
Using `update` (as in `updateAllByLastname(...)`) is allowed only in combination with `@Query`.
|
||||
|
||||
The update is applied to *all* matching documents and it is *not* possible to limit the scope by passing in a `Page` or by using any of the <<repositories.limit-query-result,limiting keywords>>.
|
||||
The return type can be either `void` or a _numeric_ type, such as `long`, to hold the number of modified documents.
|
||||
|
||||
.Update Methods
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
public interface PersonRepository extends CrudRepository<Person, String> {
|
||||
|
||||
@Update("{ '$inc' : { 'visits' : 1 } }")
|
||||
long findAndIncrementVisitsByLastname(String lastname); <1>
|
||||
|
||||
@Update("{ '$inc' : { 'visits' : ?1 } }")
|
||||
void findAndIncrementVisitsByLastname(String lastname, int increment); <2>
|
||||
|
||||
@Update("{ '$inc' : { 'visits' : ?#{[1]} } }")
|
||||
long findAndIncrementVisitsUsingSpELByLastname(String lastname, int increment); <3>
|
||||
|
||||
@Update(pipeline = {"{ '$set' : { 'visits' : { '$add' : [ '$visits', ?1 ] } } }"})
|
||||
void findAndIncrementVisitsViaPipelineByLastname(String lastname, int increment); <4>
|
||||
|
||||
@Update("{ '$push' : { 'shippingAddresses' : ?1 } }")
|
||||
long findAndPushShippingAddressByEmail(String email, Address address); <5>
|
||||
|
||||
@Query("{ 'lastname' : ?0 }")
|
||||
@Update("{ '$inc' : { 'visits' : ?1 } }")
|
||||
void updateAllByLastname(String lastname, int increment); <6>
|
||||
}
|
||||
----
|
||||
|
||||
<1> The filter query for the update is derived from the method name.
|
||||
The update is "`as is`" and does not bind any parameters.
|
||||
<2> The actual increment value is defined by the `increment` method argument that is bound to the `?1` placeholder.
|
||||
<3> Use the Spring Expression Language (SpEL) for parameter binding.
|
||||
<4> Use the `pipeline` attribute to issue <<mongo-template.aggregation-update,aggregation pipeline updates>>.
|
||||
<5> The update may contain complex objects.
|
||||
<6> Combine a <<mongodb.repositories.queries.json-based,string based query>> with an update.
|
||||
====
|
||||
|
||||
WARNING: Repository updates do not emit persistence nor mapping lifecycle events.
|
||||
|
||||
[[mongodb.repositories.queries.delete]]
|
||||
=== Repository Delete Queries
|
||||
|
||||
The keywords in the preceding table can be used in conjunction with `delete…By` or `remove…By` to create queries that delete matching documents.
|
||||
|
||||
.`Delete…By` Query
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
public interface PersonRepository extends MongoRepository<Person, String> {
|
||||
|
||||
List <Person> deleteByLastname(String lastname); <1>
|
||||
|
||||
Long deletePersonByLastname(String lastname); <2>
|
||||
|
||||
@Nullable
|
||||
Person deleteSingleByLastname(String lastname); <3>
|
||||
|
||||
Optional<Person> deleteByBirthdate(Date birthdate); <4>
|
||||
}
|
||||
----
|
||||
|
||||
<1> Using a return type of `List` retrieves and returns all matching documents before actually deleting them.
|
||||
<2> A numeric return type directly removes the matching documents, returning the total number of documents removed.
|
||||
<3> A single domain type result retrieves and removes the first matching document.
|
||||
<4> Same as in 3 but wrapped in an `Optional` type.
|
||||
====
|
||||
|
||||
[[mongodb.repositories.queries.geo-spatial]]
|
||||
=== Geo-spatial Repository Queries
|
||||
|
||||
As you saw in the preceding table of keywords, a few keywords trigger geo-spatial operations within a MongoDB query.
|
||||
The `Near` keyword allows some further modification, as the next few examples show.
|
||||
|
||||
The following example shows how to define a `near` query that finds all persons with a given distance of a given point:
|
||||
|
||||
.Advanced `Near` queries
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
public interface PersonRepository extends MongoRepository<Person, String> {
|
||||
|
||||
// { 'location' : { '$near' : [point.x, point.y], '$maxDistance' : distance}}
|
||||
List<Person> findByLocationNear(Point location, Distance distance);
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
Adding a `Distance` parameter to the query method allows restricting results to those within the given distance.
|
||||
If the `Distance` was set up containing a `Metric`, we transparently use `$nearSphere` instead of `$near`, as the following example shows:
|
||||
|
||||
.Using `Distance` with `Metrics`
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
Point point = new Point(43.7, 48.8);
|
||||
Distance distance = new Distance(200, Metrics.KILOMETERS);
|
||||
… = repository.findByLocationNear(point, distance);
|
||||
// {'location' : {'$nearSphere' : [43.7, 48.8], '$maxDistance' : 0.03135711885774796}}
|
||||
----
|
||||
====
|
||||
|
||||
Using a `Distance` with a `Metric` causes a `$nearSphere` (instead of a plain `$near`) clause to be added.
|
||||
Beyond that, the actual distance gets calculated according to the `Metrics` used.
|
||||
|
||||
(Note that `Metric` does not refer to metric units of measure.
|
||||
It could be miles rather than kilometers.
|
||||
Rather, `metric` refers to the concept of a system of measurement, regardless of which system you use.)
|
||||
|
||||
NOTE: Using `@GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2DSPHERE)` on the target property forces usage of the `$nearSphere` operator.
|
||||
|
||||
==== Geo-near Queries
|
||||
|
||||
Spring Data MongoDB supports geo-near queries, as the following example shows:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
public interface PersonRepository extends MongoRepository<Person, String> {
|
||||
|
||||
// {'geoNear' : 'location', 'near' : [x, y] }
|
||||
GeoResults<Person> findByLocationNear(Point location);
|
||||
|
||||
// No metric: {'geoNear' : 'person', 'near' : [x, y], maxDistance : distance }
|
||||
// Metric: {'geoNear' : 'person', 'near' : [x, y], 'maxDistance' : distance,
|
||||
// 'distanceMultiplier' : metric.multiplier, 'spherical' : true }
|
||||
GeoResults<Person> findByLocationNear(Point location, Distance distance);
|
||||
|
||||
// Metric: {'geoNear' : 'person', 'near' : [x, y], 'minDistance' : min,
|
||||
// 'maxDistance' : max, 'distanceMultiplier' : metric.multiplier,
|
||||
// 'spherical' : true }
|
||||
GeoResults<Person> findByLocationNear(Point location, Distance min, Distance max);
|
||||
|
||||
// {'geoNear' : 'location', 'near' : [x, y] }
|
||||
GeoResults<Person> findByLocationNear(Point location);
|
||||
}
|
||||
----
|
||||
|
||||
[[mongodb.repositories.queries.json-based]]
|
||||
=== MongoDB JSON-based Query Methods and Field Restriction
|
||||
|
||||
By adding the `org.springframework.data.mongodb.repository.Query` annotation to your repository query methods, you can specify a MongoDB JSON query string to use instead of having the query be derived from the method name, as the following example shows:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
public interface PersonRepository extends MongoRepository<Person, String> {
|
||||
|
||||
@Query("{ 'firstname' : ?0 }")
|
||||
List<Person> findByThePersonsFirstname(String firstname);
|
||||
|
||||
}
|
||||
----
|
||||
|
||||
The `?0` placeholder lets you substitute the value from the method arguments into the JSON query string.
|
||||
|
||||
NOTE: `String` parameter values are escaped during the binding process, which means that it is not possible to add MongoDB specific operators through the argument.
|
||||
|
||||
You can also use the filter property to restrict the set of properties that is mapped into the Java object, as the following example shows:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
public interface PersonRepository extends MongoRepository<Person, String> {
|
||||
|
||||
@Query(value="{ 'firstname' : ?0 }", fields="{ 'firstname' : 1, 'lastname' : 1}")
|
||||
List<Person> findByThePersonsFirstname(String firstname);
|
||||
|
||||
}
|
||||
----
|
||||
|
||||
The query in the preceding example returns only the `firstname`, `lastname` and `Id` properties of the `Person` objects.
|
||||
The `age` property, a `java.lang.Integer`, is not set and its value is therefore null.
|
||||
|
||||
[[mongodb.repositories.queries.sort]]
|
||||
=== Sorting Query Method results
|
||||
|
||||
MongoDB repositories allow various approaches to define sorting order.
|
||||
Let's take a look at the following example:
|
||||
|
||||
.Sorting Query Results
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
public interface PersonRepository extends MongoRepository<Person, String> {
|
||||
|
||||
List<Person> findByFirstnameSortByAgeDesc(String firstname); <1>
|
||||
|
||||
List<Person> findByFirstname(String firstname, Sort sort); <2>
|
||||
|
||||
@Query(sort = "{ age : -1 }")
|
||||
List<Person> findByFirstname(String firstname); <3>
|
||||
|
||||
@Query(sort = "{ age : -1 }")
|
||||
List<Person> findByLastname(String lastname, Sort sort); <4>
|
||||
}
|
||||
----
|
||||
|
||||
<1> Static sorting derived from method name. `SortByAgeDesc` results in `{ age : -1 }` for the sort parameter.
|
||||
<2> Dynamic sorting using a method argument.
|
||||
`Sort.by(DESC, "age")` creates `{ age : -1 }` for the sort parameter.
|
||||
<3> Static sorting via `Query` annotation.
|
||||
Sort parameter applied as stated in the `sort` attribute.
|
||||
<4> Default sorting via `Query` annotation combined with dynamic one via a method argument. `Sort.unsorted()`
|
||||
results in `{ age : -1 }`.
|
||||
Using `Sort.by(ASC, "age")` overrides the defaults and creates `{ age : 1 }`.
|
||||
`Sort.by(ASC, "firstname")` alters the default and results in `{ age : -1, firstname : 1 }`.
|
||||
====
|
||||
|
||||
[[mongodb.repositories.queries.json-spel]]
|
||||
=== JSON-based Queries with SpEL Expressions
|
||||
|
||||
Query strings and field definitions can be used together with SpEL expressions to create dynamic queries at runtime.
|
||||
SpEL expressions can provide predicate values and can be used to extend predicates with subdocuments.
|
||||
|
||||
Expressions expose method arguments through an array that contains all the arguments.
|
||||
The following query uses `[0]`
|
||||
to declare the predicate value for `lastname` (which is equivalent to the `?0` parameter binding):
|
||||
|
||||
[source,java]
|
||||
----
|
||||
public interface PersonRepository extends MongoRepository<Person, String> {
|
||||
|
||||
@Query("{'lastname': ?#{[0]} }")
|
||||
List<Person> findByQueryWithExpression(String param0);
|
||||
}
|
||||
----
|
||||
|
||||
Expressions can be used to invoke functions, evaluate conditionals, and construct values.
|
||||
SpEL expressions used in conjunction with JSON reveal a side-effect, because Map-like declarations inside of SpEL read like JSON, as the following example shows:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
public interface PersonRepository extends MongoRepository<Person, String> {
|
||||
|
||||
@Query("{'id': ?#{ [0] ? {$exists :true} : [1] }}")
|
||||
List<Person> findByQueryWithExpressionAndNestedObject(boolean param0, String param1);
|
||||
}
|
||||
----
|
||||
|
||||
WARNING: SpEL in query strings can be a powerful way to enhance queries.
|
||||
However, they can also accept a broad range of unwanted arguments.
|
||||
Make sure to sanitize strings before passing them to the query to avoid creation of vulnerabilities or unwanted changes to your query.
|
||||
|
||||
Expression support is extensible through the Query SPI: `org.springframework.data.repository.query.spi.EvaluationContextExtension`.
|
||||
The Query SPI can contribute properties and functions and can customize the root object.
|
||||
Extensions are retrieved from the application context at the time of SpEL evaluation when the query is built.
|
||||
The following example shows how to use `EvaluationContextExtension`:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
public class SampleEvaluationContextExtension extends EvaluationContextExtensionSupport {
|
||||
|
||||
@Override
|
||||
public String getExtensionId() {
|
||||
return "security";
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, Object> getProperties() {
|
||||
return Collections.singletonMap("principal", SecurityContextHolder.getCurrent().getPrincipal());
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
NOTE: Bootstrapping `MongoRepositoryFactory` yourself is not application context-aware and requires further configuration to pick up Query SPI extensions.
|
||||
|
||||
NOTE: Reactive query methods can make use of `org.springframework.data.spel.spi.ReactiveEvaluationContextExtension`.
|
||||
|
||||
[[mongodb.repositories.queries.type-safe]]
|
||||
=== Type-safe Query Methods
|
||||
|
||||
MongoDB repository support integrates with the http://www.querydsl.com/[Querydsl] project, which provides a way to perform type-safe queries.
|
||||
To quote from the project description, "Instead of writing queries as inline strings or externalizing them into XML files they are constructed via a fluent API." It provides the following features:
|
||||
|
||||
* Code completion in the IDE (all properties, methods, and operations can be expanded in your favorite Java IDE).
|
||||
* Almost no syntactically invalid queries allowed (type-safe on all levels).
|
||||
* Domain types and properties can be referenced safely -- no strings involved!
|
||||
* Adapts better to refactoring changes in domain types.
|
||||
* Incremental query definition is easier.
|
||||
|
||||
See the http://www.querydsl.com/static/querydsl/latest/reference/html/[QueryDSL documentation] for how to bootstrap your environment for APT-based code generation using Maven or Ant.
|
||||
|
||||
QueryDSL lets you write queries such as the following:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
QPerson person = new QPerson("person");
|
||||
List<Person> result = repository.findAll(person.address.zipCode.eq("C0123"));
|
||||
|
||||
Page<Person> page = repository.findAll(person.lastname.contains("a"),
|
||||
PageRequest.of(0, 2, Direction.ASC, "lastname"));
|
||||
----
|
||||
|
||||
`QPerson` is a class that is generated by the Java annotation post-processing tool.
|
||||
It is a `Predicate` that lets you write type-safe queries.
|
||||
Notice that there are no strings in the query other than the `C0123` value.
|
||||
|
||||
You can use the generated `Predicate` class by using the `QuerydslPredicateExecutor` interface, which the following listing shows:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
public interface QuerydslPredicateExecutor<T> {
|
||||
|
||||
T findOne(Predicate predicate);
|
||||
|
||||
List<T> findAll(Predicate predicate);
|
||||
|
||||
List<T> findAll(Predicate predicate, OrderSpecifier<?>... orders);
|
||||
|
||||
Page<T> findAll(Predicate predicate, Pageable pageable);
|
||||
|
||||
Long count(Predicate predicate);
|
||||
}
|
||||
----
|
||||
|
||||
To use this in your repository implementation, add it to the list of repository interfaces from which your interface inherits, as the following example shows:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
public interface PersonRepository extends MongoRepository<Person, String>, QuerydslPredicateExecutor<Person> {
|
||||
|
||||
// additional query methods go here
|
||||
}
|
||||
----
|
||||
|
||||
[[mongodb.repositories.queries.full-text]]
|
||||
=== Full-text Search Queries
|
||||
|
||||
MongoDB's full-text search feature is store-specific and, therefore, can be found on `MongoRepository` rather than on the more general `CrudRepository`.
|
||||
We need a document with a full-text index (see "`<<mapping-usage-indexes.text-index>>`" to learn how to create a full-text index).
|
||||
|
||||
Additional methods on `MongoRepository` take `TextCriteria` as an input parameter.
|
||||
In addition to those explicit methods, it is also possible to add a `TextCriteria`-derived repository method.
|
||||
The criteria are added as an additional `AND` criteria.
|
||||
Once the entity contains a `@TextScore`-annotated property, the document's full-text score can be retrieved.
|
||||
Furthermore, the `@TextScore` annotation also makes it possible to sort by the document's score, as the following example shows:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
@Document
|
||||
class FullTextDocument {
|
||||
|
||||
@Id String id;
|
||||
@TextIndexed String title;
|
||||
@TextIndexed String content;
|
||||
@TextScore Float score;
|
||||
}
|
||||
|
||||
interface FullTextRepository extends Repository<FullTextDocument, String> {
|
||||
|
||||
// Execute a full-text search and define sorting dynamically
|
||||
List<FullTextDocument> findAllBy(TextCriteria criteria, Sort sort);
|
||||
|
||||
// Paginate over a full-text search result
|
||||
Page<FullTextDocument> findAllBy(TextCriteria criteria, Pageable pageable);
|
||||
|
||||
// Combine a derived query with a full-text search
|
||||
List<FullTextDocument> findByTitleOrderByScoreDesc(String title, TextCriteria criteria);
|
||||
}
|
||||
|
||||
|
||||
Sort sort = Sort.by("score");
|
||||
TextCriteria criteria = TextCriteria.forDefaultLanguage().matchingAny("spring", "data");
|
||||
List<FullTextDocument> result = repository.findAllBy(criteria, sort);
|
||||
|
||||
criteria = TextCriteria.forDefaultLanguage().matching("film");
|
||||
Page<FullTextDocument> page = repository.findAllBy(criteria, PageRequest.of(1, 1, sort));
|
||||
List<FullTextDocument> result = repository.findByTitleOrderByScoreDesc("mongodb", criteria);
|
||||
----
|
||||
|
||||
include::../{spring-data-commons-docs}/repository-projections.adoc[leveloffset=+2]
|
||||
|
||||
include::./mongo-repositories-aggregation.adoc[]
|
||||
|
||||
[[mongodb.repositories.misc.cdi-integration]]
|
||||
== CDI Integration
|
||||
|
||||
Instances of the repository interfaces are usually created by a container, and Spring is the most natural choice when working with Spring Data.
|
||||
As of version 1.3.0, Spring Data MongoDB ships with a custom CDI extension that lets you use the repository abstraction in CDI environments.
|
||||
The extension is part of the JAR.
|
||||
To activate it, drop the Spring Data MongoDB JAR into your classpath.
|
||||
You can now set up the infrastructure by implementing a CDI Producer for the `MongoTemplate`, as the following example shows:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
class MongoTemplateProducer {
|
||||
|
||||
@Produces
|
||||
@ApplicationScoped
|
||||
public MongoOperations createMongoTemplate() {
|
||||
|
||||
MongoDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(MongoClients.create(), "database");
|
||||
return new MongoTemplate(factory);
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
The Spring Data MongoDB CDI extension picks up the `MongoTemplate` available as a CDI bean and creates a proxy for a Spring Data repository whenever a bean of a repository type is requested by the container.
|
||||
Thus, obtaining an instance of a Spring Data repository is a matter of declaring an `@Inject`-ed property, as the following example shows:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
class RepositoryClient {
|
||||
|
||||
@Inject
|
||||
PersonRepository repository;
|
||||
|
||||
public void businessMethod() {
|
||||
List<Person> people = repository.findAll();
|
||||
}
|
||||
}
|
||||
----
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,106 +0,0 @@
|
||||
[[query-by-example.running]]
|
||||
== Running an Example
|
||||
|
||||
The following example shows how to query by example when using a repository (of `Person` objects, in this case):
|
||||
|
||||
.Query by Example using a repository
|
||||
====
|
||||
[source, java]
|
||||
----
|
||||
public interface PersonRepository extends QueryByExampleExecutor<Person> {
|
||||
|
||||
}
|
||||
|
||||
public class PersonService {
|
||||
|
||||
@Autowired PersonRepository personRepository;
|
||||
|
||||
public List<Person> findPeople(Person probe) {
|
||||
return personRepository.findAll(Example.of(probe));
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
An `Example` containing an untyped `ExampleSpec` uses the Repository type and its collection name. Typed `ExampleSpec` instances use their type as the result type and the collection name from the `Repository` instance.
|
||||
|
||||
NOTE: When including `null` values in the `ExampleSpec`, Spring Data Mongo uses embedded document matching instead of dot notation property matching. Doing so forces exact document matching for all property values and the property order in the embedded document.
|
||||
|
||||
Spring Data MongoDB provides support for the following matching options:
|
||||
|
||||
[cols="1,2", options="header"]
|
||||
.`StringMatcher` options
|
||||
|===
|
||||
| Matching
|
||||
| Logical result
|
||||
|
||||
| `DEFAULT` (case-sensitive)
|
||||
| `{"firstname" : firstname}`
|
||||
|
||||
| `DEFAULT` (case-insensitive)
|
||||
| `{"firstname" : { $regex: firstname, $options: 'i'}}`
|
||||
|
||||
| `EXACT` (case-sensitive)
|
||||
| `{"firstname" : { $regex: /^firstname$/}}`
|
||||
|
||||
| `EXACT` (case-insensitive)
|
||||
| `{"firstname" : { $regex: /^firstname$/, $options: 'i'}}`
|
||||
|
||||
| `STARTING` (case-sensitive)
|
||||
| `{"firstname" : { $regex: /^firstname/}}`
|
||||
|
||||
| `STARTING` (case-insensitive)
|
||||
| `{"firstname" : { $regex: /^firstname/, $options: 'i'}}`
|
||||
|
||||
| `ENDING` (case-sensitive)
|
||||
| `{"firstname" : { $regex: /firstname$/}}`
|
||||
|
||||
| `ENDING` (case-insensitive)
|
||||
| `{"firstname" : { $regex: /firstname$/, $options: 'i'}}`
|
||||
|
||||
| `CONTAINING` (case-sensitive)
|
||||
| `{"firstname" : { $regex: /.\*firstname.*/}}`
|
||||
|
||||
| `CONTAINING` (case-insensitive)
|
||||
| `{"firstname" : { $regex: /.\*firstname.*/, $options: 'i'}}`
|
||||
|
||||
| `REGEX` (case-sensitive)
|
||||
| `{"firstname" : { $regex: /firstname/}}`
|
||||
|
||||
| `REGEX` (case-insensitive)
|
||||
| `{"firstname" : { $regex: /firstname/, $options: 'i'}}`
|
||||
|
||||
|===
|
||||
|
||||
[[query-by-example.untyped]]
|
||||
== Untyped Example
|
||||
|
||||
By default `Example` is strictly typed. This means that the mapped query has an included type match, restricting it to probe assignable types. For example, when sticking with the default type key (`_class`), the query has restrictions such as (`_class : { $in : [ com.acme.Person] }`).
|
||||
|
||||
By using the `UntypedExampleMatcher`, it is possible to bypass the default behavior and skip the type restriction. So, as long as field names match, nearly any domain type can be used as the probe for creating the reference, as the following example shows:
|
||||
|
||||
.Untyped Example Query
|
||||
====
|
||||
[source, java]
|
||||
----
|
||||
|
||||
class JustAnArbitraryClassWithMatchingFieldNames {
|
||||
@Field("lastname") String value;
|
||||
}
|
||||
|
||||
JustAnArbitraryClassWithMatchingFieldNames probe = new JustAnArbitraryClassWithMatchingFieldNames();
|
||||
probe.value = "stark";
|
||||
|
||||
Example example = Example.of(probe, UntypedExampleMatcher.matching());
|
||||
|
||||
Query query = new Query(new Criteria().alike(example));
|
||||
List<Person> result = template.find(query, Person.class);
|
||||
----
|
||||
====
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
`UntypedExampleMatcher` is likely the right choice for you if you are storing different entities within a single collection or opted out of writing <<mongo-template.type-mapping,type hints>>.
|
||||
|
||||
Also, keep in mind that using `@TypeAlias` requires eager initialization of the `MappingContext`. To do so, configure `initialEntitySet` to ensure proper alias resolution for read operations.
|
||||
====
|
||||
@@ -1,290 +0,0 @@
|
||||
[[mongo.reactive.repositories]]
|
||||
= Reactive MongoDB repositories
|
||||
|
||||
This chapter describes the specialties for reactive repository support for MongoDB. This chapter builds on the core repository support explained in <<repositories>>. You should have a sound understanding of the basic concepts explained there.
|
||||
|
||||
[[mongo.reactive.repositories.libraries]]
|
||||
== Reactive Composition Libraries
|
||||
|
||||
The reactive space offers various reactive composition libraries. The most common libraries are https://github.com/ReactiveX/RxJava[RxJava] and https://projectreactor.io/[Project Reactor].
|
||||
|
||||
Spring Data MongoDB is built on top of the https://mongodb.github.io/mongo-java-driver-reactivestreams/[MongoDB Reactive Streams] driver, to provide maximal interoperability by relying on the https://www.reactive-streams.org/[Reactive Streams] initiative. Static APIs, such as `ReactiveMongoOperations`, are provided by using Project Reactor's `Flux` and `Mono` types. Project Reactor offers various adapters to convert reactive wrapper types (`Flux` to `Observable` and vice versa), but conversion can easily clutter your code.
|
||||
|
||||
Spring Data's Repository abstraction is a dynamic API, mostly defined by you and your requirements as you declare query methods. Reactive MongoDB repositories can be implemented by using either RxJava or Project Reactor wrapper types by extending from one of the following library-specific repository interfaces:
|
||||
|
||||
* `ReactiveCrudRepository`
|
||||
* `ReactiveSortingRepository`
|
||||
* `RxJava2CrudRepository`
|
||||
* `RxJava2SortingRepository`
|
||||
* `RxJava3CrudRepository`
|
||||
* `RxJava3SortingRepository`
|
||||
|
||||
Spring Data converts reactive wrapper types behind the scenes so that you can stick to your favorite composition library.
|
||||
|
||||
[[mongo.reactive.repositories.usage]]
|
||||
== Usage
|
||||
|
||||
To access domain entities stored in a MongoDB database, you can use our sophisticated repository support that eases implementing those quite significantly. To do so, create an interface for your repository. Before you can do that, though, you need an entity, such as the entity defined in the following example:
|
||||
|
||||
.Sample `Person` entity
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
public class Person {
|
||||
|
||||
@Id
|
||||
private String id;
|
||||
private String firstname;
|
||||
private String lastname;
|
||||
private Address address;
|
||||
|
||||
// … getters and setters omitted
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
Note that the entity defined in the preceding example has a property named `id` of type `String`. The default serialization mechanism used in `MongoTemplate` (which backs the repository support) regards properties named `id` as the document ID. Currently, we support `String`, `ObjectId`, and `BigInteger` as id-types.
|
||||
Please see <<mongo-template.id-handling, ID mapping>> for more information on how the `id` field is handled in the mapping layer.
|
||||
|
||||
The following example shows how to create an interface that defines queries against the `Person` object from the preceding example:
|
||||
|
||||
.Basic repository interface to persist Person entities
|
||||
====
|
||||
[source]
|
||||
----
|
||||
public interface ReactivePersonRepository extends ReactiveSortingRepository<Person, String> {
|
||||
|
||||
Flux<Person> findByFirstname(String firstname); <1>
|
||||
|
||||
Flux<Person> findByFirstname(Publisher<String> firstname); <2>
|
||||
|
||||
Flux<Person> findByFirstnameOrderByLastname(String firstname, Pageable pageable); <3>
|
||||
|
||||
Mono<Person> findByFirstnameAndLastname(String firstname, String lastname); <4>
|
||||
|
||||
Mono<Person> findFirstByLastname(String lastname); <5>
|
||||
}
|
||||
----
|
||||
<1> The method shows a query for all people with the given `firstname`. The query is derived by parsing the method name for constraints that can be concatenated with `And` and `Or`. Thus, the method name results in a query expression of `{"firstname" : firstname}`.
|
||||
<2> The method shows a query for all people with the given `firstname` once the `firstname` is emitted by the given `Publisher`.
|
||||
<3> Use `Pageable` to pass offset and sorting parameters to the database.
|
||||
<4> Find a single entity for the given criteria. It completes with `IncorrectResultSizeDataAccessException` on non-unique results.
|
||||
<5> Unlike <4>, the first entity is always emitted even if the query yields more result documents.
|
||||
====
|
||||
|
||||
For Java configuration, use the `@EnableReactiveMongoRepositories` annotation. The annotation carries the same attributes as the namespace element. If no base package is configured, the infrastructure scans the package of the annotated configuration class.
|
||||
|
||||
NOTE: MongoDB uses two different drivers for imperative (synchronous/blocking) and reactive (non-blocking) data access. You must create a connection by using the Reactive Streams driver to provide the required infrastructure for Spring Data's Reactive MongoDB support. Consequently, you must provide a separate configuration for MongoDB's Reactive Streams driver. Note that your application operates on two different connections if you use reactive and blocking Spring Data MongoDB templates and repositories.
|
||||
|
||||
The following listing shows how to use Java configuration for a repository:
|
||||
|
||||
.Java configuration for repositories
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Configuration
|
||||
@EnableReactiveMongoRepositories
|
||||
class ApplicationConfig extends AbstractReactiveMongoConfiguration {
|
||||
|
||||
@Override
|
||||
protected String getDatabaseName() {
|
||||
return "e-store";
|
||||
}
|
||||
|
||||
@Override
|
||||
public MongoClient reactiveMongoClient() {
|
||||
return MongoClients.create();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMappingBasePackage() {
|
||||
return "com.oreilly.springdata.mongodb";
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
Because our domain repository extends `ReactiveSortingRepository`, it provides you with CRUD operations as well as methods for sorted access to the entities. Working with the repository instance is a matter of dependency injecting it into a client, as the following example shows:
|
||||
|
||||
.Sorted access to Person entities
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@ExtendWith(SpringExtension.class)
|
||||
@ContextConfiguration
|
||||
class PersonRepositoryTests {
|
||||
|
||||
@Autowired ReactivePersonRepository repository;
|
||||
|
||||
@Test
|
||||
public void sortsElementsCorrectly() {
|
||||
Flux<Person> persons = repository.findAll(Sort.by(new Order(ASC, "lastname")));
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
WARNING: The `Page` return type (as in `Mono<Page>`) is not supported by reactive repositories.
|
||||
|
||||
It is possible to use `Pageable` in derived finder methods, to pass on `sort`, `limit` and `offset` parameters to the query to reduce load and network traffic.
|
||||
The returned `Flux` will only emit data within the declared range.
|
||||
|
||||
.Limit and Offset with reactive repositories
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
Pageable page = PageRequest.of(1, 10, Sort.by("lastname"));
|
||||
Flux<Person> persons = repository.findByFirstnameOrderByLastname("luke", page);
|
||||
----
|
||||
====
|
||||
|
||||
[[mongo.reactive.repositories.features]]
|
||||
== Features
|
||||
|
||||
Spring Data's Reactive MongoDB support comes with a reduced feature set compared to the blocking <<mongo.repositories,MongoDB Repositories>>.
|
||||
|
||||
It supports the following features:
|
||||
|
||||
* Query Methods using <<mongodb.repositories.queries,String queries and Query Derivation>>
|
||||
* <<mongodb.reactive.repositories.queries.geo-spatial>>
|
||||
* <<mongodb.repositories.queries.delete>>
|
||||
* <<mongodb.repositories.queries.json-based>>
|
||||
* <<mongodb.repositories.queries.full-text>>
|
||||
* <<mongodb.reactive.repositories.queries.type-safe>>
|
||||
* <<projections>>
|
||||
|
||||
[[mongodb.reactive.repositories.queries.geo-spatial]]
|
||||
=== Geo-spatial Repository Queries
|
||||
|
||||
As you saw earlier in "`<<mongodb.reactive.repositories.queries.geo-spatial>>`", a few keywords trigger geo-spatial operations within a MongoDB query. The `Near` keyword allows some further modification, as the next few examples show.
|
||||
|
||||
The following example shows how to define a `near` query that finds all persons with a given distance of a given point:
|
||||
|
||||
.Advanced `Near` queries
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
interface PersonRepository extends ReactiveMongoRepository<Person, String> {
|
||||
|
||||
// { 'location' : { '$near' : [point.x, point.y], '$maxDistance' : distance}}
|
||||
Flux<Person> findByLocationNear(Point location, Distance distance);
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
Adding a `Distance` parameter to the query method allows restricting results to those within the given distance. If the `Distance` was set up containing a `Metric`, we transparently use `$nearSphere` instead of `$near`, as the following example shows:
|
||||
|
||||
.Using `Distance` with `Metrics`
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
Point point = new Point(43.7, 48.8);
|
||||
Distance distance = new Distance(200, Metrics.KILOMETERS);
|
||||
… = repository.findByLocationNear(point, distance);
|
||||
// {'location' : {'$nearSphere' : [43.7, 48.8], '$maxDistance' : 0.03135711885774796}}
|
||||
----
|
||||
====
|
||||
|
||||
NOTE: Reactive Geo-spatial repository queries support the domain type and `GeoResult<T>` results within a reactive wrapper type. `GeoPage` and `GeoResults` are not supported as they contradict the deferred result approach with pre-calculating the average distance. However, you can still pass in a `Pageable` argument to page results yourself.
|
||||
|
||||
Using a `Distance` with a `Metric` causes a `$nearSphere` (instead of a plain `$near`) clause to be added. Beyond that, the actual distance gets calculated according to the `Metrics` used.
|
||||
|
||||
(Note that `Metric` does not refer to metric units of measure. It could be miles rather than kilometers. Rather, `metric` refers to the concept of a system of measurement, regardless of which system you use.)
|
||||
|
||||
NOTE: Using `@GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2DSPHERE)` on the target property forces usage of the `$nearSphere` operator.
|
||||
|
||||
==== Geo-near Queries
|
||||
|
||||
Spring Data MongoDB supports geo-near queries, as the following example shows:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
interface PersonRepository extends ReactiveMongoRepository<Person, String> {
|
||||
|
||||
// {'geoNear' : 'location', 'near' : [x, y] }
|
||||
Flux<GeoResult<Person>> findByLocationNear(Point location);
|
||||
|
||||
// No metric: {'geoNear' : 'person', 'near' : [x, y], maxDistance : distance }
|
||||
// Metric: {'geoNear' : 'person', 'near' : [x, y], 'maxDistance' : distance,
|
||||
// 'distanceMultiplier' : metric.multiplier, 'spherical' : true }
|
||||
Flux<GeoResult<Person>> findByLocationNear(Point location, Distance distance);
|
||||
|
||||
// Metric: {'geoNear' : 'person', 'near' : [x, y], 'minDistance' : min,
|
||||
// 'maxDistance' : max, 'distanceMultiplier' : metric.multiplier,
|
||||
// 'spherical' : true }
|
||||
Flux<GeoResult<Person>> findByLocationNear(Point location, Distance min, Distance max);
|
||||
|
||||
// {'geoNear' : 'location', 'near' : [x, y] }
|
||||
Flux<GeoResult<Person>> findByLocationNear(Point location);
|
||||
}
|
||||
----
|
||||
|
||||
[[mongodb.reactive.repositories.queries.type-safe]]
|
||||
=== Type-safe Query Methods
|
||||
|
||||
Reactive MongoDB repository support integrates with the http://www.querydsl.com/[Querydsl] project, which provides a way to perform type-safe queries.
|
||||
|
||||
[quote, Querydsl Team]
|
||||
Instead of writing queries as inline strings or externalizing them into XML files they are constructed via a fluent API.
|
||||
|
||||
It provides the following features:
|
||||
|
||||
* Code completion in the IDE (all properties, methods, and operations can be expanded in your favorite Java IDE).
|
||||
* Almost no syntactically invalid queries allowed (type-safe on all levels).
|
||||
* Domain types and properties can be referenced safely -- no strings involved!
|
||||
* Adapts better to refactoring changes in domain types.
|
||||
* Incremental query definition is easier.
|
||||
|
||||
See the http://www.querydsl.com/static/querydsl/latest/reference/html/[Querydsl documentation] for how to bootstrap your environment for APT-based code generation using Maven or Ant.
|
||||
|
||||
The Querydsl repository support lets you write and run queries, such as the following:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
QPerson person = QPerson.person;
|
||||
|
||||
Flux<Person> result = repository.findAll(person.address.zipCode.eq("C0123"));
|
||||
----
|
||||
|
||||
`QPerson` is a class that is generated by the Java annotation post-processing tool. It is a `Predicate` that lets you write type-safe queries.
|
||||
Note that there are no strings in the query other than the `C0123` value.
|
||||
|
||||
You can use the generated `Predicate` class by using the `ReactiveQuerydslPredicateExecutor` interface, which the following listing shows:
|
||||
|
||||
.The Gateway to Reactive Querydsl - The ReactiveQuerydslPredicateExecutor
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
interface ReactiveQuerydslPredicateExecutor<T> {
|
||||
|
||||
Mono<T> findOne(Predicate predicate);
|
||||
|
||||
Flux<T> findAll(Predicate predicate);
|
||||
|
||||
Flux<T> findAll(Predicate predicate, Sort sort);
|
||||
|
||||
Flux<T> findAll(Predicate predicate, OrderSpecifier<?>... orders);
|
||||
|
||||
Flux<T> findAll(OrderSpecifier<?>... orders);
|
||||
|
||||
Mono<Long> count(Predicate predicate);
|
||||
|
||||
Mono<Boolean> exists(Predicate predicate);
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
To use this in your repository implementation, add it to the list of repository interfaces from which your interface inherits, as the following example shows:
|
||||
|
||||
.Reactive Querydsl Repository Declaration
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
interface PersonRepository extends ReactiveMongoRepository<Person, String>, ReactiveQuerydslPredicateExecutor<Person> {
|
||||
|
||||
// additional query methods go here
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
NOTE: Joins (DBRefs) are not supported with Reactive MongoDB support.
|
||||
@@ -1,577 +0,0 @@
|
||||
[[mongo.reactive]]
|
||||
= Reactive MongoDB support
|
||||
|
||||
The reactive MongoDB support contains the following basic set of features:
|
||||
|
||||
* Spring configuration support that uses Java-based `@Configuration` classes, a `MongoClient` instance, and replica sets.
|
||||
* `ReactiveMongoTemplate`, which is a helper class that increases productivity by using `MongoOperations` in a reactive manner. It includes integrated object mapping between `Document` instances and POJOs.
|
||||
* Exception translation into Spring's portable Data Access Exception hierarchy.
|
||||
* Feature-rich Object Mapping integrated with Spring's `ConversionService`.
|
||||
* Annotation-based mapping metadata that is extensible to support other metadata formats.
|
||||
* Persistence and mapping lifecycle events.
|
||||
* Java based `Query`, `Criteria`, and `Update` DSLs.
|
||||
* Automatic implementation of reactive repository interfaces including support for custom query methods.
|
||||
|
||||
For most tasks, you should use `ReactiveMongoTemplate` or the repository support, both of which use the rich mapping functionality. `ReactiveMongoTemplate` is the place to look for accessing functionality such as incrementing counters or ad-hoc CRUD operations. `ReactiveMongoTemplate` also provides callback methods so that you can use the low-level API artifacts (such as `MongoDatabase`) to communicate directly with MongoDB. The goal with naming conventions on various API artifacts is to copy those in the base MongoDB Java driver so that you can map your existing knowledge onto the Spring APIs.
|
||||
|
||||
[[mongodb-reactive-getting-started]]
|
||||
== Getting Started
|
||||
|
||||
Spring MongoDB support requires MongoDB 2.6 or higher and Java SE 8 or higher.
|
||||
|
||||
First, you need to set up a running MongoDB server. Refer to the https://docs.mongodb.org/manual/core/introduction/[MongoDB Quick Start guide] for an explanation on how to startup a MongoDB instance. Once installed, starting MongoDB is typically a matter of running the following command: `${MONGO_HOME}/bin/mongod`
|
||||
|
||||
To create a Spring project in STS, go to File -> New -> Spring Template Project -> Simple Spring Utility Project and press Yes when prompted. Then enter a project and a package name, such as org.spring.mongodb.example.
|
||||
|
||||
Then add the following to the pom.xml dependencies section.
|
||||
|
||||
[source,xml,subs="+attributes"]
|
||||
----
|
||||
<dependencies>
|
||||
|
||||
<!-- other dependency elements omitted -->
|
||||
|
||||
<dependency>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb</artifactId>
|
||||
<version>{version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-driver-reactivestreams</artifactId>
|
||||
<version>{mongo-reactivestreams}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.projectreactor</groupId>
|
||||
<artifactId>reactor-core</artifactId>
|
||||
<version>{reactor}</version>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
----
|
||||
|
||||
NOTE: MongoDB uses two different drivers for blocking and reactive (non-blocking) data access. While blocking operations are provided by default, you can opt-in for reactive usage.
|
||||
|
||||
To get started with a working example, create a simple `Person` class to persist, as follows:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
@Document
|
||||
public class Person {
|
||||
|
||||
private String id;
|
||||
private String name;
|
||||
private int age;
|
||||
|
||||
public Person(String name, int age) {
|
||||
this.name = name;
|
||||
this.age = age;
|
||||
}
|
||||
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
public int getAge() {
|
||||
return age;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Person [id=" + id + ", name=" + name + ", age=" + age + "]";
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
Then create an application to run, as follows:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
public class ReactiveMongoApp {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(ReactiveMongoApp.class);
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
CountDownLatch latch = new CountDownLatch(1);
|
||||
|
||||
ReactiveMongoTemplate mongoOps = new ReactiveMongoTemplate(MongoClients.create(), "database");
|
||||
|
||||
mongoOps.insert(new Person("Joe", 34))
|
||||
.flatMap(p -> mongoOps.findOne(new Query(where("name").is("Joe")), Person.class))
|
||||
.doOnNext(person -> log.info(person.toString()))
|
||||
.flatMap(person -> mongoOps.dropCollection("person"))
|
||||
.doOnComplete(latch::countDown)
|
||||
.subscribe();
|
||||
|
||||
latch.await();
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
Running the preceding class produces the following output:
|
||||
|
||||
[source]
|
||||
----
|
||||
2016-09-20 14:56:57,373 DEBUG .index.MongoPersistentEntityIndexCreator: 124 - Analyzing class class example.ReactiveMongoApp$Person for index information.
|
||||
2016-09-20 14:56:57,452 DEBUG .data.mongodb.core.ReactiveMongoTemplate: 975 - Inserting Document containing fields: [_class, name, age] in collection: person
|
||||
2016-09-20 14:56:57,541 DEBUG .data.mongodb.core.ReactiveMongoTemplate:1503 - findOne using query: { "name" : "Joe"} fields: null for class: class example.ReactiveMongoApp$Person in collection: person
|
||||
2016-09-20 14:56:57,545 DEBUG .data.mongodb.core.ReactiveMongoTemplate:1979 - findOne using query: { "name" : "Joe"} in db.collection: database.person
|
||||
2016-09-20 14:56:57,567 INFO example.ReactiveMongoApp: 43 - Person [id=57e1321977ac501c68d73104, name=Joe, age=34]
|
||||
2016-09-20 14:56:57,573 DEBUG .data.mongodb.core.ReactiveMongoTemplate: 528 - Dropped collection [person]
|
||||
----
|
||||
|
||||
Even in this simple example, there are a few things to take notice of:
|
||||
|
||||
* You can instantiate the central helper class of Spring Mongo (<<mongo.reactive.template,`ReactiveMongoTemplate`>>) by using the standard `com.mongodb.reactivestreams.client.MongoClient` object and the name of the database to use.
|
||||
* The mapper works against standard POJO objects without the need for any additional metadata (though you can optionally provide that information — see <<mapping-chapter,here>>).
|
||||
* Conventions are used for handling the ID field, converting it to be an `ObjectId` when stored in the database.
|
||||
* Mapping conventions can use field access. Notice that the `Person` class has only getters.
|
||||
* If the constructor argument names match the field names of the stored document, they are used to instantiate the object.
|
||||
|
||||
There is a https://github.com/spring-projects/spring-data-examples[GitHub repository with several examples] that you can download and play around with to get a feel for how the library works.
|
||||
|
||||
[[mongo.reactive.driver]]
|
||||
== Connecting to MongoDB with Spring and the Reactive Streams Driver
|
||||
|
||||
One of the first tasks when using MongoDB and Spring is to create a `com.mongodb.reactivestreams.client.MongoClient` object by using the IoC container.
|
||||
|
||||
[[mongo.reactive.mongo-java-config]]
|
||||
=== Registering a MongoClient Instance Using Java-based Metadata
|
||||
|
||||
The following example shows how to use Java-based bean metadata to register an instance of a `com.mongodb.reactivestreams.client.MongoClient`:
|
||||
|
||||
.Registering a `com.mongodb.reactivestreams.client.MongoClient` object using Java based bean metadata
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Configuration
|
||||
public class AppConfig {
|
||||
|
||||
/*
|
||||
* Use the Reactive Streams Mongo Client API to create a com.mongodb.reactivestreams.client.MongoClient instance.
|
||||
*/
|
||||
public @Bean MongoClient reactiveMongoClient() {
|
||||
return MongoClients.create("mongodb://localhost");
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
This approach lets you use the standard `com.mongodb.reactivestreams.client.MongoClient` API (which you may already know).
|
||||
|
||||
An alternative is to register an instance of `com.mongodb.reactivestreams.client.MongoClient` instance with the container by using Spring's `ReactiveMongoClientFactoryBean`. As compared to instantiating a `com.mongodb.reactivestreams.client.MongoClient` instance directly, the `FactoryBean` approach has the added advantage of also providing the container with an `ExceptionTranslator` implementation that translates MongoDB exceptions to exceptions in Spring's portable `DataAccessException` hierarchy for data access classes annotated with the `@Repository` annotation. This hierarchy and use of `@Repository` is described in link:{springDocsUrl}/data-access.html[Spring's DAO support features].
|
||||
|
||||
The following example shows Java-based bean metadata that supports exception translation on `@Repository` annotated classes:
|
||||
|
||||
.Registering a `com.mongodb.reactivestreams.client.MongoClient` object using Spring's MongoClientFactoryBean and enabling Spring's exception translation support
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Configuration
|
||||
public class AppConfig {
|
||||
|
||||
/*
|
||||
* Factory bean that creates the com.mongodb.reactivestreams.client.MongoClient instance
|
||||
*/
|
||||
public @Bean ReactiveMongoClientFactoryBean mongoClient() {
|
||||
|
||||
ReactiveMongoClientFactoryBean clientFactory = new ReactiveMongoClientFactoryBean();
|
||||
clientFactory.setHost("localhost");
|
||||
|
||||
return clientFactory;
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
To access the `com.mongodb.reactivestreams.client.MongoClient` object created by the `ReactiveMongoClientFactoryBean` in other `@Configuration` or your own classes, get the `MongoClient` from the context.
|
||||
|
||||
|
||||
[[mongo.reactive.mongo-db-factory]]
|
||||
=== The ReactiveMongoDatabaseFactory Interface
|
||||
|
||||
While `com.mongodb.reactivestreams.client.MongoClient` is the entry point to the reactive MongoDB driver API, connecting to a specific MongoDB database instance requires additional information, such as the database name. With that information, you can obtain a `com.mongodb.reactivestreams.client.MongoDatabase` object and access all the functionality of a specific MongoDB database instance. Spring provides the `org.springframework.data.mongodb.core.ReactiveMongoDatabaseFactory` interface to bootstrap connectivity to the database. The following listing shows the `ReactiveMongoDatabaseFactory` interface:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
public interface ReactiveMongoDatabaseFactory {
|
||||
|
||||
/**
|
||||
* Creates a default {@link MongoDatabase} instance.
|
||||
*
|
||||
* @return
|
||||
* @throws DataAccessException
|
||||
*/
|
||||
MongoDatabase getMongoDatabase() throws DataAccessException;
|
||||
|
||||
/**
|
||||
* Creates a {@link MongoDatabase} instance to access the database with the given name.
|
||||
*
|
||||
* @param dbName must not be {@literal null} or empty.
|
||||
* @return
|
||||
* @throws DataAccessException
|
||||
*/
|
||||
MongoDatabase getMongoDatabase(String dbName) throws DataAccessException;
|
||||
|
||||
/**
|
||||
* Exposes a shared {@link MongoExceptionTranslator}.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
PersistenceExceptionTranslator getExceptionTranslator();
|
||||
}
|
||||
----
|
||||
|
||||
The `org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory` class implements the `ReactiveMongoDatabaseFactory` interface and is created with a standard `com.mongodb.reactivestreams.client.MongoClient` instance and the database name.
|
||||
|
||||
Instead of using the IoC container to create an instance of `ReactiveMongoTemplate`, you can use them in standard Java code, as follows:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
public class MongoApp {
|
||||
|
||||
private static final Log log = LogFactory.getLog(MongoApp.class);
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
ReactiveMongoOperations mongoOps = new ReactiveMongoTemplate(new SimpleReactiveMongoDatabaseFactory(MongoClients.create(), "database"));
|
||||
|
||||
mongoOps.insert(new Person("Joe", 34))
|
||||
.flatMap(p -> mongoOps.findOne(new Query(where("name").is("Joe")), Person.class))
|
||||
.doOnNext(person -> log.info(person.toString()))
|
||||
.flatMap(person -> mongoOps.dropCollection("person"))
|
||||
.subscribe();
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
The use of `SimpleReactiveMongoDatabaseFactory` is the only difference from the listing shown in the <<mongodb-reactive-getting-started,getting started section>>.
|
||||
|
||||
[[mongo.reactive.mongo-db-factory-java]]
|
||||
=== Registering a ReactiveMongoDatabaseFactory Instance by Using Java-based Metadata
|
||||
|
||||
To register a `ReactiveMongoDatabaseFactory` instance with the container, you can write code much like what was highlighted in the previous code listing, as the following example shows:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
@Configuration
|
||||
public class MongoConfiguration {
|
||||
|
||||
public @Bean ReactiveMongoDatabaseFactory reactiveMongoDatabaseFactory() {
|
||||
return new SimpleReactiveMongoDatabaseFactory(MongoClients.create(), "database");
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
To define the username and password, create a MongoDB connection string and pass it into the factory method, as the next listing shows. The following listing also shows how to use `ReactiveMongoDatabaseFactory` to register an instance of `ReactiveMongoTemplate` with the container:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
@Configuration
|
||||
public class MongoConfiguration {
|
||||
|
||||
public @Bean ReactiveMongoDatabaseFactory reactiveMongoDatabaseFactory() {
|
||||
return new SimpleReactiveMongoDatabaseFactory(MongoClients.create("mongodb://joe:secret@localhost"), "database");
|
||||
}
|
||||
|
||||
public @Bean ReactiveMongoTemplate reactiveMongoTemplate() {
|
||||
return new ReactiveMongoTemplate(reactiveMongoDatabaseFactory());
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
[[mongo.reactive.template]]
|
||||
== Introduction to `ReactiveMongoTemplate`
|
||||
|
||||
The `ReactiveMongoTemplate` class, located in the `org.springframework.data.mongodb` package, is the central class of the Spring's Reactive MongoDB support and provides a rich feature set to interact with the database. The template offers convenience operations to create, update, delete, and query for MongoDB documents and provides a mapping between your domain objects and MongoDB documents.
|
||||
|
||||
NOTE: Once configured, `ReactiveMongoTemplate` is thread-safe and can be reused across multiple instances.
|
||||
|
||||
The mapping between MongoDB documents and domain classes is done by delegating to an implementation of the `MongoConverter` interface. Spring provides a default implementation with `MongoMappingConverter`, but you can also write your own converter. See the <<mongo.custom-converters,section on `MongoConverter` instances>> for more detailed information.
|
||||
|
||||
The `ReactiveMongoTemplate` class implements the `ReactiveMongoOperations` interface. As much as possible, the methods on `ReactiveMongoOperations` mirror methods available on the MongoDB driver `Collection` object, to make the API familiar to existing MongoDB developers who are used to the driver API. For example, you can find methods such as `find`, `findAndModify`, `findOne`, `insert`, `remove`, `save`, `update`, and `updateMulti`. The design goal is to make it as easy as possible to transition between the use of the base MongoDB driver and `ReactiveMongoOperations`. A major difference between the two APIs is that `ReactiveMongoOperations` can be passed domain objects instead of `Document`, and there are fluent APIs for `Query`, `Criteria`, and `Update` operations instead of populating a `Document` to specify the parameters for those operations.
|
||||
|
||||
NOTE: The preferred way to reference the operations on `ReactiveMongoTemplate` instance is through its `ReactiveMongoOperations` interface.
|
||||
|
||||
The default converter implementation used by `ReactiveMongoTemplate` is `MappingMongoConverter`. While the `MappingMongoConverter` can use additional metadata to specify the mapping of objects to documents, it can also convert objects that contain no additional metadata by using some conventions for the mapping of IDs and collection names. These conventions as well as the use of mapping annotations are explained in the <<mapping-chapter,Mapping chapter>>.
|
||||
|
||||
Another central feature of `ReactiveMongoTemplate` is exception translation of exceptions thrown in the MongoDB Java driver into Spring's portable Data Access Exception hierarchy. See the section on <<mongo.exception,exception translation>> for more information.
|
||||
|
||||
There are many convenience methods on `ReactiveMongoTemplate` to help you easily perform common tasks. However, if you need to access the MongoDB driver API directly to access functionality not explicitly exposed by the MongoTemplate, you can use one of several `execute` callback methods to access underlying driver APIs. The `execute` callbacks give you a reference to either a `com.mongodb.reactivestreams.client.MongoCollection` or a `com.mongodb.reactivestreams.client.MongoDatabase` object. See <<mongo.reactive.executioncallback,Execution Callbacks>> for more information.
|
||||
|
||||
[[mongo.reactive.template.instantiating]]
|
||||
=== Instantiating ReactiveMongoTemplate
|
||||
|
||||
You can use Java to create and register an instance of `ReactiveMongoTemplate`, as follows:
|
||||
|
||||
.Registering a `com.mongodb.reactivestreams.client.MongoClient` object and enabling Spring's exception translation support
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Configuration
|
||||
public class AppConfig {
|
||||
|
||||
public @Bean MongoClient reactiveMongoClient() {
|
||||
return MongoClients.create("mongodb://localhost");
|
||||
}
|
||||
|
||||
public @Bean ReactiveMongoTemplate reactiveMongoTemplate() {
|
||||
return new ReactiveMongoTemplate(reactiveMongoClient(), "mydatabase");
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
There are several overloaded constructors of `ReactiveMongoTemplate`, including:
|
||||
|
||||
* `ReactiveMongoTemplate(MongoClient mongo, String databaseName)`: Takes the `com.mongodb.reactivestreams.client.MongoClient` object and the default database name to operate against.
|
||||
* `ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory)`: Takes a `ReactiveMongoDatabaseFactory` object that encapsulated the `com.mongodb.reactivestreams.client.MongoClient` object and database name.
|
||||
* `ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory, MongoConverter mongoConverter)`: Adds a `MongoConverter` to use for mapping.
|
||||
|
||||
When creating a `ReactiveMongoTemplate`, you might also want to set the following properties:
|
||||
|
||||
* `WriteResultCheckingPolicy`
|
||||
* `WriteConcern`
|
||||
* `ReadPreference`
|
||||
|
||||
NOTE: The preferred way to reference the operations on `ReactiveMongoTemplate` instance is through its `ReactiveMongoOperations` interface.
|
||||
|
||||
|
||||
[[mongo.reactive.template.writeresultchecking]]
|
||||
=== `WriteResultChecking` Policy
|
||||
|
||||
When in development, it is handy to either log or throw an `Exception` if the `com.mongodb.WriteResult` returned from any MongoDB operation contains an error. It is quite common to forget to do this during development and then end up with an application that looks like it runs successfully when, in fact, the database was not modified according to your expectations. Set the `MongoTemplate` `WriteResultChecking` property to an enum with one of the following values: `LOG`, `EXCEPTION`, or `NONE` to either log the error, throw an exception, or do nothing. The default is to use a `WriteResultChecking` value of `NONE`.
|
||||
|
||||
|
||||
[[mongo.reactive.template.writeconcern]]
|
||||
=== `WriteConcern`
|
||||
|
||||
If it has not yet been specified through the driver at a higher level (such as `MongoDatabase`), you can set the `com.mongodb.WriteConcern` property that the `ReactiveMongoTemplate` uses for write operations. If ReactiveMongoTemplate's `WriteConcern` property is not set, it defaults to the one set in the MongoDB driver's `MongoDatabase` or `MongoCollection` setting.
|
||||
|
||||
|
||||
[[mongo.reactive.template.writeconcernresolver]]
|
||||
=== `WriteConcernResolver`
|
||||
|
||||
For more advanced cases where you want to set different `WriteConcern` values on a per-operation basis (for remove, update, insert, and save operations), a strategy interface called `WriteConcernResolver` can be configured on `ReactiveMongoTemplate`. Since `ReactiveMongoTemplate` is used to persist POJOs, the `WriteConcernResolver` lets you create a policy that can map a specific POJO class to a `WriteConcern` value. The following listing shows the `WriteConcernResolver` interface:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
public interface WriteConcernResolver {
|
||||
WriteConcern resolve(MongoAction action);
|
||||
}
|
||||
----
|
||||
|
||||
The argument, `MongoAction`, determines the `WriteConcern` value to be used and whether to use the value of the template itself as a default. `MongoAction` contains the collection name being written to, the `java.lang.Class` of the POJO, the converted `DBObject`, the operation as a value from the `MongoActionOperation` enumeration (one of `REMOVE`, `UPDATE`, `INSERT`, `INSERT_LIST`, and `SAVE`), and a few other pieces of contextual information. The following example shows how to create a `WriteConcernResolver`:
|
||||
|
||||
[source]
|
||||
----
|
||||
private class MyAppWriteConcernResolver implements WriteConcernResolver {
|
||||
|
||||
public WriteConcern resolve(MongoAction action) {
|
||||
if (action.getEntityClass().getSimpleName().contains("Audit")) {
|
||||
return WriteConcern.NONE;
|
||||
} else if (action.getEntityClass().getSimpleName().contains("Metadata")) {
|
||||
return WriteConcern.JOURNAL_SAFE;
|
||||
}
|
||||
return action.getDefaultWriteConcern();
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
|
||||
[[mongo.reactive.template.save-update-remove]]
|
||||
== Saving, Updating, and Removing Documents
|
||||
|
||||
`ReactiveMongoTemplate` lets you save, update, and delete your domain objects and map those objects to documents stored in MongoDB.
|
||||
|
||||
Consider the following `Person` class:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
public class Person {
|
||||
|
||||
private String id;
|
||||
private String name;
|
||||
private int age;
|
||||
|
||||
public Person(String name, int age) {
|
||||
this.name = name;
|
||||
this.age = age;
|
||||
}
|
||||
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
public int getAge() {
|
||||
return age;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Person [id=" + id + ", name=" + name + ", age=" + age + "]";
|
||||
}
|
||||
|
||||
}
|
||||
----
|
||||
|
||||
The following listing shows how you can save, update, and delete the `Person` object:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
public class ReactiveMongoApp {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(ReactiveMongoApp.class);
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
CountDownLatch latch = new CountDownLatch(1);
|
||||
|
||||
ReactiveMongoTemplate mongoOps = new ReactiveMongoTemplate(MongoClients.create(), "database");
|
||||
|
||||
mongoOps.insert(new Person("Joe", 34)).doOnNext(person -> log.info("Insert: " + person))
|
||||
.flatMap(person -> mongoOps.findById(person.getId(), Person.class))
|
||||
.doOnNext(person -> log.info("Found: " + person))
|
||||
.zipWith(person -> mongoOps.updateFirst(query(where("name").is("Joe")), update("age", 35), Person.class))
|
||||
.flatMap(tuple -> mongoOps.remove(tuple.getT1())).flatMap(deleteResult -> mongoOps.findAll(Person.class))
|
||||
.count().doOnSuccess(count -> {
|
||||
log.info("Number of people: " + count);
|
||||
latch.countDown();
|
||||
})
|
||||
|
||||
.subscribe();
|
||||
|
||||
latch.await();
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
The preceding example includes implicit conversion between a `String` and `ObjectId` (by using the `MongoConverter`) as stored in the database and recognizing a convention of the property `Id` name.
|
||||
|
||||
NOTE: The preceding example is meant to show the use of save, update, and remove operations on `ReactiveMongoTemplate` and not to show complex mapping or chaining functionality.
|
||||
|
||||
"`<<mongo.query,Querying Documents>>`" explains the query syntax used in the preceding example in more detail. Additional documentation can be found in <<mongo-template, the blocking `MongoTemplate`>> section.
|
||||
|
||||
[[mongo.reactive.executioncallback]]
|
||||
== Execution Callbacks
|
||||
|
||||
One common design feature of all Spring template classes is that all functionality is routed into one of the templates that run callback methods. This helps ensure that exceptions and any resource management that may be required are performed consistently. While this was of much greater need in the case of JDBC and JMS than with MongoDB, it still offers a single spot for exception translation and logging to occur. As such, using the `execute` callback is the preferred way to access the MongoDB driver's `MongoDatabase` and `MongoCollection` objects to perform uncommon operations that were not exposed as methods on `ReactiveMongoTemplate`.
|
||||
|
||||
Here is a list of `execute` callback methods.
|
||||
|
||||
* `<T> Flux<T>` *execute* `(Class<?> entityClass, ReactiveCollectionCallback<T> action)`: Runs the given `ReactiveCollectionCallback` for the entity collection of the specified class.
|
||||
|
||||
* `<T> Flux<T>` *execute* `(String collectionName, ReactiveCollectionCallback<T> action)`: Runs the given `ReactiveCollectionCallback` on the collection of the given name.
|
||||
|
||||
* `<T> Flux<T>` *execute* `(ReactiveDatabaseCallback<T> action)`: Runs a `ReactiveDatabaseCallback` translating any exceptions as necessary.
|
||||
|
||||
The following example uses the `ReactiveCollectionCallback` to return information about an index:
|
||||
|
||||
[source,java]
|
||||
----
|
||||
Flux<Boolean> hasIndex = operations.execute("geolocation",
|
||||
collection -> Flux.from(collection.listIndexes(Document.class))
|
||||
.filter(document -> document.get("name").equals("fancy-index-name"))
|
||||
.flatMap(document -> Mono.just(true))
|
||||
.defaultIfEmpty(false));
|
||||
----
|
||||
|
||||
[[reactive.gridfs]]
|
||||
== GridFS Support
|
||||
|
||||
MongoDB supports storing binary files inside its filesystem, GridFS.
|
||||
Spring Data MongoDB provides a `ReactiveGridFsOperations` interface as well as the corresponding implementation, `ReactiveGridFsTemplate`, to let you interact with the filesystem.
|
||||
You can set up a `ReactiveGridFsTemplate` instance by handing it a `ReactiveMongoDatabaseFactory` as well as a `MongoConverter`, as the following example shows:
|
||||
|
||||
.JavaConfig setup for a ReactiveGridFsTemplate
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
class GridFsConfiguration extends AbstractReactiveMongoConfiguration {
|
||||
|
||||
// … further configuration omitted
|
||||
|
||||
@Bean
|
||||
public ReactiveGridFsTemplate reactiveGridFsTemplate() {
|
||||
return new ReactiveGridFsTemplate(reactiveMongoDbFactory(), mappingMongoConverter());
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
The template can now be injected and used to perform storage and retrieval operations, as the following example shows:
|
||||
|
||||
.Using ReactiveGridFsTemplate to store files
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
class ReactiveGridFsClient {
|
||||
|
||||
@Autowired
|
||||
ReactiveGridFsTemplate operations;
|
||||
|
||||
@Test
|
||||
public Mono<ObjectId> storeFileToGridFs() {
|
||||
|
||||
FileMetadata metadata = new FileMetadata();
|
||||
// populate metadata
|
||||
Publisher<DataBuffer> file = … // lookup File or Resource
|
||||
|
||||
return operations.store(file, "filename.txt", metadata);
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
The `store(…)` operations take a `Publisher<DataBuffer>`, a filename, and (optionally) metadata information about the file to store. The metadata can be an arbitrary object, which will be marshaled by the `MongoConverter` configured with the `ReactiveGridFsTemplate`. Alternatively, you can also provide a `Document`.
|
||||
|
||||
NOTE: MongoDB's driver uses `AsyncInputStream` and `AsyncOutputStream` interfaces to exchange binary streams. Spring Data MongoDB adapts these interfaces to `Publisher<DataBuffer>`. Read more about `DataBuffer` in https://docs.spring.io/spring-framework/docs/{springVersion}/reference/html/core.html#databuffers[Spring's reference documentation].
|
||||
|
||||
You can read files from the filesystem through either the `find(…)` or the `getResources(…)` methods. Let's have a look at the `find(…)` methods first. You can either find a single file or multiple files that match a `Query`. You can use the `GridFsCriteria` helper class to define queries. It provides static factory methods to encapsulate default metadata fields (such as `whereFilename()` and `whereContentType()`) or a custom one through `whereMetaData()`. The following example shows how to use `ReactiveGridFsTemplate` to query for files:
|
||||
|
||||
.Using ReactiveGridFsTemplate to query for files
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
class ReactiveGridFsClient {
|
||||
|
||||
@Autowired
|
||||
ReactiveGridFsTemplate operations;
|
||||
|
||||
@Test
|
||||
public Flux<GridFSFile> findFilesInGridFs() {
|
||||
    return operations.find(query(whereFilename().is("filename.txt")));
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
NOTE: Currently, MongoDB does not support defining sort criteria when retrieving files from GridFS. For this reason, any sort criteria defined on the `Query` instance handed into the `find(…)` method are disregarded.
|
||||
|
||||
The other option to read files from the GridFs is to use the methods modeled along the lines of `ResourcePatternResolver`.
|
||||
`ReactiveGridFsOperations` uses reactive types to defer running while `ResourcePatternResolver` uses a synchronous interface.
|
||||
These methods allow handing an Ant path into the method and can thus retrieve files matching the given pattern. The following example shows how to use `ReactiveGridFsTemplate` to read files:
|
||||
|
||||
.Using ReactiveGridFsTemplate to read files
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
class ReactiveGridFsClient {
|
||||
|
||||
@Autowired
|
||||
ReactiveGridFsOperations operations;
|
||||
|
||||
@Test
|
||||
public void readFilesFromGridFs() {
|
||||
Flux<ReactiveGridFsResource> txtFiles = operations.getResources("*.txt");
|
||||
}
|
||||
}
|
||||
----
|
||||
====
|
||||
@@ -1,46 +0,0 @@
|
||||
[[time-series]]
|
||||
== Time Series
|
||||
|
||||
MongoDB 5.0 introduced https://docs.mongodb.com/manual/core/timeseries-collections/[Time Series] collections that are optimized to efficiently store documents over time such as measurements or events.
|
||||
Those collections need to be created as such before inserting any data.
|
||||
Collections can be created by either running the `createCollection` command, defining time series collection options or extracting options from a `@TimeSeries` annotation as shown in the examples below.
|
||||
|
||||
.Create a Time Series Collection
|
||||
====
|
||||
.Create a Time Series via the MongoDB Driver
|
||||
[source,java]
|
||||
----
|
||||
template.execute(db -> {
|
||||
|
||||
com.mongodb.client.model.CreateCollectionOptions options = new CreateCollectionOptions();
|
||||
options.timeSeriesOptions(new TimeSeriesOptions("timestamp"));
|
||||
|
||||
db.createCollection("weather", options);
|
||||
return "OK";
|
||||
});
|
||||
----
|
||||
|
||||
.Create a Time Series Collection with `CollectionOptions`
|
||||
[source,java]
|
||||
----
|
||||
template.createCollection("weather", CollectionOptions.timeSeries("timestamp"));
|
||||
----
|
||||
|
||||
.Create a Time Series Collection derived from an Annotation
|
||||
[source,java]
|
||||
----
|
||||
@TimeSeries(collection="weather", timeField = "timestamp")
|
||||
public class Measurement {
|
||||
|
||||
String id;
|
||||
Instant timestamp;
|
||||
// ...
|
||||
}
|
||||
|
||||
template.createCollection(Measurement.class);
|
||||
----
|
||||
====
|
||||
|
||||
The snippets above can easily be transferred to the reactive API offering the very same methods.
|
||||
Make sure to properly _subscribe_ to the returned publishers.
|
||||
|
||||
Reference in New Issue
Block a user