Compare commits
34 Commits
| Author | SHA1 | Date |
|---|---|---|
| | cdc78592ee | |
| | ab97e58793 | |
| | a4eeb9f305 | |
| | de6c649c83 | |
| | e90c6b0790 | |
| | d2e68cd925 | |
| | 7ed48f5e76 | |
| | 2359357977 | |
| | a90f238574 | |
| | 1c9188f7e1 | |
| | a2f7c3f482 | |
| | 3440bf6c4d | |
| | deed19187f | |
| | c5f2abe037 | |
| | 6cce16414e | |
| | a85855a307 | |
| | 31390d41e0 | |
| | 117ab7c033 | |
| | 73fbaaf3bd | |
| | 17937b0475 | |
| | 46943716ee | |
| | 25af5b5f79 | |
| | a5a4c6d8c4 | |
| | 5885d084be | |
| | d8fdc18265 | |
| | 840bde65e8 | |
| | f2ee7d90c4 | |
| | 898489fecf | |
| | 3575d5461e | |
| | 4fa09d80db | |
| | bb84b92d1d | |
| | af85b46e7d | |
| | 96fbe49cdb | |
| | 6b36c792b9 | |
@@ -16,7 +16,9 @@ env:
addons:
  apt:
    sources:
      - mongodb-3.4-precise
      - mongodb-upstart
      - sourceline: 'deb [arch=amd64] http://repo.mongodb.org/apt/ubuntu precise/mongodb-org/3.4 multiverse'
        key_url: 'https://www.mongodb.org/static/pgp/server-3.4.asc'
    packages:
      - mongodb-org-server
      - mongodb-org-shell

pom.xml
@@ -5,7 +5,7 @@
  <groupId>org.springframework.data</groupId>
  <artifactId>spring-data-mongodb-parent</artifactId>
  <version>2.0.0.M3</version>
  <version>2.0.0.M4</version>
  <packaging>pom</packaging>

  <name>Spring Data MongoDB</name>

@@ -15,7 +15,7 @@
  <parent>
    <groupId>org.springframework.data.build</groupId>
    <artifactId>spring-data-parent</artifactId>
    <version>2.0.0.M3</version>
    <version>2.0.0.M4</version>
  </parent>

  <modules>

@@ -28,9 +28,9 @@
  <properties>
    <project.type>multi</project.type>
    <dist.id>spring-data-mongodb</dist.id>
    <springdata.commons>2.0.0.M3</springdata.commons>
    <springdata.commons>2.0.0.M4</springdata.commons>
    <mongo>3.4.2</mongo>
    <mongo.reactivestreams>1.3.0</mongo.reactivestreams>
    <mongo.reactivestreams>1.5.0</mongo.reactivestreams>
  </properties>

  <developers>

@@ -6,7 +6,7 @@
  <parent>
    <groupId>org.springframework.data</groupId>
    <artifactId>spring-data-mongodb-parent</artifactId>
    <version>2.0.0.M3</version>
    <version>2.0.0.M4</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

@@ -48,7 +48,7 @@
  <dependency>
    <groupId>org.springframework.data</groupId>
    <artifactId>spring-data-mongodb</artifactId>
    <version>2.0.0.M3</version>
    <version>2.0.0.M4</version>
  </dependency>

  <!-- reactive -->

@@ -13,7 +13,7 @@
  <parent>
    <groupId>org.springframework.data</groupId>
    <artifactId>spring-data-mongodb-parent</artifactId>
    <version>2.0.0.M3</version>
    <version>2.0.0.M4</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

@@ -1,5 +1,7 @@
# MongoDB Log4J Appender

:warning: Deprecated. About to be removed for 2.0.0.RC1.

This module sets up a Log4J appender that puts logging events in MongoDB. It is fully configurable
and connects directly to the MongoDB server using the driver. It has no dependency on any Spring package.

@@ -5,7 +5,7 @@
  <parent>
    <groupId>org.springframework.data</groupId>
    <artifactId>spring-data-mongodb-parent</artifactId>
    <version>2.0.0.M3</version>
    <version>2.0.0.M4</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

@@ -44,7 +44,9 @@ import com.mongodb.WriteConcern;
 * @author Oliver Gierke
 * @author Christoph Strobl
 * @author Ricardo Espirito Santo
 * @deprecated since 2.0.0.M4. About to be removed for 2.0.0.RC1.
 */
@Deprecated
public class MongoLog4jAppender extends AppenderSkeleton {

    public static final String LEVEL = "level";

@@ -1,5 +1,6 @@
/**
 * Infrastructure for using MongoDB as a logging sink.
 *
 * @deprecated since 2.0.0.M4. About to be removed for 2.0.0.RC1.
 */
package org.springframework.data.mongodb.log4j;

@@ -11,13 +11,14 @@
  <parent>
    <groupId>org.springframework.data</groupId>
    <artifactId>spring-data-mongodb-parent</artifactId>
    <version>2.0.0.M3</version>
    <version>2.0.0.M4</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

  <properties>
    <objenesis>1.3</objenesis>
    <equalsverifier>1.5</equalsverifier>
    <kotlin>1.1.2-5</kotlin>
  </properties>

  <dependencies>
@@ -111,7 +112,7 @@
  </dependency>

  <dependency>
    <groupId>io.projectreactor.addons</groupId>
    <groupId>io.projectreactor</groupId>
    <artifactId>reactor-test</artifactId>
    <version>${reactor}</version>
    <optional>true</optional>
@@ -231,11 +232,117 @@
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<!-- Kotlin extension -->
|
||||
<dependency>
|
||||
<groupId>org.jetbrains.kotlin</groupId>
|
||||
<artifactId>kotlin-stdlib</artifactId>
|
||||
<version>${kotlin}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.jetbrains.kotlin</groupId>
|
||||
<artifactId>kotlin-reflect</artifactId>
|
||||
<version>${kotlin}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.jetbrains.kotlin</groupId>
|
||||
<artifactId>kotlin-test</artifactId>
|
||||
<version>${kotlin}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.nhaarman</groupId>
|
||||
<artifactId>mockito-kotlin</artifactId>
|
||||
<version>1.5.0</version>
|
||||
<scope>test</scope>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>org.jetbrains.kotlin</groupId>
|
||||
<artifactId>kotlin-stdlib</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.jetbrains.kotlin</groupId>
|
||||
<artifactId>kotlin-reflect</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
|
||||
<plugin>
|
||||
<artifactId>kotlin-maven-plugin</artifactId>
|
||||
<groupId>org.jetbrains.kotlin</groupId>
|
||||
<version>${kotlin}</version>
|
||||
<configuration>
|
||||
<jvmTarget>${source.level}</jvmTarget>
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>compile</id>
|
||||
<phase>compile</phase>
|
||||
<goals>
|
||||
<goal>compile</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<sourceDirs>
|
||||
<sourceDir>${project.basedir}/src/main/kotlin</sourceDir>
|
||||
<sourceDir>${project.basedir}/src/main/java</sourceDir>
|
||||
</sourceDirs>
|
||||
</configuration>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>test-compile</id>
|
||||
<phase>test-compile</phase>
|
||||
<goals>
|
||||
<goal>test-compile</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<sourceDirs>
|
||||
<sourceDir>${project.basedir}/src/test/kotlin</sourceDir>
|
||||
<sourceDir>${project.basedir}/src/test/java</sourceDir>
|
||||
</sourceDirs>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-compiler-plugin</artifactId>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>default-compile</id>
|
||||
<phase>none</phase>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>default-testCompile</id>
|
||||
<phase>none</phase>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>java-compile</id>
|
||||
<phase>compile</phase>
|
||||
<goals>
|
||||
<goal>compile</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>java-test-compile</id>
|
||||
<phase>test-compile</phase>
|
||||
<goals>
|
||||
<goal>testCompile</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<plugin>
|
||||
<groupId>com.mysema.maven</groupId>
|
||||
<artifactId>apt-maven-plugin</artifactId>
|
||||
|
||||
@@ -28,10 +28,10 @@ import org.springframework.util.Assert;
|
||||
*/
|
||||
public class CollectionOptions {
|
||||
|
||||
private Integer maxDocuments;
|
||||
private Integer size;
|
||||
private Long maxDocuments;
|
||||
private Long size;
|
||||
private Boolean capped;
|
||||
private Optional<Collation> collation;
|
||||
private Collation collation;
|
||||
|
||||
/**
|
||||
* Constructs a new <code>CollectionOptions</code> instance.
|
||||
@@ -40,12 +40,14 @@ public class CollectionOptions {
|
||||
* @param maxDocuments the maximum number of documents in the collection.
|
||||
* @param capped true to create a "capped" collection (fixed size with auto-FIFO behavior based on insertion order),
|
||||
* false otherwise.
|
||||
* @deprecated since 2.0 please use {@link CollectionOptions#empty()} as entry point.
|
||||
*/
|
||||
public CollectionOptions(Integer size, Integer maxDocuments, Boolean capped) {
|
||||
this(size, maxDocuments, capped, Optional.empty());
|
||||
@Deprecated
|
||||
public CollectionOptions(Long size, Long maxDocuments, Boolean capped) {
|
||||
this(size, maxDocuments, capped, null);
|
||||
}
|
||||
|
||||
private CollectionOptions(Integer size, Integer maxDocuments, Boolean capped, Optional<Collation> collation) {
|
||||
private CollectionOptions(Long size, Long maxDocuments, Boolean capped, Collation collation) {
|
||||
|
||||
this.maxDocuments = maxDocuments;
|
||||
this.size = size;
|
||||
@@ -53,8 +55,6 @@ public class CollectionOptions {
|
||||
this.collation = collation;
|
||||
}
|
||||
|
||||
private CollectionOptions() {}
|
||||
|
||||
/**
|
||||
* Create new {@link CollectionOptions} by just providing the {@link Collation} to use.
|
||||
*
|
||||
@@ -66,9 +66,7 @@ public class CollectionOptions {
|
||||
|
||||
Assert.notNull(collation, "Collation must not be null!");
|
||||
|
||||
CollectionOptions options = new CollectionOptions();
|
||||
options.setCollation(collation);
|
||||
return options;
|
||||
return new CollectionOptions(null, null, null, collation);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -78,17 +76,17 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public static CollectionOptions empty() {
|
||||
return new CollectionOptions();
|
||||
return new CollectionOptions(null, null, null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link CollectionOptions} with already given settings and capped set to {@literal true}.
|
||||
* Create new {@link CollectionOptions} with already given settings and capped set to {@literal true}. <br />
|
||||
* <strong>NOTE</strong> Using capped collections requires defining {@link #size(long)}.
|
||||
*
|
||||
* @param size the collection size in bytes, this data space is preallocated.
|
||||
* @return new {@link CollectionOptions}.
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions capped(int size) {
|
||||
public CollectionOptions capped() {
|
||||
return new CollectionOptions(size, maxDocuments, true, collation);
|
||||
}
|
||||
|
||||
@@ -99,7 +97,7 @@ public class CollectionOptions {
|
||||
* @return new {@link CollectionOptions}.
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions maxDocuments(Integer maxDocuments) {
|
||||
public CollectionOptions maxDocuments(long maxDocuments) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation);
|
||||
}
|
||||
|
||||
@@ -110,7 +108,7 @@ public class CollectionOptions {
|
||||
* @return new {@link CollectionOptions}.
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions size(int size) {
|
||||
public CollectionOptions size(long size) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation);
|
||||
}
|
||||
|
||||
@@ -122,50 +120,44 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions collation(Collation collation) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, Optional.ofNullable(collation));
|
||||
}
|
||||
|
||||
public Integer getMaxDocuments() {
|
||||
return maxDocuments;
|
||||
}
|
||||
|
||||
public void setMaxDocuments(Integer maxDocuments) {
|
||||
this.maxDocuments = maxDocuments;
|
||||
}
|
||||
|
||||
public Integer getSize() {
|
||||
return size;
|
||||
}
|
||||
|
||||
public void setSize(Integer size) {
|
||||
this.size = size;
|
||||
}
|
||||
|
||||
public Boolean getCapped() {
|
||||
return capped;
|
||||
}
|
||||
|
||||
public void setCapped(Boolean capped) {
|
||||
this.capped = capped;
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set {@link Collation} options.
|
||||
* Get the max number of documents the collection should be limited to.
|
||||
*
|
||||
* @param collation
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
*/
|
||||
public Optional<Long> getMaxDocuments() {
|
||||
return Optional.ofNullable(maxDocuments);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@literal size} in bytes the collection should be limited to.
|
||||
*
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
*/
|
||||
public Optional<Long> getSize() {
|
||||
return Optional.ofNullable(size);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get if the collection should be capped.
|
||||
*
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
* @since 2.0
|
||||
*/
|
||||
public void setCollation(Collation collation) {
|
||||
this.collation = Optional.ofNullable(collation);
|
||||
public Optional<Boolean> getCapped() {
|
||||
return Optional.ofNullable(capped);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link Collation} settings.
|
||||
*
|
||||
* @return
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
* @since 2.0
|
||||
*/
|
||||
public Optional<Collation> getCollation() {
|
||||
return collation;
|
||||
return Optional.ofNullable(collation);
|
||||
}
|
||||
}
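
The reworked CollectionOptions above drops the mutable getters/setters in favor of an immutable, fluent style. A minimal usage sketch under that assumption (Person, template and the "en" collation are illustrative placeholders, not part of this diff):

// assumed imports: org.springframework.data.mongodb.core.CollectionOptions,
//                  org.springframework.data.mongodb.core.query.Collation
CollectionOptions options = CollectionOptions.empty()
        .capped()                        // capped collection; requires a size
        .size(1024)                      // preallocated size in bytes
        .maxDocuments(100)               // upper bound on the number of documents
        .collation(Collation.of("en"));  // optional collation

template.createCollection(Person.class, options);

Each call returns a new CollectionOptions instance, so intermediate values can be reused without side effects.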
|
||||
|
||||
@@ -0,0 +1,123 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
|
||||
import org.springframework.data.util.CloseableIterator;
|
||||
|
||||
/**
|
||||
* {@link ExecutableAggregationOperation} allows creation and execution of MongoDB aggregation operations in a fluent
|
||||
* API style. <br />
|
||||
* The starting {@literal domainType} is used for mapping the {@link Aggregation} provided via {@code by} into the
|
||||
* MongoDB specific representation, as well as mapping back the resulting {@link org.bson.Document}. An alternative
|
||||
* input type for mapping the {@link Aggregation} can be provided by using
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation}.
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* aggregateAndReturn(Jedi.class)
|
||||
* .by(newAggregation(Human.class, project("These are not the droids you are looking for")))
|
||||
* .get();
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.0
|
||||
*/
|
||||
public interface ExecutableAggregationOperation {
|
||||
|
||||
/**
|
||||
* Start creating an aggregation operation that returns results mapped to the given domain type. <br />
|
||||
* Use {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} to specify a potentially different
|
||||
* input type for the aggregation.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return new instance of {@link AggregationOperation}.
|
||||
* @throws IllegalArgumentException if domainType is {@literal null}.
|
||||
*/
|
||||
<T> AggregationOperation<T> aggregateAndReturn(Class<T> domainType);
|
||||
|
||||
/**
|
||||
* Collection override (Optional).
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface AggregationOperationWithCollection<T> {
|
||||
|
||||
/**
|
||||
* Explicitly set the name of the collection to perform the query on. <br />
|
||||
* Skip this step to use the default collection derived from the domain type.
|
||||
*
|
||||
* @param collection must not be {@literal null} nor {@literal empty}.
|
||||
* @return new instance of {@link AggregationOperationWithAggregation}.
|
||||
* @throws IllegalArgumentException if collection is {@literal null}.
|
||||
*/
|
||||
AggregationOperationWithAggregation<T> inCollection(String collection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Trigger execution by calling one of the terminating methods.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface TerminatingAggregationOperation<T> {
|
||||
|
||||
/**
|
||||
* Apply pipeline operations as specified and get all matching elements.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
AggregationResults<T> all();
|
||||
|
||||
/**
|
||||
* Apply pipeline operations as specified and stream all matching elements. <br />
|
||||
* Returns a {@link CloseableIterator} that wraps the MongoDB {@link com.mongodb.Cursor}.
|
||||
*
|
||||
* @return a {@link CloseableIterator} that wraps the MongoDB {@link com.mongodb.Cursor} that needs to be closed.
|
||||
* Never {@literal null}.
|
||||
*/
|
||||
CloseableIterator<T> stream();
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the aggregation with pipeline stages.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface AggregationOperationWithAggregation<T> {
|
||||
|
||||
/**
|
||||
* Set the aggregation to be used.
|
||||
*
|
||||
* @param aggregation must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingAggregationOperation}.
|
||||
* @throws IllegalArgumentException if aggregation is {@literal null}.
|
||||
*/
|
||||
TerminatingAggregationOperation<T> by(Aggregation aggregation);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface AggregationOperation<T>
|
||||
extends AggregationOperationWithCollection<T>, AggregationOperationWithAggregation<T> {}
|
||||
}
|
||||
@@ -0,0 +1,115 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
import org.springframework.data.util.CloseableIterator;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Implementation of {@link ExecutableAggregationOperation} operating directly on {@link MongoTemplate}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
class ExecutableAggregationOperationSupport implements ExecutableAggregationOperation {
|
||||
|
||||
private final MongoTemplate template;
|
||||
|
||||
/**
|
||||
* Create new instance of {@link ExecutableAggregationOperationSupport}.
|
||||
*
|
||||
* @param template must not be {@literal null}.
|
||||
* @throws IllegalArgumentException if template is {@literal null}.
|
||||
*/
|
||||
ExecutableAggregationOperationSupport(MongoTemplate template) {
|
||||
|
||||
Assert.notNull(template, "Template must not be null!");
|
||||
|
||||
this.template = template;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> AggregationOperation<T> aggregateAndReturn(Class<T> domainType) {
|
||||
|
||||
Assert.notNull(domainType, "DomainType must not be null!");
|
||||
|
||||
return new AggregationOperationSupport<>(template, null, domainType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
static class AggregationOperationSupport<T>
|
||||
implements AggregationOperationWithAggregation<T>, AggregationOperation<T>, TerminatingAggregationOperation<T> {
|
||||
|
||||
private final MongoTemplate template;
|
||||
private final Aggregation aggregation;
|
||||
private final Class<T> domainType;
|
||||
private final String collection;
|
||||
|
||||
@Override
|
||||
public AggregationOperationWithAggregation<T> inCollection(String collection) {
|
||||
|
||||
Assert.hasText(collection, "Collection must not be null nor empty!");
|
||||
|
||||
return new AggregationOperationSupport<>(template, aggregation, domainType, collection);
|
||||
}
|
||||
|
||||
@Override
|
||||
public TerminatingAggregationOperation<T> by(Aggregation aggregation) {
|
||||
|
||||
Assert.notNull(aggregation, "Aggregation must not be null!");
|
||||
|
||||
return new AggregationOperationSupport<>(template, aggregation, domainType, collection);
|
||||
}
|
||||
|
||||
@Override
|
||||
public AggregationResults<T> all() {
|
||||
return template.aggregate(aggregation, getCollectionName(aggregation), domainType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public CloseableIterator<T> stream() {
|
||||
return template.aggregateStream(aggregation, getCollectionName(aggregation), domainType);
|
||||
}
|
||||
|
||||
private String getCollectionName(Aggregation aggregation) {
|
||||
|
||||
if (StringUtils.hasText(collection)) {
|
||||
return collection;
|
||||
}
|
||||
|
||||
if (aggregation instanceof TypedAggregation) {
|
||||
|
||||
TypedAggregation<?> typedAggregation = (TypedAggregation<?>) aggregation;
|
||||
|
||||
if (typedAggregation.getInputType() != null) {
|
||||
return template.determineCollectionName(typedAggregation.getInputType());
|
||||
}
|
||||
}
|
||||
|
||||
return template.determineCollectionName(domainType);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,190 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.springframework.data.geo.GeoResults;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.util.CloseableIterator;
|
||||
|
||||
/**
|
||||
* {@link ExecutableFindOperation} allows creation and execution of MongoDB find operations in a fluent API style.
|
||||
* <br />
|
||||
* The starting {@literal domainType} is used for mapping the {@link Query} provided via {@code matching} into the
|
||||
* MongoDB specific representation. By default, the originating {@literal domainType} is also used for mapping back the
|
||||
* result from the {@link org.bson.Document}. However, it is possible to define a different {@literal returnType} via
* {@code as} for mapping the result.<br />
|
||||
* The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there
|
||||
* via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows overriding the
|
||||
* collection name for the execution.
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* query(Human.class)
|
||||
* .inCollection("star-wars")
|
||||
* .as(Jedi.class)
|
||||
* .matching(query(where("firstname").is("luke")))
|
||||
* .all();
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.0
|
||||
*/
|
||||
public interface ExecutableFindOperation {
|
||||
|
||||
/**
|
||||
* Start creating a find operation for the given {@literal domainType}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return new instance of {@link FindOperation}.
|
||||
* @throws IllegalArgumentException if domainType is {@literal null}.
|
||||
*/
|
||||
<T> FindOperation<T> query(Class<T> domainType);
|
||||
|
||||
/**
|
||||
* Trigger find execution by calling one of the terminating methods.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface TerminatingFindOperation<T> {
|
||||
|
||||
/**
|
||||
* Get exactly zero or one result.
|
||||
*
|
||||
* @return {@link Optional#empty()} if no match found.
|
||||
* @throws org.springframework.dao.IncorrectResultSizeDataAccessException if more than one match found.
|
||||
*/
|
||||
Optional<T> one();
|
||||
|
||||
/**
|
||||
* Get the first or no result.
|
||||
*
|
||||
* @return {@link Optional#empty()} if no match found.
|
||||
*/
|
||||
Optional<T> first();
|
||||
|
||||
/**
|
||||
* Get all matching elements.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
List<T> all();
|
||||
|
||||
/**
|
||||
* Stream all matching elements.
|
||||
*
|
||||
* @return a {@link CloseableIterator} that wraps the MongoDB {@link com.mongodb.Cursor} that needs to be closed.
|
||||
* Never {@literal null}.
|
||||
*/
|
||||
CloseableIterator<T> stream();
|
||||
}
|
||||
|
||||
/**
|
||||
* Trigger geonear execution by calling one of the terminating methods.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface TerminatingFindNearOperation<T> {
|
||||
|
||||
/**
|
||||
* Find all matching elements and return them as {@link org.springframework.data.geo.GeoResult}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
GeoResults<T> all();
|
||||
}
|
||||
|
||||
/**
|
||||
* Terminating operations invoking the actual query execution.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface FindOperationWithQuery<T> extends TerminatingFindOperation<T> {
|
||||
|
||||
/**
|
||||
* Set the filter query to be used.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingFindOperation}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
TerminatingFindOperation<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter query for the geoNear execution.
|
||||
*
|
||||
* @param nearQuery must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingFindNearOperation}.
|
||||
* @throws IllegalArgumentException if nearQuery is {@literal null}.
|
||||
*/
|
||||
TerminatingFindNearOperation<T> near(NearQuery nearQuery);
|
||||
}
|
||||
|
||||
/**
|
||||
* Collection override (Optional).
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface FindOperationWithCollection<T> extends FindOperationWithQuery<T> {
|
||||
|
||||
/**
|
||||
* Explicitly set the name of the collection to perform the query on. <br />
|
||||
* Skip this step to use the default collection derived from the domain type.
|
||||
*
|
||||
* @param collection must not be {@literal null} nor {@literal empty}.
|
||||
* @return new instance of {@link FindOperationWithProjection}.
|
||||
* @throws IllegalArgumentException if collection is {@literal null}.
|
||||
*/
|
||||
FindOperationWithProjection<T> inCollection(String collection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Result type override (Optional).
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface FindOperationWithProjection<T> extends FindOperationWithQuery<T> {
|
||||
|
||||
/**
|
||||
* Define the target type fields should be mapped to. <br />
|
||||
* Skip this step if you are only interested in the original domain type.
|
||||
*
|
||||
* @param resultType must not be {@literal null}.
|
||||
* @param <R> result type.
|
||||
* @return new instance of {@link FindOperationWithProjection}.
|
||||
* @throws IllegalArgumentException if resultType is {@literal null}.
|
||||
*/
|
||||
<R> FindOperationWithQuery<R> as(Class<R> resultType);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link FindOperation} provides methods for constructing lookup operations in a fluent way.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface FindOperation<T> extends FindOperationWithCollection<T>, FindOperationWithProjection<T> {}
|
||||
}
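
The Javadoc above already demonstrates the query/as/matching path; the geoNear branch works analogously. A hedged sketch, assuming the template exposes ExecutableFindOperation and reusing the Human/Jedi placeholders from that example (the coordinates are arbitrary):

// assumed imports: org.springframework.data.geo.GeoResults, Point, Metrics;
//                  org.springframework.data.mongodb.core.query.NearQuery
GeoResults<Jedi> nearby = template.query(Human.class)
        .as(Jedi.class)                                                   // map results to Jedi
        .near(NearQuery.near(new Point(-73.99, 40.73), Metrics.KILOMETERS))
        .all();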
|
||||
@@ -0,0 +1,200 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.dao.IncorrectResultSizeDataAccessException;
|
||||
import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.SerializationUtils;
|
||||
import org.springframework.data.util.CloseableIterator;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.client.FindIterable;
|
||||
|
||||
/**
|
||||
* Implementation of {@link ExecutableFindOperation}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
class ExecutableFindOperationSupport implements ExecutableFindOperation {
|
||||
|
||||
private final MongoTemplate template;
|
||||
|
||||
/**
|
||||
* Create new {@link ExecutableFindOperationSupport}.
|
||||
*
|
||||
* @param template must not be {@literal null}.
|
||||
* @throws IllegalArgumentException if template is {@literal null}.
|
||||
*/
|
||||
ExecutableFindOperationSupport(MongoTemplate template) {
|
||||
|
||||
Assert.notNull(template, "Template must not be null!");
|
||||
|
||||
this.template = template;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> FindOperation<T> query(Class<T> domainType) {
|
||||
|
||||
Assert.notNull(domainType, "DomainType must not be null!");
|
||||
|
||||
return new FindOperationSupport<>(template, domainType, domainType, null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param <T>
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
static class FindOperationSupport<T> implements FindOperation<T>, FindOperationWithCollection<T>,
|
||||
FindOperationWithProjection<T>, FindOperationWithQuery<T> {
|
||||
|
||||
private final MongoTemplate template;
|
||||
private final Class<?> domainType;
|
||||
private final Class<T> returnType;
|
||||
private final String collection;
|
||||
private final Query query;
|
||||
|
||||
@Override
|
||||
public FindOperationWithProjection<T> inCollection(String collection) {
|
||||
|
||||
Assert.hasText(collection, "Collection name must not be null nor empty!");
|
||||
|
||||
return new FindOperationSupport<>(template, domainType, returnType, collection, query);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T1> FindOperationWithQuery<T1> as(Class<T1> returnType) {
|
||||
|
||||
Assert.notNull(returnType, "ReturnType must not be null!");
|
||||
|
||||
return new FindOperationSupport<>(template, domainType, returnType, collection, query);
|
||||
}
|
||||
|
||||
@Override
|
||||
public TerminatingFindOperation<T> matching(Query query) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
|
||||
return new FindOperationSupport<>(template, domainType, returnType, collection, query);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<T> one() {
|
||||
|
||||
List<T> result = doFind(new DelegatingQueryCursorPreparer(getCursorPreparer(query, null)).limit(2));
|
||||
|
||||
if (ObjectUtils.isEmpty(result)) {
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
if (result.size() > 1) {
|
||||
throw new IncorrectResultSizeDataAccessException("Query " + asString() + " returned non unique result.", 1);
|
||||
}
|
||||
|
||||
return Optional.of(result.iterator().next());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<T> first() {
|
||||
|
||||
List<T> result = doFind(new DelegatingQueryCursorPreparer(getCursorPreparer(query, null)).limit(1));
|
||||
|
||||
return ObjectUtils.isEmpty(result) ? Optional.empty() : Optional.of(result.iterator().next());
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<T> all() {
|
||||
return doFind(null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public CloseableIterator<T> stream() {
|
||||
return doStream();
|
||||
}
|
||||
|
||||
@Override
|
||||
public TerminatingFindNearOperation<T> near(NearQuery nearQuery) {
|
||||
return () -> template.geoNear(nearQuery, domainType, getCollectionName(), returnType);
|
||||
}
|
||||
|
||||
private List<T> doFind(CursorPreparer preparer) {
|
||||
|
||||
Document queryObject = query != null ? query.getQueryObject() : new Document();
|
||||
Document fieldsObject = query != null ? query.getFieldsObject() : new Document();
|
||||
|
||||
return template.doFind(getCollectionName(), queryObject, fieldsObject, domainType, returnType,
|
||||
getCursorPreparer(query, preparer));
|
||||
}
|
||||
|
||||
private CloseableIterator<T> doStream() {
|
||||
|
||||
return template.doStream(query != null ? query : new BasicQuery(new Document()), domainType, getCollectionName(),
|
||||
returnType);
|
||||
}
|
||||
|
||||
private CursorPreparer getCursorPreparer(Query query, CursorPreparer preparer) {
|
||||
return query == null || preparer != null ? preparer : template.new QueryCursorPreparer(query, domainType);
|
||||
}
|
||||
|
||||
private String getCollectionName() {
|
||||
return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType);
|
||||
}
|
||||
|
||||
private String asString() {
|
||||
return SerializationUtils.serializeToJsonSafely(query);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
static class DelegatingQueryCursorPreparer implements CursorPreparer {
|
||||
|
||||
private final CursorPreparer delegate;
|
||||
private Optional<Integer> limit = Optional.empty();
|
||||
|
||||
DelegatingQueryCursorPreparer(CursorPreparer delegate) {
|
||||
this.delegate = delegate;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FindIterable<Document> prepare(FindIterable<Document> cursor) {
|
||||
|
||||
FindIterable<Document> target = delegate.prepare(cursor);
|
||||
|
||||
return limit.map(target::limit).orElse(target);
|
||||
}
|
||||
|
||||
CursorPreparer limit(int limit) {
|
||||
|
||||
this.limit = Optional.of(limit);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,137 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Collection;
|
||||
|
||||
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
|
||||
|
||||
import com.mongodb.bulk.BulkWriteResult;
|
||||
|
||||
/**
|
||||
* {@link ExecutableInsertOperation} allows creation and execution of MongoDB insert and bulk insert operations in a
|
||||
* fluent API style. <br />
|
||||
* The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there
|
||||
* via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows overriding the
|
||||
* collection name for the execution.
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* insert(Jedi.class)
|
||||
* .inCollection("star-wars")
|
||||
* .one(luke);
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
public interface ExecutableInsertOperation {
|
||||
|
||||
/**
|
||||
* Start creating an insert operation for given {@literal domainType}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return new instance of {@link InsertOperation}.
|
||||
* @throws IllegalArgumentException if domainType is {@literal null}.
|
||||
*/
|
||||
<T> InsertOperation<T> insert(Class<T> domainType);
|
||||
|
||||
/**
|
||||
* Trigger insert execution by calling one of the terminating methods.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface TerminatingInsertOperation<T> extends TerminatingBulkInsertOperation<T> {
|
||||
|
||||
/**
|
||||
* Insert exactly one object.
|
||||
*
|
||||
* @param object must not be {@literal null}.
|
||||
* @throws IllegalArgumentException if object is {@literal null}.
|
||||
*/
|
||||
void one(T object);
|
||||
|
||||
/**
|
||||
* Insert a collection of objects.
|
||||
*
|
||||
* @param objects must not be {@literal null}.
|
||||
* @throws IllegalArgumentException if objects is {@literal null}.
|
||||
*/
|
||||
void all(Collection<? extends T> objects);
|
||||
}
|
||||
|
||||
/**
|
||||
* Trigger bulk insert execution by calling one of the terminating methods.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface TerminatingBulkInsertOperation<T> {
|
||||
|
||||
/**
|
||||
* Bulk write collection of objects.
|
||||
*
|
||||
* @param objects must not be {@literal null}.
|
||||
* @return resulting {@link BulkWriteResult}.
|
||||
* @throws IllegalArgumentException if objects is {@literal null}.
|
||||
*/
|
||||
BulkWriteResult bulk(Collection<? extends T> objects);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface InsertOperation<T>
|
||||
extends TerminatingInsertOperation<T>, InsertOperationWithCollection<T>, InsertOperationWithBulkMode<T> {}
|
||||
|
||||
/**
|
||||
* Collection override (Optional).
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface InsertOperationWithCollection<T> {
|
||||
|
||||
/**
|
||||
* Explicitly set the name of the collection. <br />
|
||||
* Skip this step to use the default collection derived from the domain type.
|
||||
*
|
||||
* @param collection must not be {@literal null} nor {@literal empty}.
|
||||
* @return new instance of {@link InsertOperationWithBulkMode}.
|
||||
* @throws IllegalArgumentException if collection is {@literal null}.
|
||||
*/
|
||||
InsertOperationWithBulkMode<T> inCollection(String collection);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface InsertOperationWithBulkMode<T> extends TerminatingInsertOperation<T> {
|
||||
|
||||
/**
|
||||
* Define the {@link BulkMode} to use for bulk insert operation.
|
||||
*
|
||||
* @param bulkMode must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingBulkInsertOperation}.
|
||||
* @throws IllegalArgumentException if bulkMode is {@literal null}.
|
||||
*/
|
||||
TerminatingBulkInsertOperation<T> withBulkMode(BulkMode bulkMode);
|
||||
}
|
||||
}
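
Besides the one/all methods shown in the Javadoc example, the interface exposes a bulk path. A hedged sketch, assuming the template exposes ExecutableInsertOperation ('jedis' is a placeholder Collection<Jedi>):

// assumed imports: com.mongodb.bulk.BulkWriteResult,
//                  org.springframework.data.mongodb.core.BulkOperations.BulkMode
BulkWriteResult result = template.insert(Jedi.class)
        .inCollection("star-wars")
        .withBulkMode(BulkMode.UNORDERED) // ORDERED is used when no mode is set
        .bulk(jedis);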
|
||||
@@ -0,0 +1,117 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
|
||||
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.bulk.BulkWriteResult;
|
||||
|
||||
/**
|
||||
* Implementation of {@link ExecutableInsertOperation}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
class ExecutableInsertOperationSupport implements ExecutableInsertOperation {
|
||||
|
||||
private final MongoTemplate template;
|
||||
|
||||
/**
|
||||
* Create new {@link ExecutableInsertOperationSupport}.
|
||||
*
|
||||
* @param template must not be {@literal null}.
|
||||
* @throws IllegalArgumentException if template is {@literal null}.
|
||||
*/
|
||||
ExecutableInsertOperationSupport(MongoTemplate template) {
|
||||
|
||||
Assert.notNull(template, "Template must not be null!");
|
||||
|
||||
this.template = template;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> InsertOperation<T> insert(Class<T> domainType) {
|
||||
|
||||
Assert.notNull(domainType, "DomainType must not be null!");
|
||||
|
||||
return new InsertOperationSupport<>(template, domainType, null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
static class InsertOperationSupport<T> implements InsertOperation<T> {
|
||||
|
||||
private final MongoTemplate template;
|
||||
private final Class<T> domainType;
|
||||
private final String collection;
|
||||
private final BulkMode bulkMode;
|
||||
|
||||
@Override
|
||||
public void one(T object) {
|
||||
|
||||
Assert.notNull(object, "Object must not be null!");
|
||||
|
||||
template.insert(object, getCollectionName());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void all(Collection<? extends T> objects) {
|
||||
|
||||
Assert.notNull(objects, "Objects must not be null!");
|
||||
|
||||
template.insert(objects, getCollectionName());
|
||||
}
|
||||
|
||||
@Override
|
||||
public BulkWriteResult bulk(Collection<? extends T> objects) {
|
||||
|
||||
Assert.notNull(objects, "Objects must not be null!");
|
||||
|
||||
return template.bulkOps(bulkMode != null ? bulkMode : BulkMode.ORDERED, domainType, getCollectionName())
|
||||
.insert(new ArrayList<>(objects)).execute();
|
||||
}
|
||||
|
||||
@Override
|
||||
public InsertOperationWithBulkMode<T> inCollection(String collection) {
|
||||
|
||||
Assert.hasText(collection, "Collection must not be null nor empty.");
|
||||
|
||||
return new InsertOperationSupport<>(template, domainType, collection, bulkMode);
|
||||
}
|
||||
|
||||
@Override
|
||||
public TerminatingBulkInsertOperation<T> withBulkMode(BulkMode bulkMode) {
|
||||
|
||||
Assert.notNull(bulkMode, "BulkMode must not be null!");
|
||||
|
||||
return new InsertOperationSupport<>(template, domainType, collection, bulkMode);
|
||||
}
|
||||
|
||||
private String getCollectionName() {
|
||||
return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,120 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
|
||||
import com.mongodb.client.result.DeleteResult;
|
||||
|
||||
/**
|
||||
* {@link ExecutableRemoveOperation} allows creation and execution of MongoDB remove / findAndRemove operations in a
|
||||
* fluent API style. <br />
|
||||
* The starting {@literal domainType} is used for mapping the {@link Query} provided via {@code matching} into the
|
||||
* MongoDB specific representation. The collection to operate on is by default derived from the initial
|
||||
* {@literal domainType} and can be defined there via {@link org.springframework.data.mongodb.core.mapping.Document}.
|
||||
* Using {@code inCollection} allows overriding the collection name for the execution.
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* remove(Jedi.class)
|
||||
* .inCollection("star-wars")
|
||||
* .matching(query(where("firstname").is("luke")))
|
||||
* .all();
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
public interface ExecutableRemoveOperation {
|
||||
|
||||
/**
|
||||
* Start creating a remove operation for the given {@literal domainType}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return new instance of {@link RemoveOperation}.
|
||||
* @throws IllegalArgumentException if domainType is {@literal null}.
|
||||
*/
|
||||
<T> RemoveOperation<T> remove(Class<T> domainType);
|
||||
|
||||
/**
|
||||
* Collection override (Optional).
|
||||
*
|
||||
* @param <T>
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface RemoveOperationWithCollection<T> extends RemoveOperationWithQuery<T> {
|
||||
|
||||
/**
|
||||
* Explicitly set the name of the collection to perform the query on. <br />
|
||||
* Skip this step to use the default collection derived from the domain type.
|
||||
*
|
||||
* @param collection must not be {@literal null} nor {@literal empty}.
|
||||
* @return new instance of {@link RemoveOperationWithCollection}.
|
||||
* @throws IllegalArgumentException if collection is {@literal null}.
|
||||
*/
|
||||
RemoveOperationWithQuery<T> inCollection(String collection);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface TerminatingRemoveOperation<T> {
|
||||
|
||||
/**
|
||||
* Remove all documents matching.
|
||||
*
|
||||
* @return the {@link DeleteResult}. Never {@literal null}.
|
||||
*/
|
||||
DeleteResult all();
|
||||
|
||||
/**
|
||||
* Remove and return all matching documents. <br/>
|
||||
* <strong>NOTE</strong> The entire list of documents will be fetched before sending the actual delete commands.
|
||||
* Also, {@link org.springframework.context.ApplicationEvent}s will be published for each and every delete
|
||||
* operation.
|
||||
*
|
||||
* @return empty {@link List} if no match found. Never {@literal null}.
|
||||
*/
|
||||
List<T> findAndRemove();
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface RemoveOperationWithQuery<T> extends TerminatingRemoveOperation<T> {
|
||||
|
||||
/**
|
||||
* Define the query filtering elements.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingRemoveOperation}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
TerminatingRemoveOperation<T> matching(Query query);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface RemoveOperation<T> extends RemoveOperationWithCollection<T> {}
|
||||
}
|
||||
@@ -0,0 +1,113 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.client.result.DeleteResult;
|
||||
|
||||
/**
|
||||
* Implementation of {@link ExecutableRemoveOperation}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
class ExecutableRemoveOperationSupport implements ExecutableRemoveOperation {
|
||||
|
||||
private final MongoTemplate template;
|
||||
|
||||
/**
|
||||
* Create new {@link ExecutableRemoveOperationSupport}.
|
||||
*
|
||||
* @param template must not be {@literal null}.
|
||||
* @throws IllegalArgumentException if template is {@literal null}.
|
||||
*/
|
||||
ExecutableRemoveOperationSupport(MongoTemplate template) {
|
||||
|
||||
Assert.notNull(template, "Template must not be null!");
|
||||
|
||||
this.template = template;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> RemoveOperation<T> remove(Class<T> domainType) {
|
||||
|
||||
Assert.notNull(domainType, "DomainType must not be null!");
|
||||
|
||||
return new RemoveOperationSupport<>(template, null, domainType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
static class RemoveOperationSupport<T> implements RemoveOperation<T>, RemoveOperationWithCollection<T> {
|
||||
|
||||
private final MongoTemplate template;
|
||||
private final Query query;
|
||||
private final Class<T> domainType;
|
||||
private final String collection;
|
||||
|
||||
@Override
|
||||
public RemoveOperationWithQuery<T> inCollection(String collection) {
|
||||
|
||||
Assert.hasText(collection, "Collection must not be null nor empty!");
|
||||
|
||||
return new RemoveOperationSupport<>(template, query, domainType, collection);
|
||||
}
|
||||
|
||||
@Override
|
||||
public TerminatingRemoveOperation<T> matching(Query query) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
|
||||
return new RemoveOperationSupport<>(template, query, domainType, collection);
|
||||
}
|
||||
|
||||
@Override
|
||||
public DeleteResult all() {
|
||||
|
||||
String collectionName = getCollectionName();
|
||||
|
||||
return template.doRemove(collectionName, getQuery(), domainType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<T> findAndRemove() {
|
||||
|
||||
String collectionName = getCollectionName();
|
||||
|
||||
return template.doFindAndDelete(collectionName, getQuery(), domainType);
|
||||
}
|
||||
|
||||
private String getCollectionName() {
|
||||
return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType);
|
||||
}
|
||||
|
||||
private Query getQuery() {
|
||||
return query != null ? query : new BasicQuery(new Document());
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,180 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
|
||||
import com.mongodb.client.result.UpdateResult;
|
||||
|
||||
/**
|
||||
* {@link ExecutableUpdateOperation} allows creation and execution of MongoDB update / findAndModify operations in a
|
||||
* fluent API style. <br />
|
||||
* The starting {@literal domainType} is used for mapping the {@link Query} provided via {@code matching}, as well as
|
||||
* the {@link Update} via {@code apply} into the MongoDB specific representations. The collection to operate on is by
|
||||
* default derived from the initial {@literal domainType} and can be defined there via
|
||||
* {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows overriding the
|
||||
* collection name for the execution.
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* update(Jedi.class)
|
||||
* .inCollection("star-wars")
|
||||
* .matching(query(where("firstname").is("luke")))
|
||||
* .apply(new Update().set("lastname", "skywalker"))
|
||||
* .upsert();
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
public interface ExecutableUpdateOperation {
|
||||
|
||||
/**
|
||||
* Start creating an update operation for the given {@literal domainType}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return new instance of {@link UpdateOperation}.
|
||||
* @throws IllegalArgumentException if domainType is {@literal null}.
|
||||
*/
|
||||
<T> UpdateOperation<T> update(Class<T> domainType);
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface UpdateOperation<T>
|
||||
extends UpdateOperationWithCollection<T>, UpdateOperationWithQuery<T>, UpdateOperationWithUpdate<T> {}
|
||||
|
||||
/**
|
||||
* Declare the {@link Update} to apply.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface UpdateOperationWithUpdate<T> {
|
||||
|
||||
/**
|
||||
* Set the {@link Update} to be applied.
|
||||
*
|
||||
* @param update must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingUpdateOperation}.
|
||||
* @throws IllegalArgumentException if update is {@literal null}.
|
||||
*/
|
||||
TerminatingUpdateOperation<T> apply(Update update);
|
||||
}
|
||||
|
||||
/**
|
||||
* Explicitly define the name of the collection to perform the operation in.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface UpdateOperationWithCollection<T> {
|
||||
|
||||
/**
|
||||
* Explicitly set the name of the collection to perform the query on. <br />
|
||||
* Skip this step to use the default collection derived from the domain type.
|
||||
*
|
||||
* @param collection must not be {@literal null} nor {@literal empty}.
|
||||
* @return new instance of {@link UpdateOperationWithQuery}.
|
||||
* @throws IllegalArgumentException if collection is {@literal null}.
|
||||
*/
|
||||
UpdateOperationWithQuery<T> inCollection(String collection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define a filter query for the {@link Update}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface UpdateOperationWithQuery<T> extends UpdateOperationWithUpdate<T> {
|
||||
|
||||
/**
|
||||
* Filter documents by given {@literal query}.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link UpdateOperationWithUpdate}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
UpdateOperationWithUpdate<T> matching(Query query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define {@link FindAndModifyOptions}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface FindAndModifyWithOptions<T> {
|
||||
|
||||
/**
|
||||
* Explicitly define {@link FindAndModifyOptions} for the {@link Update}.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingFindAndModifyOperation}.
|
||||
* @throws IllegalArgumentException if options is {@literal null}.
|
||||
*/
|
||||
TerminatingFindAndModifyOperation<T> withOptions(FindAndModifyOptions options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Trigger findAndModify execution by calling one of the terminating methods.
|
||||
*/
|
||||
interface TerminatingFindAndModifyOperation<T> {
|
||||
|
||||
/**
|
||||
* Find, modify and return the first matching document.
|
||||
*
|
||||
* @return {@link Optional#empty()} if nothing found.
|
||||
*/
|
||||
Optional<T> findAndModify();
|
||||
}
|
||||
|
||||
/**
|
||||
* Trigger update execution by calling one of the terminating methods.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface TerminatingUpdateOperation<T> extends TerminatingFindAndModifyOperation<T>, FindAndModifyWithOptions<T> {
|
||||
|
||||
/**
|
||||
* Update all matching documents in the collection.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
UpdateResult all();
|
||||
|
||||
/**
|
||||
* Update the first matching document in the collection.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
UpdateResult first();
|
||||
|
||||
/**
|
||||
* Creates a new document if no documents match the filter query, or updates the matching ones.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
UpdateResult upsert();
|
||||
}
|
||||
}
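For orientation, here is a minimal usage sketch of the fluent update API declared above. It assumes a configured `MongoTemplate` named `template` and a hypothetical `Jedi` domain class; `query` and `where` are the usual static imports from `Query` and `Criteria`.

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

// Update only the first matching document; all() and upsert() follow the same chain.
UpdateResult result = template.update(Jedi.class)
        .matching(query(where("firstname").is("luke")))
        .apply(new Update().set("lastname", "skywalker"))
        .first();

// findAndModify() returns the affected document wrapped in an Optional.
Optional<Jedi> modified = template.update(Jedi.class)
        .matching(query(where("firstname").is("han")))
        .apply(new Update().set("lastname", "solo"))
        .withOptions(new FindAndModifyOptions().returnNew(true))
        .findAndModify();
```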
|
||||
@@ -0,0 +1,145 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.client.result.UpdateResult;
|
||||
|
||||
/**
|
||||
* Implementation of {@link ExecutableUpdateOperation}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
|
||||
|
||||
private final MongoTemplate template;
|
||||
|
||||
/**
|
||||
* Creates new {@link ExecutableUpdateOperationSupport}.
|
||||
*
|
||||
* @param template must not be {@literal null}.
|
||||
*/
|
||||
ExecutableUpdateOperationSupport(MongoTemplate template) {
|
||||
|
||||
Assert.notNull(template, "Template must not be null!");
|
||||
|
||||
this.template = template;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> UpdateOperation<T> update(Class<T> domainType) {
|
||||
|
||||
Assert.notNull(domainType, "DomainType must not be null!");
|
||||
|
||||
return new UpdateOperationSupport<>(template, null, domainType, null, null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
static class UpdateOperationSupport<T> implements UpdateOperation<T>, UpdateOperationWithCollection<T>,
|
||||
UpdateOperationWithQuery<T>, TerminatingUpdateOperation<T> {
|
||||
|
||||
private final MongoTemplate template;
|
||||
private final Query query;
|
||||
private final Class<T> domainType;
|
||||
private final Update update;
|
||||
private final String collection;
|
||||
private final FindAndModifyOptions options;
|
||||
|
||||
@Override
|
||||
public TerminatingUpdateOperation<T> apply(Update update) {
|
||||
|
||||
Assert.notNull(update, "Update must not be null!");
|
||||
|
||||
return new UpdateOperationSupport<>(template, query, domainType, update, collection, options);
|
||||
}
|
||||
|
||||
@Override
|
||||
public UpdateOperationWithQuery<T> inCollection(String collection) {
|
||||
|
||||
Assert.hasText(collection, "Collection must not be null nor empty!");
|
||||
|
||||
return new UpdateOperationSupport<>(template, query, domainType, update, collection, options);
|
||||
}
|
||||
|
||||
@Override
|
||||
public UpdateResult first() {
|
||||
return doUpdate(false, false);
|
||||
}
|
||||
|
||||
@Override
|
||||
public UpdateResult upsert() {
|
||||
return doUpdate(true, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<T> findAndModify() {
|
||||
|
||||
String collectionName = getCollectionName();
|
||||
|
||||
return Optional.ofNullable(template.findAndModify(query != null ? query : new BasicQuery(new Document()), update,
|
||||
options, domainType, collectionName));
|
||||
}
|
||||
|
||||
@Override
|
||||
public UpdateOperationWithUpdate<T> matching(Query query) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
|
||||
return new UpdateOperationSupport<>(template, query, domainType, update, collection, options);
|
||||
}
|
||||
|
||||
@Override
|
||||
public UpdateResult all() {
|
||||
return doUpdate(true, false);
|
||||
}
|
||||
|
||||
@Override
|
||||
public TerminatingFindAndModifyOperation<T> withOptions(FindAndModifyOptions options) {
|
||||
|
||||
Assert.notNull(options, "Options must not be null!");
|
||||
|
||||
return new UpdateOperationSupport<>(template, query, domainType, update, collection, options);
|
||||
}
|
||||
|
||||
private UpdateResult doUpdate(boolean multi, boolean upsert) {
|
||||
|
||||
String collectionName = getCollectionName();
|
||||
|
||||
Query query = this.query != null ? this.query : new BasicQuery(new Document());
|
||||
|
||||
return template.doUpdate(collectionName, query, update, domainType, upsert, multi);
|
||||
}
|
||||
|
||||
private String getCollectionName() {
|
||||
return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,25 @@
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;

/**
* Stripped down interface providing access to a fluent API that specifies a basic set of MongoDB operations.
*
* @author Christoph Strobl
* @since 2.0
*/
public interface FluentMongoOperations extends ExecutableFindOperation, ExecutableInsertOperation,
ExecutableUpdateOperation, ExecutableRemoveOperation, ExecutableAggregationOperation {}
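Since `MongoOperations` now extends this interface (see the following hunk), the fluent entry points become available on any `MongoTemplate`. A hedged sketch, assuming a `template` bean and a hypothetical `Person` domain class:

```java
// Remove all inactive Person documents via the fluent remove operation.
DeleteResult deleted = template.remove(Person.class)
        .matching(query(where("active").is(false)))
        .all();

// Alternatively, fetch and delete the matching documents in one go.
List<Person> removed = template.remove(Person.class)
        .matching(query(where("active").is(false)))
        .findAndRemove();
```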
@@ -58,7 +58,7 @@ import com.mongodb.client.result.UpdateResult;
* @author Thomas Darimont
* @author Maninder Singh
*/
public interface MongoOperations {
public interface MongoOperations extends FluentMongoOperations {

/**
* The collection name used for the specified class by this template.

@@ -20,8 +20,19 @@ import static org.springframework.data.mongodb.core.query.SerializationUtils.*;
|
||||
import static org.springframework.data.util.Optionals.*;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Optional;
|
||||
import java.util.Scanner;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.bson.Document;
|
||||
@@ -333,10 +344,16 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
*/
|
||||
@Override
|
||||
public <T> CloseableIterator<T> stream(final Query query, final Class<T> entityType, final String collectionName) {
|
||||
return doStream(query, entityType, collectionName, entityType);
|
||||
}
|
||||
|
||||
protected <T> CloseableIterator<T> doStream(final Query query, final Class<?> entityType, final String collectionName,
|
||||
Class<T> returnType) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.notNull(entityType, "Entity type must not be null!");
|
||||
Assert.hasText(collectionName, "Collection name must not be null or empty!");
|
||||
Assert.notNull(returnType, "ReturnType must not be null!");
|
||||
|
||||
return execute(collectionName, new CollectionCallback<CloseableIterator<T>>() {
|
||||
|
||||
@@ -353,7 +370,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
.prepare(collection.find(mappedQuery).projection(mappedFields));
|
||||
|
||||
return new CloseableIterableCursorAdapter<T>(cursor, exceptionTranslator,
|
||||
new ReadDocumentCallback<T>(mongoConverter, entityType, collectionName));
|
||||
new ReadDocumentCallback<T>(mongoConverter, returnType, collectionName));
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -633,17 +650,21 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public <T> GeoResults<T> geoNear(NearQuery near, Class<T> entityClass, String collectionName) {
|
||||
public <T> GeoResults<T> geoNear(NearQuery near, Class<T> domainType, String collectionName) {
|
||||
return geoNear(near, domainType, collectionName, domainType);
|
||||
}
|
||||
|
||||
public <T> GeoResults<T> geoNear(NearQuery near, Class<?> domainType, String collectionName, Class<T> returnType) {
|
||||
|
||||
if (near == null) {
|
||||
throw new InvalidDataAccessApiUsageException("NearQuery must not be null!");
|
||||
}
|
||||
|
||||
if (entityClass == null) {
|
||||
if (domainType == null) {
|
||||
throw new InvalidDataAccessApiUsageException("Entity class must not be null!");
|
||||
}
|
||||
|
||||
String collection = StringUtils.hasText(collectionName) ? collectionName : determineCollectionName(entityClass);
|
||||
String collection = StringUtils.hasText(collectionName) ? collectionName : determineCollectionName(domainType);
|
||||
Document nearDocument = near.toDocument();
|
||||
|
||||
Document command = new Document("geoNear", collection);
|
||||
@@ -651,12 +672,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
if (nearDocument.containsKey("query")) {
|
||||
Document query = (Document) nearDocument.get("query");
|
||||
command.put("query", queryMapper.getMappedObject(query, getPersistentEntity(entityClass)));
|
||||
command.put("query", queryMapper.getMappedObject(query, getPersistentEntity(domainType)));
|
||||
}
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Executing geoNear using: {} for class: {} in collection: {}", serializeToJsonSafely(command),
|
||||
entityClass, collectionName);
|
||||
domainType, collectionName);
|
||||
}
|
||||
|
||||
Document commandResult = executeCommand(command, this.readPreference);
|
||||
@@ -664,7 +685,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
results = results == null ? Collections.emptyList() : results;
|
||||
|
||||
DocumentCallback<GeoResult<T>> callback = new GeoNearResultDocumentCallback<T>(
|
||||
new ReadDocumentCallback<T>(mongoConverter, entityClass, collectionName), near.getMetric());
|
||||
new ReadDocumentCallback<T>(mongoConverter, returnType, collectionName), near.getMetric());
|
||||
List<GeoResult<T>> result = new ArrayList<GeoResult<T>>(results.size());
|
||||
|
||||
int index = 0;
|
||||
@@ -1625,8 +1646,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
return aggregateStream(aggregation, collectionName, outputType, null);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.MongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
@@ -1634,8 +1654,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
return findAndRemove(query, null, collectionName);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.MongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
@@ -1643,8 +1662,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
return findAllAndRemove(query, entityClass, determineCollectionName(entityClass));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.MongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
@@ -1765,6 +1783,51 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableFindOperation#query(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public <T> FindOperation<T> query(Class<T> domainType) {
|
||||
return new ExecutableFindOperationSupport(this).query(domainType);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableUpdateOperation#update(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public <T> UpdateOperation<T> update(Class<T> domainType) {
|
||||
return new ExecutableUpdateOperationSupport(this).update(domainType);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableRemoveOperation#remove(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public <T> RemoveOperation<T> remove(Class<T> domainType) {
|
||||
return new ExecutableRemoveOperationSupport(this).remove(domainType);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableAggregationOperation#aggregateAndReturn(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public <T> AggregationOperation<T> aggregateAndReturn(Class<T> domainType) {
|
||||
return new ExecutableAggregationOperationSupport(this).aggregateAndReturn(domainType);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableInsertOperation#insert(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public <T> InsertOperation<T> insert(Class<T> domainType) {
|
||||
return new ExecutableInsertOperationSupport(this).insert(domainType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Assert that the {@link Document} does not enable Aggregation explain mode.
|
||||
*
|
||||
@@ -1947,20 +2010,37 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
collectionName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the default MongoDB collection to a List of the specified targetClass while
|
||||
* using sourceClass for mapping the query.
|
||||
*
|
||||
* @since 2.0
|
||||
*/
|
||||
<S, T> List<T> doFind(String collectionName, Document query, Document fields, Class<S> sourceClass,
|
||||
Class<T> targetClass, CursorPreparer preparer) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getRequiredPersistentEntity(sourceClass);
|
||||
|
||||
Document mappedFields = queryMapper.getMappedFields(fields, entity);
|
||||
Document mappedQuery = queryMapper.getMappedObject(query, entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("find using query: {} fields: {} for class: {} in collection: {}",
|
||||
serializeToJsonSafely(mappedQuery), mappedFields, sourceClass, collectionName);
|
||||
}
|
||||
|
||||
return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer,
|
||||
new ReadDocumentCallback<T>(mongoConverter, targetClass, collectionName), collectionName);
|
||||
}
|
||||
|
||||
protected Document convertToDocument(CollectionOptions collectionOptions) {
|
||||
|
||||
Document document = new Document();
|
||||
if (collectionOptions != null) {
|
||||
if (collectionOptions.getCapped() != null) {
|
||||
document.put("capped", collectionOptions.getCapped().booleanValue());
|
||||
}
|
||||
if (collectionOptions.getSize() != null) {
|
||||
document.put("size", collectionOptions.getSize().intValue());
|
||||
}
|
||||
if (collectionOptions.getMaxDocuments() != null) {
|
||||
document.put("max", collectionOptions.getMaxDocuments().intValue());
|
||||
}
|
||||
|
||||
collectionOptions.getCapped().ifPresent(val -> document.put("capped", val));
|
||||
collectionOptions.getSize().ifPresent(val -> document.put("size", val));
|
||||
collectionOptions.getMaxDocuments().ifPresent(val -> document.put("max", val));
|
||||
collectionOptions.getCollation().ifPresent(val -> document.append("collation", val.toDocument()));
|
||||
}
|
||||
return document;
|
||||
@@ -2181,11 +2261,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
}
|
||||
|
||||
private Optional<? extends MongoPersistentEntity<?>> getPersistentEntity(Class<?> type) {
|
||||
return Optional.ofNullable(type).flatMap(it -> mappingContext.getPersistentEntity(it));
|
||||
return Optional.ofNullable(type).flatMap(mappingContext::getPersistentEntity);
|
||||
}
|
||||
|
||||
private Optional<MongoPersistentProperty> getIdPropertyFor(Class<?> type) {
|
||||
return mappingContext.getPersistentEntity(type).flatMap(it -> it.getIdProperty());
|
||||
return mappingContext.getPersistentEntity(type).flatMap(PersistentEntity::getIdProperty);
|
||||
}
|
||||
|
||||
private <T> String determineEntityCollectionName(T obj) {
|
||||
@@ -2669,4 +2749,5 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
public MongoDbFactory getMongoDbFactory() {
|
||||
return mongoDbFactory;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1626,17 +1626,10 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
CreateCollectionOptions result = new CreateCollectionOptions();
|
||||
if (collectionOptions != null) {
|
||||
|
||||
if (collectionOptions.getCapped() != null) {
|
||||
result = result.capped(collectionOptions.getCapped());
|
||||
}
|
||||
|
||||
if (collectionOptions.getSize() != null) {
|
||||
result = result.sizeInBytes(collectionOptions.getSize());
|
||||
}
|
||||
|
||||
if (collectionOptions.getMaxDocuments() != null) {
|
||||
result = result.maxDocuments(collectionOptions.getMaxDocuments());
|
||||
}
|
||||
collectionOptions.getCapped().ifPresent(result::capped);
|
||||
collectionOptions.getSize().ifPresent(result::sizeInBytes);
|
||||
collectionOptions.getMaxDocuments().ifPresent(result::maxDocuments);
|
||||
collectionOptions.getCollation().map(Collation::toMongoCollation).ifPresent(result::collation);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013-2016 the original author or authors.
|
||||
* Copyright 2013-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -49,9 +49,10 @@ import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* Renders the AST of a SpEL expression as a MongoDB Aggregation Framework projection expression.
|
||||
*
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
|
||||
@@ -84,7 +85,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
* {@link AggregationOperationContext} {@code context}.
|
||||
* <p>
|
||||
* Exposes the given @{code params} as <code>[0] ... [n]</code>.
|
||||
*
|
||||
*
|
||||
* @param expression must not be {@literal null}
|
||||
* @param context must not be {@literal null}
|
||||
* @param params must not be {@literal null}
|
||||
@@ -114,7 +115,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
/**
|
||||
* Returns an appropriate {@link ExpressionNodeConversion} for the given {@code node}. Throws an
|
||||
* {@link IllegalArgumentException} if no conversion could be found.
|
||||
*
|
||||
*
|
||||
* @param node
|
||||
* @return the appropriate {@link ExpressionNodeConversion} for the given {@link ExpressionNode}.
|
||||
*/
|
||||
@@ -133,7 +134,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
|
||||
/**
|
||||
* Abstract base class for {@link SpelNode} to (Db)-object conversions.
|
||||
*
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@@ -145,7 +146,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExpressionNodeConversion}.
|
||||
*
|
||||
*
|
||||
* @param transformer must not be {@literal null}.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
@@ -161,7 +162,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
/**
|
||||
* Returns whether the current conversion supports the given {@link ExpressionNode}. By default we will match the
|
||||
* node type against the generic type the subclass binds the type parameter to.
|
||||
*
|
||||
*
|
||||
* @param node will never be {@literal null}.
|
||||
* @return true if {@literal this} conversion can be applied to the given {@code node}.
|
||||
*/
|
||||
@@ -171,7 +172,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
|
||||
/**
|
||||
* Triggers the transformation for the given {@link ExpressionNode} and the given current context.
|
||||
*
|
||||
*
|
||||
* @param node must not be {@literal null}.
|
||||
* @param context must not be {@literal null}.
|
||||
* @return
|
||||
@@ -187,7 +188,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
/**
|
||||
* Triggers the transformation with the given new {@link ExpressionNode}, new parent node, the current operation and
|
||||
* the previous context.
|
||||
*
|
||||
*
|
||||
* @param node must not be {@literal null}.
|
||||
* @param parent
|
||||
* @param operation
|
||||
@@ -204,7 +205,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
context.getAggregationContext()));
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.NodeConversion#transform(org.springframework.data.mongodb.core.aggregation.AggregationExpressionTransformer.AggregationExpressionTransformationContext)
|
||||
*/
|
||||
@@ -215,7 +216,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
|
||||
/**
|
||||
* Performs the actual conversion from {@link SpelNode} to the corresponding representation for MongoDB.
|
||||
*
|
||||
*
|
||||
* @param context
|
||||
* @return
|
||||
*/
|
||||
@@ -224,7 +225,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
|
||||
/**
|
||||
* A {@link ExpressionNodeConversion} that converts arithmetic operations.
|
||||
*
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
private static class OperatorNodeConversion extends ExpressionNodeConversion<OperatorNode> {
|
||||
@@ -233,7 +234,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
super(transformer);
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext)
|
||||
*/
|
||||
@@ -258,8 +259,10 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
return convertUnaryMinusOp(context, leftResult);
|
||||
}
|
||||
|
||||
// we deliberately ignore the RHS result
|
||||
transform(currentNode.getRight(), currentNode, operationObject, context);
|
||||
if (!currentNode.isUnaryOperator()) {
|
||||
// we deliberately ignore the RHS result
|
||||
transform(currentNode.getRight(), currentNode, operationObject, context);
|
||||
}
|
||||
|
||||
return operationObject;
|
||||
}
|
||||
@@ -299,7 +302,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
return result;
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#supports(java.lang.Class)
|
||||
*/
|
||||
@@ -311,7 +314,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
|
||||
/**
|
||||
* A {@link ExpressionNodeConversion} that converts indexed expressions.
|
||||
*
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@@ -321,7 +324,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
super(transformer);
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext)
|
||||
*/
|
||||
@@ -330,7 +333,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
return context.addToPreviousOrReturn(context.getCurrentNode().getValue());
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.NodeConversion#supports(org.springframework.data.mongodb.core.spel.ExpressionNode)
|
||||
*/
|
||||
@@ -342,7 +345,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
|
||||
/**
|
||||
* A {@link ExpressionNodeConversion} that converts in-line list expressions.
|
||||
*
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
private static class InlineListNodeConversion extends ExpressionNodeConversion<ExpressionNode> {
|
||||
@@ -351,7 +354,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
super(transformer);
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext)
|
||||
*/
|
||||
@@ -368,7 +371,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
return transform(currentNode.getChild(0), currentNode, null, context);
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.NodeConversion#supports(org.springframework.data.mongodb.core.spel.ExpressionNode)
|
||||
*/
|
||||
@@ -380,7 +383,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
|
||||
/**
|
||||
* A {@link ExpressionNodeConversion} that converts property or field reference expressions.
|
||||
*
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@@ -401,7 +404,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
return context.addToPreviousOrReturn(fieldReference);
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.NodeConversion#supports(org.springframework.data.mongodb.core.spel.ExpressionNode)
|
||||
*/
|
||||
@@ -413,7 +416,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
|
||||
/**
|
||||
* A {@link ExpressionNodeConversion} that converts literal expressions.
|
||||
*
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@@ -423,7 +426,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
super(transformer);
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext)
|
||||
*/
|
||||
@@ -448,7 +451,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
return value;
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#supports(org.springframework.expression.spel.SpelNode)
|
||||
*/
|
||||
@@ -460,7 +463,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
|
||||
/**
|
||||
* A {@link ExpressionNodeConversion} that converts method reference expressions.
|
||||
*
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@@ -470,7 +473,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
super(transformer);
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext)
|
||||
*/
|
||||
@@ -489,7 +492,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
Document dbo = new Document();
|
||||
|
||||
int i = 0;
|
||||
for(ExpressionNode child : node) {
|
||||
for (ExpressionNode child : node) {
|
||||
dbo.put(methodReference.getArgumentMap()[i++], transform(child, context));
|
||||
}
|
||||
args = dbo;
|
||||
@@ -510,7 +513,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
|
||||
/**
|
||||
* A {@link ExpressionNodeConversion} that converts method compound expressions.
|
||||
*
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@@ -520,7 +523,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
super(transformer);
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext)
|
||||
*/
|
||||
@@ -537,7 +540,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
return context.addToPreviousOrReturn(currentNode.getValue());
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.NodeConversion#supports(org.springframework.data.mongodb.core.spel.ExpressionNode)
|
||||
*/
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013-2016 the original author or authors.
|
||||
* Copyright 2013-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -34,6 +34,7 @@ import org.springframework.expression.spel.ast.StringLiteral;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public class LiteralNode extends ExpressionNode {
|
||||
|
||||
@@ -56,7 +57,7 @@ public class LiteralNode extends ExpressionNode {
|
||||
|
||||
/**
|
||||
* Creates a new {@link LiteralNode} from the given {@link Literal} and {@link ExpressionState}.
|
||||
*
|
||||
*
|
||||
* @param node must not be {@literal null}.
|
||||
* @param state must not be {@literal null}.
|
||||
*/
|
||||
@@ -67,7 +68,7 @@ public class LiteralNode extends ExpressionNode {
|
||||
|
||||
/**
|
||||
* Returns whether the given {@link ExpressionNode} is a unary minus.
|
||||
*
|
||||
*
|
||||
* @param parent
|
||||
* @return
|
||||
*/
|
||||
@@ -78,7 +79,7 @@ public class LiteralNode extends ExpressionNode {
|
||||
}
|
||||
|
||||
OperatorNode operator = (OperatorNode) parent;
|
||||
return operator.isUnaryMinus() && operator.getRight() == null;
|
||||
return operator.isUnaryMinus();
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013-2016 the original author or authors.
|
||||
* Copyright 2013-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -22,21 +22,7 @@ import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.expression.spel.ExpressionState;
|
||||
import org.springframework.expression.spel.ast.OpAnd;
|
||||
import org.springframework.expression.spel.ast.OpDivide;
|
||||
import org.springframework.expression.spel.ast.OpEQ;
|
||||
import org.springframework.expression.spel.ast.OpGE;
|
||||
import org.springframework.expression.spel.ast.OpGT;
|
||||
import org.springframework.expression.spel.ast.OpLE;
|
||||
import org.springframework.expression.spel.ast.OpLT;
|
||||
import org.springframework.expression.spel.ast.OpMinus;
|
||||
import org.springframework.expression.spel.ast.OpModulus;
|
||||
import org.springframework.expression.spel.ast.OpMultiply;
|
||||
import org.springframework.expression.spel.ast.OpNE;
|
||||
import org.springframework.expression.spel.ast.OpOr;
|
||||
import org.springframework.expression.spel.ast.OpPlus;
|
||||
import org.springframework.expression.spel.ast.Operator;
|
||||
import org.springframework.expression.spel.ast.OperatorPower;
|
||||
import org.springframework.expression.spel.ast.*;
|
||||
|
||||
/**
|
||||
* An {@link ExpressionNode} representing an operator.
|
||||
@@ -44,6 +30,7 @@ import org.springframework.expression.spel.ast.OperatorPower;
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public class OperatorNode extends ExpressionNode {
|
||||
|
||||
@@ -102,7 +89,7 @@ public class OperatorNode extends ExpressionNode {
|
||||
this.operator = node;
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.spel.ExpressionNode#isMathematicalOperation()
|
||||
*/
|
||||
@@ -122,16 +109,16 @@ public class OperatorNode extends ExpressionNode {
|
||||
|
||||
/**
|
||||
* Returns whether the operator is unary.
|
||||
*
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean isUnaryOperator() {
|
||||
return operator.getRightOperand() == null;
|
||||
return operator.getChildCount() == 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the Mongo expression of the operator.
|
||||
*
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public String getMongoOperator() {
|
||||
@@ -147,7 +134,7 @@ public class OperatorNode extends ExpressionNode {
|
||||
|
||||
/**
|
||||
* Returns whether the operator is a unary minus, e.g. -1.
|
||||
*
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean isUnaryMinus() {
|
||||
@@ -156,7 +143,7 @@ public class OperatorNode extends ExpressionNode {
|
||||
|
||||
/**
|
||||
* Returns the left operand as {@link ExpressionNode}.
|
||||
*
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public ExpressionNode getLeft() {
|
||||
@@ -165,7 +152,7 @@ public class OperatorNode extends ExpressionNode {
|
||||
|
||||
/**
|
||||
* Returns the right operand as {@link ExpressionNode}.
|
||||
*
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public ExpressionNode getRight() {
|
||||
|
||||
@@ -69,6 +69,6 @@ public class GridFsCriteria extends Criteria {
|
||||
* @return
|
||||
*/
|
||||
public static GridFsCriteria whereContentType() {
|
||||
return new GridFsCriteria("metadata.type");
|
||||
return new GridFsCriteria("metadata.".concat(GridFsResource.CONTENT_TYPE_FIELD));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -33,6 +33,8 @@ import com.mongodb.gridfs.GridFSDBFile;
|
||||
*/
|
||||
public class GridFsResource extends InputStreamResource {
|
||||
|
||||
static final String CONTENT_TYPE_FIELD = "_contentType";
|
||||
|
||||
private final GridFSFile file;
|
||||
|
||||
/**
|
||||
@@ -44,6 +46,12 @@ public class GridFsResource extends InputStreamResource {
|
||||
this(file, new ByteArrayInputStream(new byte[] {}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GridFsResource} from the given {@link GridFSDBFile} and {@link InputStream}.
|
||||
*
|
||||
* @param file must not be {@literal null}.
|
||||
* @param inputStream must not be {@literal null}.
|
||||
*/
|
||||
public GridFsResource(GridFSFile file, InputStream inputStream) {
|
||||
|
||||
super(inputStream);
|
||||
@@ -91,7 +99,11 @@ public class GridFsResource extends InputStreamResource {
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public String getContentType() {
|
||||
return file.getContentType();
|
||||
|
||||
String contentType = file.getMetadata().get(CONTENT_TYPE_FIELD, String.class);
|
||||
|
||||
return contentType != null ? contentType : file.getContentType();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -162,20 +162,21 @@ public class GridFsTemplate implements GridFsOperations, ResourcePatternResolver
|
||||
|
||||
Assert.notNull(content, "InputStream must not be null!");
|
||||
|
||||
GridFSUploadOptions opts = new GridFSUploadOptions();
|
||||
GridFSUploadOptions options = new GridFSUploadOptions();
|
||||
|
||||
Document mData = new Document();
|
||||
|
||||
if (StringUtils.hasText(contentType)) {
|
||||
mData.put("type", contentType);
|
||||
mData.put(GridFsResource.CONTENT_TYPE_FIELD, contentType);
|
||||
}
|
||||
|
||||
if (metadata != null) {
|
||||
mData.putAll(metadata);
|
||||
}
|
||||
|
||||
opts.metadata(mData);
|
||||
options.metadata(mData);
|
||||
|
||||
return getGridFs().uploadFromStream(filename, content, opts);
|
||||
return getGridFs().uploadFromStream(filename, content, options);
|
||||
}
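As a hedged sketch of the round trip (assuming a `GridFsTemplate` bean named `gridFsTemplate` and a local file): the content type passed to `store(...)` now ends up in the file metadata under `_contentType`, which `GridFsResource#getContentType()` reads back, falling back to the deprecated driver field.

```java
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

try (InputStream in = Files.newInputStream(Paths.get("star-wars.txt"))) {
    gridFsTemplate.store(in, "star-wars.txt", "text/plain");
}

GridFsResource resource = gridFsTemplate.getResource("star-wars.txt");
String contentType = resource.getContentType(); // resolved from metadata._contentType
```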
|
||||
|
||||
/*
|
||||
@@ -228,7 +229,7 @@ public class GridFsTemplate implements GridFsOperations, ResourcePatternResolver
|
||||
public GridFsResource getResource(String location) {
|
||||
|
||||
GridFSFile file = findOne(query(whereFilename().is(location)));
|
||||
return file != null ? new GridFsResource(file, getGridFs().openDownloadStreamByName(location)) : null;
|
||||
return file != null ? new GridFsResource(file, getGridFs().openDownloadStream(location)) : null;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -249,7 +250,7 @@ public class GridFsTemplate implements GridFsOperations, ResourcePatternResolver
|
||||
List<GridFsResource> resources = new ArrayList<GridFsResource>();
|
||||
|
||||
for (GridFSFile file : files) {
|
||||
resources.add(new GridFsResource(file, getGridFs().openDownloadStreamByName(file.getFilename())));
|
||||
resources.add(new GridFsResource(file, getGridFs().openDownloadStream(file.getFilename())));
|
||||
}
|
||||
|
||||
return resources.toArray(new GridFsResource[resources.size()]);
|
||||
@@ -258,10 +259,6 @@ public class GridFsTemplate implements GridFsOperations, ResourcePatternResolver
|
||||
return new GridFsResource[] { getResource(locationPattern) };
|
||||
}
|
||||
|
||||
private Document getMappedQuery(Query query) {
|
||||
return query == null ? new Query().getQueryObject() : getMappedQuery(query.getQueryObject());
|
||||
}
|
||||
|
||||
private Document getMappedQuery(Document query) {
|
||||
return query == null ? null : queryMapper.getMappedObject(query, Optional.empty());
|
||||
}
|
||||
|
||||
@@ -22,6 +22,7 @@ import org.reactivestreams.Publisher;
|
||||
import org.springframework.data.domain.Example;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.repository.NoRepositoryBean;
|
||||
import org.springframework.data.repository.query.ReactiveQueryByExampleExecutor;
|
||||
import org.springframework.data.repository.reactive.ReactiveSortingRepository;
|
||||
|
||||
/**
|
||||
@@ -31,7 +32,7 @@ import org.springframework.data.repository.reactive.ReactiveSortingRepository;
|
||||
* @since 2.0
|
||||
*/
|
||||
@NoRepositoryBean
|
||||
public interface ReactiveMongoRepository<T, ID> extends ReactiveSortingRepository<T, ID> {
|
||||
public interface ReactiveMongoRepository<T, ID> extends ReactiveSortingRepository<T, ID>, ReactiveQueryByExampleExecutor<T> {
|
||||
|
||||
/**
|
||||
* Inserts the given entity. Assumes the instance to be new to be able to apply insertion optimizations. Use the
|
||||
|
||||
@@ -93,7 +93,7 @@ public class MongoQueryMethod extends QueryMethod {
|
||||
* @return
|
||||
*/
|
||||
public boolean hasAnnotatedQuery() {
|
||||
return getAnnotatedQuery() != null;
|
||||
return findAnnotatedQuery().isPresent();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -103,9 +103,15 @@ public class MongoQueryMethod extends QueryMethod {
|
||||
* @return
|
||||
*/
|
||||
String getAnnotatedQuery() {
|
||||
return findAnnotatedQuery().orElse(null);
|
||||
}
|
||||
|
||||
String query = (String) AnnotationUtils.getValue(getQueryAnnotation());
|
||||
return StringUtils.hasText(query) ? query : null;
|
||||
private Optional<String> findAnnotatedQuery() {
|
||||
|
||||
return Optional.ofNullable(getQueryAnnotation()) //
|
||||
.map(AnnotationUtils::getValue) //
|
||||
.map(it -> (String) it) //
|
||||
.filter(StringUtils::hasText);
|
||||
}
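To illustrate what this method inspects, a hedged example of a repository interface (hypothetical `Person` domain class) whose annotated query would be detected by `hasAnnotatedQuery()` and whose `fields` attribute feeds `getFieldSpecification()`:

```java
public interface PersonRepository extends MongoRepository<Person, String> {

    @Query(value = "{ 'firstname' : ?0 }", fields = "{ 'firstname' : 1, 'lastname' : 1 }")
    List<Person> findByFirstname(String firstname);
}
```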
|
||||
|
||||
/**
|
||||
@@ -115,8 +121,10 @@ public class MongoQueryMethod extends QueryMethod {
|
||||
*/
|
||||
String getFieldSpecification() {
|
||||
|
||||
String value = (String) AnnotationUtils.getValue(getQueryAnnotation(), "fields");
|
||||
return StringUtils.hasText(value) ? value : null;
|
||||
return Optional.ofNullable(getQueryAnnotation()) //
|
||||
.map(it -> (String) AnnotationUtils.getValue(it, "fields")) //
|
||||
.filter(StringUtils::hasText) //
|
||||
.orElse(null);
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2016 the original author or authors.
|
||||
* Copyright 2016-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,20 +15,20 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.query;
|
||||
|
||||
import reactor.core.publisher.Flux;
|
||||
import reactor.core.publisher.Mono;
|
||||
import reactor.core.publisher.MonoProcessor;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.repository.util.ReactiveWrapperConverters;
|
||||
import org.springframework.data.repository.util.ReactiveWrappers;
|
||||
|
||||
import reactor.core.publisher.Flux;
|
||||
import reactor.core.publisher.Mono;
|
||||
import reactor.core.publisher.MonoProcessor;
|
||||
|
||||
/**
|
||||
* Reactive {@link org.springframework.data.repository.query.ParametersParameterAccessor} implementation that subscribes
|
||||
* to reactive parameter wrapper types upon creation. This class performs synchronization when accessing parameters.
|
||||
*
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
@@ -55,9 +55,9 @@ class ReactiveMongoParameterAccessor extends MongoParametersParameterAccessor {
|
||||
}
|
||||
|
||||
if (ReactiveWrappers.isSingleValueType(value.getClass())) {
|
||||
subscriptions.add(ReactiveWrapperConverters.toWrapper(value, Mono.class).subscribe());
|
||||
subscriptions.add(ReactiveWrapperConverters.toWrapper(value, Mono.class).toProcessor());
|
||||
} else {
|
||||
subscriptions.add(ReactiveWrapperConverters.toWrapper(value, Flux.class).collectList().subscribe());
|
||||
subscriptions.add(ReactiveWrapperConverters.toWrapper(value, Flux.class).collectList().toProcessor());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -106,7 +106,7 @@ public class MongoRepositoryFactory extends RepositoryFactorySupport {
|
||||
return SimpleReactiveMongoRepository.class;
|
||||
}
|
||||
|
||||
return isQueryDslRepository ? QueryDslMongoRepository.class : SimpleMongoRepository.class;
|
||||
return isQueryDslRepository ? QuerydslMongoRepository.class : SimpleMongoRepository.class;
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -17,7 +17,9 @@ package org.springframework.data.mongodb.repository.support;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.springframework.dao.IncorrectResultSizeDataAccessException;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.Sort;
|
||||
@@ -34,6 +36,7 @@ import org.springframework.data.repository.core.EntityMetadata;
|
||||
import org.springframework.data.repository.support.PageableExecutionUtils;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.querydsl.core.NonUniqueResultException;
|
||||
import com.querydsl.core.types.EntityPath;
|
||||
import com.querydsl.core.types.Expression;
|
||||
import com.querydsl.core.types.OrderSpecifier;
|
||||
@@ -42,13 +45,14 @@ import com.querydsl.core.types.dsl.PathBuilder;
|
||||
import com.querydsl.mongodb.AbstractMongodbQuery;
|
||||
|
||||
/**
|
||||
* Special QueryDsl based repository implementation that allows execution {@link Predicate}s in various forms.
|
||||
*
|
||||
* Special Querydsl based repository implementation that allows execution of {@link Predicate}s in various forms.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleMongoRepository<T, ID>
|
||||
public class QuerydslMongoRepository<T, ID extends Serializable> extends SimpleMongoRepository<T, ID>
|
||||
implements QuerydslPredicateExecutor<T> {
|
||||
|
||||
private final PathBuilder<T> builder;
|
||||
@@ -56,25 +60,25 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
private final MongoOperations mongoOperations;
|
||||
|
||||
/**
|
||||
* Creates a new {@link QueryDslMongoRepository} for the given {@link EntityMetadata} and {@link MongoTemplate}. Uses
|
||||
* Creates a new {@link QuerydslMongoRepository} for the given {@link EntityMetadata} and {@link MongoTemplate}. Uses
|
||||
* the {@link SimpleEntityPathResolver} to create an {@link EntityPath} for the given domain class.
|
||||
*
|
||||
*
|
||||
* @param entityInformation must not be {@literal null}.
|
||||
* @param mongoOperations must not be {@literal null}.
|
||||
*/
|
||||
public QueryDslMongoRepository(MongoEntityInformation<T, ID> entityInformation, MongoOperations mongoOperations) {
|
||||
public QuerydslMongoRepository(MongoEntityInformation<T, ID> entityInformation, MongoOperations mongoOperations) {
|
||||
this(entityInformation, mongoOperations, SimpleEntityPathResolver.INSTANCE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link QueryDslMongoRepository} for the given {@link MongoEntityInformation}, {@link MongoTemplate}
|
||||
* Creates a new {@link QuerydslMongoRepository} for the given {@link MongoEntityInformation}, {@link MongoTemplate}
|
||||
* and {@link EntityPathResolver}.
|
||||
*
|
||||
*
|
||||
* @param entityInformation must not be {@literal null}.
|
||||
* @param mongoOperations must not be {@literal null}.
|
||||
* @param resolver must not be {@literal null}.
|
||||
*/
|
||||
public QueryDslMongoRepository(MongoEntityInformation<T, ID> entityInformation, MongoOperations mongoOperations,
|
||||
public QuerydslMongoRepository(MongoEntityInformation<T, ID> entityInformation, MongoOperations mongoOperations,
|
||||
EntityPathResolver resolver) {
|
||||
|
||||
super(entityInformation, mongoOperations);
|
||||
@@ -93,11 +97,15 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
* @see org.springframework.data.querydsl.QuerydslPredicateExecutor#findById(com.querydsl.core.types.Predicate)
|
||||
*/
|
||||
@Override
|
||||
public T findOne(Predicate predicate) {
|
||||
public Optional<T> findOne(Predicate predicate) {
|
||||
|
||||
Assert.notNull(predicate, "Predicate must not be null!");
|
||||
|
||||
return createQueryFor(predicate).fetchOne();
|
||||
try {
|
||||
return Optional.ofNullable(createQueryFor(predicate).fetchOne());
|
||||
} catch (NonUniqueResultException ex) {
|
||||
throw new IncorrectResultSizeDataAccessException(ex.getMessage(), 1, ex);
|
||||
}
|
||||
}
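A hedged usage sketch (assuming a Querydsl-generated `QPerson` type): the result is now wrapped in an `Optional`, and more than one match surfaces as Spring's `IncorrectResultSizeDataAccessException` rather than Querydsl's `NonUniqueResultException`.

```java
Optional<Person> luke = repository.findOne(QPerson.person.firstname.eq("luke"));
luke.ifPresent(person -> System.out.println(person.getLastname()));
```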
|
||||
|
||||
/*
|
||||
@@ -219,7 +227,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
|
||||
/**
|
||||
* Creates a {@link MongodbQuery} for the given {@link Predicate}.
|
||||
*
|
||||
*
|
||||
* @param predicate
|
||||
* @return
|
||||
*/
|
||||
@@ -238,7 +246,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
|
||||
/**
|
||||
* Applies the given {@link Pageable} to the given {@link MongodbQuery}.
|
||||
*
|
||||
*
|
||||
* @param query
|
||||
* @param pageable
|
||||
* @return
|
||||
@@ -252,7 +260,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
|
||||
/**
|
||||
* Applies the given {@link Sort} to the given {@link MongodbQuery}.
|
||||
*
|
||||
*
|
||||
* @param query
|
||||
* @param sort
|
||||
* @return
|
||||
@@ -276,7 +284,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
|
||||
/**
|
||||
* Transforms a plain {@link Order} into a QueryDsl specific {@link OrderSpecifier}.
|
||||
*
|
||||
*
|
||||
* @param order
|
||||
* @return
|
||||
*/
|
||||
@@ -40,7 +40,7 @@ import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* Repository base implementation for Mongo.
|
||||
*
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
@@ -53,7 +53,7 @@ public class SimpleMongoRepository<T, ID> implements MongoRepository<T, ID> {
|
||||
|
||||
/**
|
||||
* Creates a new {@link SimpleMongoRepository} for the given {@link MongoEntityInformation} and {@link MongoTemplate}.
|
||||
*
|
||||
*
|
||||
* @param metadata must not be {@literal null}.
|
||||
* @param mongoOperations must not be {@literal null}.
|
||||
*/
|
||||
@@ -109,7 +109,7 @@ public class SimpleMongoRepository<T, ID> implements MongoRepository<T, ID> {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.CrudRepository#findOne(java.io.Serializable)
|
||||
* @see org.springframework.data.repository.CrudRepository#findById(java.io.Serializable)
|
||||
*/
|
||||
@Override
|
||||
public Optional<T> findById(ID id) {
|
||||
@@ -312,12 +312,13 @@ public class SimpleMongoRepository<T, ID> implements MongoRepository<T, ID> {
|
||||
* @see org.springframework.data.repository.query.QueryByExampleExecutor#findOne(org.springframework.data.domain.Example)
|
||||
*/
|
||||
@Override
|
||||
public <S extends T> S findOne(Example<S> example) {
|
||||
public <S extends T> Optional<S> findOne(Example<S> example) {
|
||||
|
||||
Assert.notNull(example, "Sample must not be null!");
|
||||
|
||||
Query q = new Query(new Criteria().alike(example));
|
||||
return mongoOperations.findOne(q, example.getProbeType(), entityInformation.getCollectionName());
|
||||
return Optional
|
||||
.ofNullable(mongoOperations.findOne(q, example.getProbeType(), entityInformation.getCollectionName()));
|
||||
}
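A hedged query-by-example sketch for the new `Optional`-returning signature (hypothetical `Person` probe):

```java
Person probe = new Person();
probe.setFirstname("luke");

Optional<Person> result = repository.findOne(Example.of(probe));
```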
|
||||
|
||||
/*
|
||||
|
||||
@@ -27,6 +27,7 @@ import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.reactivestreams.Publisher;
|
||||
import org.springframework.dao.IncorrectResultSizeDataAccessException;
|
||||
import org.springframework.data.domain.Example;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
|
||||
@@ -43,6 +44,7 @@ import org.springframework.util.Assert;
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
@@ -65,17 +67,39 @@ public class SimpleReactiveMongoRepository<T, ID extends Serializable> implement
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#findById(reactor.core.publisher.Mono)
|
||||
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#findById(org.reactivestreams.Publisher)
|
||||
*/
|
||||
@Override
|
||||
public Mono<T> findById(Mono<ID> mono) {
|
||||
public Mono<T> findById(Publisher<ID> publisher) {
|
||||
|
||||
Assert.notNull(mono, "The given id must not be null!");
|
||||
Assert.notNull(publisher, "The given id must not be null!");
|
||||
|
||||
return mono.flatMap(
|
||||
return Mono.from(publisher).flatMap(
|
||||
id -> mongoOperations.findById(id, entityInformation.getJavaType(), entityInformation.getCollectionName()));
|
||||
}
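The change above widens the parameter from Mono<ID> to Publisher<ID>; Mono.from(...) subscribes to the given publisher and relays only its first element, which is the id the lookup uses. A small stand-alone sketch of that conversion (not taken from this diff):

// Illustrates Mono.from(Publisher): only the first emitted id is consumed.
import org.reactivestreams.Publisher
import reactor.core.publisher.Flux
import reactor.core.publisher.Mono

fun main() {
    val ids: Publisher<String> = Flux.just("id-1", "id-2")
    val firstId: Mono<String> = Mono.from(ids)
    println(firstId.block()) // prints "id-1"
}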
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.query.ReactiveQueryByExampleExecutor#findOne(org.springframework.data.domain.Example)
|
||||
*/
|
||||
@Override
|
||||
public <S extends T> Mono<S> findOne(Example<S> example) {
|
||||
|
||||
Assert.notNull(example, "Sample must not be null!");
|
||||
|
||||
Query q = new Query(new Criteria().alike(example));
|
||||
q.limit(2);
|
||||
|
||||
return mongoOperations.find(q, example.getProbeType(), entityInformation.getCollectionName()).buffer(2)
|
||||
.flatMap(vals -> {
|
||||
|
||||
if (vals.size() > 1) {
|
||||
return Mono.error(new IncorrectResultSizeDataAccessException(1));
|
||||
}
|
||||
return Mono.just(vals.iterator().next());
|
||||
}).single();
|
||||
}
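The reactive findOne above caps the query at two documents, buffers them, and fails with IncorrectResultSizeDataAccessException when more than one arrives, so ambiguity is detected without scanning all matches. The same pattern in isolation, as a hedged sketch where the Flux stands in for the query result:

// Stand-alone sketch of the "fetch at most two, fail on two" uniqueness check.
import org.springframework.dao.IncorrectResultSizeDataAccessException
import reactor.core.publisher.Flux
import reactor.core.publisher.Mono

fun <T : Any> requireUnique(results: Flux<T>): Mono<T> =
    results.buffer(2)
        .flatMap { vals ->
            if (vals.size > 1) Mono.error<T>(IncorrectResultSizeDataAccessException(1))
            else Mono.just(vals.first())
        }
        .single()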
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#existsById(java.lang.Object)
|
||||
@@ -91,18 +115,35 @@ public class SimpleReactiveMongoRepository<T, ID extends Serializable> implement
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#existsById(reactor.core.publisher.Mono)
|
||||
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#existsById(org.reactivestreams.Publisher)
|
||||
*/
|
||||
@Override
|
||||
public Mono<Boolean> existsById(Mono<ID> mono) {
|
||||
public Mono<Boolean> existsById(Publisher<ID> publisher) {
|
||||
|
||||
Assert.notNull(mono, "The given id must not be null!");
|
||||
Assert.notNull(publisher, "The given id must not be null!");
|
||||
|
||||
return mono.flatMap(id -> mongoOperations.exists(getIdQuery(id), entityInformation.getJavaType(),
|
||||
return Mono.from(publisher).flatMap(id -> mongoOperations.exists(getIdQuery(id), entityInformation.getJavaType(),
|
||||
entityInformation.getCollectionName()));
|
||||
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.query.ReactiveQueryByExampleExecutor#exists(org.springframework.data.domain.Example)
|
||||
*/
|
||||
@Override
|
||||
public <S extends T> Mono<Boolean> exists(Example<S> example) {
|
||||
|
||||
Assert.notNull(example, "Sample must not be null!");
|
||||
|
||||
Query q = new Query(new Criteria().alike(example));
|
||||
return mongoOperations.exists(q, example.getProbeType(), entityInformation.getCollectionName());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.reactive.ReactiveSortingRepository#findAll()
|
||||
*/
|
||||
@Override
|
||||
public Flux<T> findAll() {
|
||||
return findAll(new Query());
|
||||
@@ -139,6 +180,9 @@ public class SimpleReactiveMongoRepository<T, ID extends Serializable> implement
|
||||
*/
|
||||
@Override
|
||||
public Flux<T> findAll(Sort sort) {
|
||||
|
||||
Assert.notNull(sort, "Sort must not be null!");
|
||||
|
||||
return findAll(new Query().with(sort));
|
||||
}
|
||||
|
||||
@@ -163,6 +207,9 @@ public class SimpleReactiveMongoRepository<T, ID extends Serializable> implement
|
||||
*/
|
||||
@Override
|
||||
public <S extends T> Flux<S> findAll(Example<S> example) {
|
||||
|
||||
Assert.notNull(example, "Example must not be null!");
|
||||
|
||||
return findAll(example, Sort.unsorted());
|
||||
}
|
||||
|
||||
@@ -170,10 +217,24 @@ public class SimpleReactiveMongoRepository<T, ID extends Serializable> implement
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#count()
|
||||
*/
|
||||
@Override
|
||||
public Mono<Long> count() {
|
||||
return mongoOperations.count(new Query(), entityInformation.getCollectionName());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.query.ReactiveQueryByExampleExecutor#count(org.springframework.data.domain.Example)
|
||||
*/
|
||||
@Override
|
||||
public <S extends T> Mono<Long> count(Example<S> example) {
|
||||
|
||||
Assert.notNull(example, "Sample must not be null!");
|
||||
|
||||
Query q = new Query(new Criteria().alike(example));
|
||||
return mongoOperations.count(q, example.getProbeType(), entityInformation.getCollectionName());
|
||||
}
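count(Example) above builds its query from Criteria.alike and returns a Mono<Long>. A hedged usage sketch, assuming a Person document and a reactive repository interface (both hypothetical) that inherits the ReactiveQueryByExampleExecutor methods:

// Hypothetical reactive repository using count(Example); names are illustration only.
import org.springframework.data.annotation.Id
import org.springframework.data.domain.Example
import org.springframework.data.mongodb.repository.ReactiveMongoRepository
import reactor.core.publisher.Mono

class Person(@Id var id: String? = null, var firstname: String? = null)

interface ReactivePersonRepository : ReactiveMongoRepository<Person, String>

fun countLukes(repository: ReactivePersonRepository): Mono<Long> =
    repository.count(Example.of(Person(firstname = "luke")))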
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.ReactiveMongoRepository#insert(java.lang.Object)
|
||||
@@ -272,6 +333,19 @@ public class SimpleReactiveMongoRepository<T, ID extends Serializable> implement
|
||||
.remove(getIdQuery(id), entityInformation.getJavaType(), entityInformation.getCollectionName()).then();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#deleteById(org.reactivestreams.Publisher)
|
||||
*/
|
||||
@Override
|
||||
public Mono<Void> deleteById(Publisher<ID> publisher) {
|
||||
|
||||
Assert.notNull(publisher, "Id must not be null!");
|
||||
|
||||
return Mono.from(publisher).flatMap(id -> mongoOperations.remove(getIdQuery(id), entityInformation.getJavaType(),
|
||||
entityInformation.getCollectionName())).then();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#delete(java.lang.Object)
|
||||
@@ -315,34 +389,11 @@ public class SimpleReactiveMongoRepository<T, ID extends Serializable> implement
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#deleteAll()
|
||||
*/
|
||||
@Override
|
||||
public Mono<Void> deleteAll() {
|
||||
return mongoOperations.remove(new Query(), entityInformation.getCollectionName()).then(Mono.empty());
|
||||
}
|
||||
|
||||
public <S extends T> Mono<Boolean> exists(Example<S> example) {
|
||||
|
||||
Assert.notNull(example, "Sample must not be null!");
|
||||
|
||||
Query q = new Query(new Criteria().alike(example));
|
||||
return mongoOperations.exists(q, example.getProbeType(), entityInformation.getCollectionName());
|
||||
}
|
||||
|
||||
public <S extends T> Mono<S> findOne(Example<S> example) {
|
||||
|
||||
Assert.notNull(example, "Sample must not be null!");
|
||||
|
||||
Query q = new Query(new Criteria().alike(example));
|
||||
return mongoOperations.findOne(q, example.getProbeType(), entityInformation.getCollectionName());
|
||||
}
|
||||
|
||||
public <S extends T> Mono<Long> count(Example<S> example) {
|
||||
|
||||
Assert.notNull(example, "Sample must not be null!");
|
||||
|
||||
Query q = new Query(new Criteria().alike(example));
|
||||
return mongoOperations.count(q, example.getProbeType(), entityInformation.getCollectionName());
|
||||
}
|
||||
|
||||
private Query getIdQuery(Object id) {
|
||||
return new Query(getIdCriteria(id));
|
||||
}
|
||||
|
||||
@@ -0,0 +1,36 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core
|
||||
|
||||
import kotlin.reflect.KClass
|
||||
|
||||
/**
|
||||
* Extension for [ExecutableAggregationOperation.aggregateAndReturn] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> ExecutableAggregationOperation.aggregateAndReturn(entityClass: KClass<T>): ExecutableAggregationOperation.AggregationOperation<T> =
|
||||
aggregateAndReturn(entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [ExecutableAggregationOperation.aggregateAndReturn] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> ExecutableAggregationOperation.aggregateAndReturn(): ExecutableAggregationOperation.AggregationOperation<T> =
|
||||
aggregateAndReturn(T::class.java)
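With the reified variant above, the target type can be given as a Kotlin type argument instead of a Class reference. A hedged usage sketch: the Jedi class, the collection name, and the aggregation are assumptions, the inCollection/by/all chain mirrors the unit test later in this changeset, and the sketch assumes MongoTemplate exposes aggregateAndReturn (i.e. implements ExecutableAggregationOperation) and that the extension above is imported from org.springframework.data.mongodb.core:

// Illustration only; see the assumptions named in the paragraph above.
import org.springframework.data.mongodb.core.MongoTemplate
import org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation
import org.springframework.data.mongodb.core.aggregation.Aggregation.project

class Jedi(var name: String? = null)

fun aggregateJedis(template: MongoTemplate) {
    val results = template.aggregateAndReturn<Jedi>()   // instead of aggregateAndReturn(Jedi::class.java)
        .inCollection("star-wars")
        .by(newAggregation(project("name")))
        .all()
    println(results)
}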
|
||||
@@ -0,0 +1,57 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core
|
||||
|
||||
import kotlin.reflect.KClass
|
||||
|
||||
/**
|
||||
* Extension for [ExecutableFindOperation.query] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> ExecutableFindOperation.query(entityClass: KClass<T>): ExecutableFindOperation.FindOperation<T> =
|
||||
query(entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [ExecutableFindOperation.query] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> ExecutableFindOperation.query(): ExecutableFindOperation.FindOperation<T> =
|
||||
query(T::class.java)
|
||||
|
||||
|
||||
/**
|
||||
* Extension for [ExecutableFindOperation.FindOperationWithProjection.as] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> ExecutableFindOperation.FindOperationWithProjection<T>.asType(resultType: KClass<T>): ExecutableFindOperation.FindOperationWithQuery<T> =
|
||||
`as`(resultType.java)
|
||||
|
||||
/**
|
||||
* Extension for [ExecutableFindOperation.FindOperationWithProjection.as] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> ExecutableFindOperation.FindOperationWithProjection<T>.asType(): ExecutableFindOperation.FindOperationWithQuery<T> =
|
||||
`as`(T::class.java)
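The reified query() extension above drops the explicit Class argument from the fluent find API. A hedged usage sketch: Person is a made-up document class, the matching/one chain mirrors ExecutableFindOperationSupportTests later in this changeset, the Optional return of one() is an assumption for this milestone, and the extension import from org.springframework.data.mongodb.core is assumed to be in scope:

// Illustration only; Person is a made-up document class.
import org.springframework.data.mongodb.core.MongoTemplate
import org.springframework.data.mongodb.core.query.Criteria.where
import org.springframework.data.mongodb.core.query.Query.query

class Person(var id: String? = null, var firstname: String? = null)

fun findLuke(template: MongoTemplate): Person? =
    template.query<Person>()                                  // instead of query(Person::class.java)
        .matching(query(where("firstname").`is`("luke")))
        .one()                                                // assumed to be Optional<Person> here
        .orElse(null)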
|
||||
|
||||
|
||||
@@ -0,0 +1,36 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core
|
||||
|
||||
import kotlin.reflect.KClass
|
||||
|
||||
/**
|
||||
* Extension for [ExecutableInsertOperation.insert] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> ExecutableInsertOperation.insert(entityClass: KClass<T>): ExecutableInsertOperation.InsertOperation<T> =
|
||||
insert(entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [ExecutableInsertOperation.insert] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> ExecutableInsertOperation.insert(): ExecutableInsertOperation.InsertOperation<T> =
|
||||
insert(T::class.java)
|
||||
@@ -0,0 +1,36 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core
|
||||
|
||||
import kotlin.reflect.KClass
|
||||
|
||||
/**
|
||||
* Extension for [ExecutableRemoveOperation.remove] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> ExecutableRemoveOperation.remove(entityClass: KClass<T>): ExecutableRemoveOperation.RemoveOperation<T> =
|
||||
remove(entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [ExecutableRemoveOperation.remove] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> ExecutableRemoveOperation.remove(): ExecutableRemoveOperation.RemoveOperation<T> =
|
||||
remove(T::class.java)
|
||||
@@ -0,0 +1,466 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core
|
||||
|
||||
import com.mongodb.client.MongoCollection
|
||||
import com.mongodb.client.result.DeleteResult
|
||||
import com.mongodb.client.result.UpdateResult
|
||||
import org.bson.Document
|
||||
import org.springframework.data.geo.GeoResults
|
||||
import org.springframework.data.mongodb.core.BulkOperations.BulkMode
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationResults
|
||||
import org.springframework.data.mongodb.core.mapreduce.GroupBy
|
||||
import org.springframework.data.mongodb.core.mapreduce.GroupByResults
|
||||
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions
|
||||
import org.springframework.data.mongodb.core.mapreduce.MapReduceResults
|
||||
import org.springframework.data.mongodb.core.query.Criteria
|
||||
import org.springframework.data.mongodb.core.query.NearQuery
|
||||
import org.springframework.data.mongodb.core.query.Query
|
||||
import org.springframework.data.mongodb.core.query.Update
|
||||
import org.springframework.data.util.CloseableIterator
|
||||
import kotlin.reflect.KClass
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.getCollectionName] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> MongoOperations.getCollectionName(entityClass: KClass<T>): String =
|
||||
getCollectionName(entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.getCollectionName] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.getCollectionName(): String =
|
||||
getCollectionName(T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.execute] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.execute(action: CollectionCallback<T>): T =
|
||||
execute(T::class.java, action)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.stream] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.stream(query: Query): CloseableIterator<T> =
|
||||
stream(query, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.stream] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.stream(query: Query, collectionName: String? = null): CloseableIterator<T> =
|
||||
if (collectionName != null) stream(query, T::class.java, collectionName)
|
||||
else stream(query, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.createCollection] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> MongoOperations.createCollection(entityClass: KClass<T>, collectionOptions: CollectionOptions? = null): MongoCollection<Document> =
|
||||
if (collectionOptions != null) createCollection(entityClass.java, collectionOptions)
|
||||
else createCollection(entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.createCollection] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.createCollection(
|
||||
collectionOptions: CollectionOptions? = null): MongoCollection<Document> =
|
||||
if (collectionOptions != null) createCollection(T::class.java, collectionOptions)
|
||||
else createCollection(T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.collectionExists] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> MongoOperations.collectionExists(entityClass: KClass<T>): Boolean =
|
||||
collectionExists(entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.collectionExists] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.collectionExists(): Boolean =
|
||||
collectionExists(T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.dropCollection] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> MongoOperations.dropCollection(entityClass: KClass<T>) {
|
||||
dropCollection(entityClass.java)
|
||||
}
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.dropCollection] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.dropCollection() {
|
||||
dropCollection(T::class.java)
|
||||
}
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.indexOps] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> MongoOperations.indexOps(entityClass: KClass<T>): IndexOperations =
|
||||
indexOps(entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.indexOps] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.indexOps(): IndexOperations =
|
||||
indexOps(T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.bulkOps] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> MongoOperations.bulkOps(bulkMode: BulkMode, entityClass: KClass<T>, collectionName: String? = null): BulkOperations =
|
||||
if (collectionName != null) bulkOps(bulkMode, entityClass.java, collectionName)
|
||||
else bulkOps(bulkMode, entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.bulkOps] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
@Suppress("EXTENSION_SHADOWED_BY_MEMBER")
|
||||
inline fun <reified T : Any> MongoOperations.bulkOps(bulkMode: BulkMode, collectionName: String? = null): BulkOperations =
|
||||
if (collectionName != null) bulkOps(bulkMode, T::class.java, collectionName)
|
||||
else bulkOps(bulkMode, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.findAll] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.findAll(collectionName: String? = null): List<T> =
|
||||
if (collectionName != null) findAll(T::class.java, collectionName) else findAll(T::class.java)
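The nullable collectionName default seen above (and in most extensions in this file) folds the two Java overloads into a single Kotlin entry point. A brief caller-side sketch, with Person and the "people" collection as placeholders and the extension import assumed to be in scope:

// Placeholder document type and collection name, for illustration only.
import org.springframework.data.mongodb.core.MongoOperations

class Person(var firstname: String? = null)

fun loadPeople(operations: MongoOperations): List<Person> {
    val fromDefaultCollection: List<Person> = operations.findAll()          // findAll(Person::class.java)
    val fromExplicitCollection: List<Person> = operations.findAll("people") // findAll(Person::class.java, "people")
    return fromDefaultCollection + fromExplicitCollection
}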
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.group] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.group(inputCollectionName: String, groupBy: GroupBy): GroupByResults<T> =
|
||||
group(inputCollectionName, groupBy, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.group] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.group(criteria: Criteria, inputCollectionName: String, groupBy: GroupBy): GroupByResults<T> =
|
||||
group(criteria, inputCollectionName, groupBy, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.aggregate] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified O : Any> MongoOperations.aggregate(aggregation: Aggregation, inputType: KClass<*>): AggregationResults<O> =
|
||||
aggregate(aggregation, inputType.java, O::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.aggregate] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified O : Any> MongoOperations.aggregate(aggregation: Aggregation, collectionName: String): AggregationResults<O> =
|
||||
aggregate(aggregation, collectionName, O::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.aggregateStream] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified O : Any> MongoOperations.aggregateStream(aggregation: Aggregation, inputType: KClass<*>): CloseableIterator<O> =
|
||||
aggregateStream(aggregation, inputType.java, O::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.aggregateStream] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified O : Any> MongoOperations.aggregateStream(aggregation: Aggregation, collectionName: String): CloseableIterator<O> =
|
||||
aggregateStream(aggregation, collectionName, O::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.mapReduce] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.mapReduce(collectionName: String, mapFunction: String, reduceFunction: String, options: MapReduceOptions? = null): MapReduceResults<T> =
|
||||
if (options != null) mapReduce(collectionName, mapFunction, reduceFunction, options, T::class.java)
|
||||
else mapReduce(collectionName, mapFunction, reduceFunction, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.mapReduce] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0

|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.mapReduce(query: Query, collectionName: String, mapFunction: String, reduceFunction: String, options: MapReduceOptions? = null): MapReduceResults<T> =
|
||||
if (options != null) mapReduce(query, collectionName, mapFunction, reduceFunction, options, T::class.java)
|
||||
else mapReduce(query, collectionName, mapFunction, reduceFunction, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.geoNear] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.geoNear(near: NearQuery, collectionName: String? = null): GeoResults<T> =
|
||||
if (collectionName != null) geoNear(near, T::class.java, collectionName)
|
||||
else geoNear(near, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.findOne] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.findOne(query: Query, collectionName: String? = null): T =
|
||||
if (collectionName != null) findOne(query, T::class.java, collectionName) else findOne(query, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.exists] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> MongoOperations.exists(query: Query, entityClass: KClass<T>, collectionName: String? = null): Boolean =
|
||||
if (collectionName != null) exists(query, entityClass.java, collectionName)
|
||||
else exists(query, entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.exists] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
@Suppress("EXTENSION_SHADOWED_BY_MEMBER")
|
||||
inline fun <reified T : Any> MongoOperations.exists(query: Query, collectionName: String? = null): Boolean =
|
||||
if (collectionName != null) exists(query, T::class.java, collectionName)
|
||||
else exists(query, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.find] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.find(query: Query, collectionName: String? = null): List<T> =
|
||||
if (collectionName != null) find(query, T::class.java, collectionName)
|
||||
else find(query, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.findById] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.findById(id: Any, collectionName: String? = null): T =
|
||||
if (collectionName != null) findById(id, T::class.java, collectionName)
|
||||
else findById(id, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.findAndModify] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.findAndModify(query: Query, update: Update, options: FindAndModifyOptions, collectionName: String? = null): T =
|
||||
if (collectionName != null) findAndModify(query, update, options, T::class.java, collectionName)
|
||||
else findAndModify(query, update, options, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.findAndRemove] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.findAndRemove(query: Query, collectionName: String? = null): T =
|
||||
if (collectionName != null) findAndRemove(query, T::class.java, collectionName)
|
||||
else findAndRemove(query, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.count] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> MongoOperations.count(query: Query = Query(), entityClass: KClass<T>, collectionName: String? = null): Long =
|
||||
if (collectionName != null) count(query, entityClass.java, collectionName)
|
||||
else count(query, entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.count] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
@Suppress("EXTENSION_SHADOWED_BY_MEMBER")
|
||||
inline fun <reified T : Any> MongoOperations.count(query: Query = Query(), collectionName: String? = null): Long =
|
||||
if (collectionName != null) count(query, T::class.java, collectionName) else count(query, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.insert] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> MongoOperations.insert(batchToSave: Collection<T>, entityClass: KClass<T>) {
|
||||
insert(batchToSave, entityClass.java)
|
||||
}
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.upsert] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> MongoOperations.upsert(query: Query, update: Update, entityClass: KClass<T>, collectionName: String? = null): UpdateResult =
|
||||
if (collectionName != null) upsert(query, update, entityClass.java, collectionName)
|
||||
else upsert(query, update, entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.upsert] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
@Suppress("EXTENSION_SHADOWED_BY_MEMBER")
|
||||
inline fun <reified T : Any> MongoOperations.upsert(query: Query, update: Update, collectionName: String? = null): UpdateResult =
|
||||
if (collectionName != null) upsert(query, update, T::class.java, collectionName)
|
||||
else upsert(query, update, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.updateFirst] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> MongoOperations.updateFirst(query: Query, update: Update, entityClass: KClass<T>, collectionName: String? = null): UpdateResult =
|
||||
if (collectionName != null) updateFirst(query, update, entityClass.java, collectionName)
|
||||
else updateFirst(query, update, entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.updateFirst] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
@Suppress("EXTENSION_SHADOWED_BY_MEMBER")
|
||||
inline fun <reified T : Any> MongoOperations.updateFirst(query: Query, update: Update, collectionName: String? = null): UpdateResult =
|
||||
if (collectionName != null) updateFirst(query, update, T::class.java, collectionName)
|
||||
else updateFirst(query, update, T::class.java)
|
||||
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.updateMulti] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> MongoOperations.updateMulti(query: Query, update: Update, entityClass: KClass<T>, collectionName: String? = null): UpdateResult =
|
||||
if (collectionName != null) updateMulti(query, update, entityClass.java, collectionName)
|
||||
else updateMulti(query, update, entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.updateMulti] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
@Suppress("EXTENSION_SHADOWED_BY_MEMBER")
|
||||
inline fun <reified T : Any> MongoOperations.updateMulti(query: Query, update: Update, collectionName: String? = null): UpdateResult =
|
||||
if (collectionName != null) updateMulti(query, update, T::class.java, collectionName)
|
||||
else updateMulti(query, update, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.remove] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> MongoOperations.remove(query: Query, entityClass: KClass<T>, collectionName: String? = null): DeleteResult =
|
||||
if (collectionName != null) remove(query, entityClass.java, collectionName)
|
||||
else remove(query, entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.remove] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
@Suppress("EXTENSION_SHADOWED_BY_MEMBER")
|
||||
inline fun <reified T : Any> MongoOperations.remove(query: Query, collectionName: String? = null): DeleteResult =
|
||||
if (collectionName != null) remove(query, T::class.java, collectionName)
|
||||
else remove(query, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.findAllAndRemove] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.findAllAndRemove(query: Query): List<T> =
|
||||
findAllAndRemove(query, T::class.java)
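Taken together, the extensions in this file let Kotlin callers drop the Class parameters from the blocking template API. A hedged end-to-end sketch: the MongoClient/database setup and the Person class are assumptions, not part of this change, and the extension imports from org.springframework.data.mongodb.core are assumed to be in scope:

// Illustration only; connection details and the Person document are made up.
import com.mongodb.MongoClient
import org.springframework.data.mongodb.core.MongoTemplate
import org.springframework.data.mongodb.core.SimpleMongoDbFactory
import org.springframework.data.mongodb.core.query.Criteria.where
import org.springframework.data.mongodb.core.query.Query.query
import org.springframework.data.mongodb.core.query.Update

class Person(var id: String? = null, var firstname: String? = null)

fun main() {
    val template = MongoTemplate(SimpleMongoDbFactory(MongoClient(), "extensions-demo"))

    template.insert(Person(firstname = "luke"))

    val luke: Person = template.findOne(query(where("firstname").`is`("luke")))  // findOne(..., Person::class.java)
    val everyone: List<Person> = template.findAll()                              // findAll(Person::class.java)
    val total: Long = template.count<Person>()                                   // count(Query(), Person::class.java)

    template.updateFirst<Person>(query(where("firstname").`is`("luke")), Update().set("firstname", "Luke"))

    println("${luke.firstname}, ${everyone.size}, $total")
}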
|
||||
@@ -0,0 +1,334 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core
|
||||
|
||||
import com.mongodb.client.result.DeleteResult
|
||||
import com.mongodb.client.result.UpdateResult
|
||||
import com.mongodb.reactivestreams.client.MongoCollection
|
||||
import org.bson.Document
|
||||
import org.springframework.data.geo.GeoResult
|
||||
import org.springframework.data.mongodb.core.query.NearQuery
|
||||
import org.springframework.data.mongodb.core.query.Query
|
||||
import org.springframework.data.mongodb.core.query.Update
|
||||
import reactor.core.publisher.Flux
|
||||
import reactor.core.publisher.Mono
|
||||
import kotlin.reflect.KClass
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.indexOps] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> ReactiveMongoOperations.indexOps(entityClass: KClass<T>): ReactiveIndexOperations =
|
||||
indexOps(entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.indexOps] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> ReactiveMongoOperations.indexOps(): ReactiveIndexOperations =
|
||||
indexOps(T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.execute] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> ReactiveMongoOperations.execute(action: ReactiveCollectionCallback<T>): Flux<T> =
|
||||
execute(T::class.java, action)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.createCollection] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> ReactiveMongoOperations.createCollection(entityClass: KClass<T>, collectionOptions: CollectionOptions? = null): Mono<MongoCollection<Document>> =
|
||||
if (collectionOptions != null) createCollection(entityClass.java, collectionOptions) else createCollection(entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.createCollection] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> ReactiveMongoOperations.createCollection(collectionOptions: CollectionOptions? = null): Mono<MongoCollection<Document>> =
|
||||
if (collectionOptions != null) createCollection(T::class.java, collectionOptions) else createCollection(T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.collectionExists] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> ReactiveMongoOperations.collectionExists(entityClass: KClass<T>): Mono<Boolean> =
|
||||
collectionExists(entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.collectionExists] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> ReactiveMongoOperations.collectionExists(): Mono<Boolean> =
|
||||
collectionExists(T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.dropCollection] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> ReactiveMongoOperations.dropCollection(entityClass: KClass<T>): Mono<Void> =
|
||||
dropCollection(entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.dropCollection] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> ReactiveMongoOperations.dropCollection(): Mono<Void> =
|
||||
dropCollection(T::class.java)
|
||||
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.findAll] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> ReactiveMongoOperations.findAll(collectionName: String? = null): Flux<T> =
|
||||
if (collectionName != null) findAll(T::class.java, collectionName) else findAll(T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.findOne] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> ReactiveMongoOperations.findOne(query: Query, collectionName: String? = null): Mono<T> =
|
||||
if (collectionName != null) findOne(query, T::class.java, collectionName) else findOne(query, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.exists] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> ReactiveMongoOperations.exists(query: Query, entityClass: KClass<T>, collectionName: String? = null): Mono<Boolean> =
|
||||
if (collectionName != null) exists(query, entityClass.java, collectionName) else exists(query, entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.exists] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
@Suppress("EXTENSION_SHADOWED_BY_MEMBER")
|
||||
inline fun <reified T : Any> ReactiveMongoOperations.exists(query: Query, collectionName: String? = null): Mono<Boolean> =
|
||||
if (collectionName != null) exists(query, T::class.java, collectionName) else exists(query, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.find] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> ReactiveMongoOperations.find(query: Query, collectionName: String? = null): Flux<T> =
|
||||
if (collectionName != null) find(query, T::class.java, collectionName) else find(query, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.findById] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> ReactiveMongoOperations.findById(id: Any, collectionName: String? = null): Mono<T> =
|
||||
if (collectionName != null) findById(id, T::class.java, collectionName) else findById(id, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.geoNear] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> ReactiveMongoOperations.geoNear(near: NearQuery, collectionName: String? = null): Flux<GeoResult<T>> =
|
||||
if (collectionName != null) geoNear(near, T::class.java, collectionName) else geoNear(near, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.findAndModify] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> ReactiveMongoOperations.findAndModify(query: Query, update: Update, options: FindAndModifyOptions, collectionName: String? = null): Mono<T> =
|
||||
if (collectionName != null) findAndModify(query, update, options, T::class.java, collectionName) else findAndModify(query, update, options, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.findAndRemove] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> ReactiveMongoOperations.findAndRemove(query: Query, collectionName: String? = null): Mono<T> =
|
||||
if (collectionName != null) findAndRemove(query, T::class.java, collectionName)
|
||||
else findAndRemove(query, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.count] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> ReactiveMongoOperations.count(query: Query = Query(), entityClass: KClass<T>, collectionName: String? = null): Mono<Long> =
|
||||
if (collectionName != null) count(query, entityClass.java, collectionName)
|
||||
else count(query, entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.count] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
@Suppress("EXTENSION_SHADOWED_BY_MEMBER")
|
||||
inline fun <reified T : Any> ReactiveMongoOperations.count(query: Query = Query(), collectionName: String? = null): Mono<Long> =
|
||||
if (collectionName != null) count(query, T::class.java, collectionName)
|
||||
else count(query, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.insert] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> ReactiveMongoOperations.insert(batchToSave: Collection<T>, entityClass: KClass<T>): Flux<T> =
|
||||
insert(batchToSave, entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.insertAll] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> ReactiveMongoOperations.insertAll(batchToSave: Mono<out Collection<T>>, entityClass: KClass<T>): Flux<T> =
|
||||
insertAll(batchToSave, entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.upsert] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> ReactiveMongoOperations.upsert(query: Query, update: Update, entityClass: KClass<T>, collectionName: String? = null): Mono<UpdateResult> =
|
||||
if (collectionName != null) upsert(query, update, entityClass.java, collectionName) else upsert(query, update, entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.upsert] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
@Suppress("EXTENSION_SHADOWED_BY_MEMBER")
|
||||
inline fun <reified T : Any> ReactiveMongoOperations.upsert(query: Query, update: Update, collectionName: String? = null): Mono<UpdateResult> =
|
||||
if (collectionName != null) upsert(query, update, T::class.java, collectionName)
|
||||
else upsert(query, update, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.updateFirst] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> ReactiveMongoOperations.updateFirst(query: Query, update: Update, entityClass: KClass<T>, collectionName: String? = null): Mono<UpdateResult> =
|
||||
if (collectionName != null) updateFirst(query, update, entityClass.java, collectionName)
|
||||
else updateFirst(query, update, entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.updateFirst] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
@Suppress("EXTENSION_SHADOWED_BY_MEMBER")
|
||||
inline fun <reified T : Any> ReactiveMongoOperations.updateFirst(query: Query, update: Update, collectionName: String? = null): Mono<UpdateResult> =
|
||||
if (collectionName != null) updateFirst(query, update, T::class.java, collectionName)
|
||||
else updateFirst(query, update, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.updateMulti] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> ReactiveMongoOperations.updateMulti(query: Query, update: Update, entityClass: KClass<T>, collectionName: String? = null): Mono<UpdateResult> =
|
||||
if (collectionName != null) updateMulti(query, update, entityClass.java, collectionName)
|
||||
else updateMulti(query, update, entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.updateMulti] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
@Suppress("EXTENSION_SHADOWED_BY_MEMBER")
|
||||
inline fun <reified T : Any> ReactiveMongoOperations.updateMulti(query: Query, update: Update, collectionName: String? = null): Mono<UpdateResult> =
|
||||
if (collectionName != null) updateMulti(query, update, T::class.java, collectionName)
|
||||
else updateMulti(query, update, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.remove] providing a [KClass] based variant.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
fun <T : Any> ReactiveMongoOperations.remove(query: Query, entityClass: KClass<T>, collectionName: String? = null): Mono<DeleteResult> =
|
||||
if (collectionName != null) remove(query, entityClass.java, collectionName)
|
||||
else remove(query, entityClass.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.remove] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
@Suppress("EXTENSION_SHADOWED_BY_MEMBER")
|
||||
inline fun <reified T : Any> ReactiveMongoOperations.remove(query: Query, collectionName: String? = null): Mono<DeleteResult> =
|
||||
if (collectionName != null) remove(query, T::class.java, collectionName)
|
||||
else remove(query, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.findAllAndRemove] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
@Suppress("EXTENSION_SHADOWED_BY_MEMBER")
|
||||
inline fun <reified T : Any> ReactiveMongoOperations.findAllAndRemove(query: Query): Flux<T> =
|
||||
findAllAndRemove(query, T::class.java)
|
||||
|
||||
/**
|
||||
* Extension for [ReactiveMongoOperations.tail] leveraging reified type parameters.
|
||||
*
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> ReactiveMongoOperations.tail(query: Query, collectionName: String? = null): Flux<T> =
|
||||
if (collectionName != null) tail(query, T::class.java, collectionName) else tail(query, T::class.java)
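As with the blocking variant, these reactive extensions remove the Class parameters from ReactiveMongoOperations. A hedged sketch: the ReactiveMongoTemplate setup and the Person class are assumptions, and the extension imports from org.springframework.data.mongodb.core are assumed to be in scope:

// Illustration only; connection details and the Person document are made up.
import com.mongodb.reactivestreams.client.MongoClients
import org.springframework.data.mongodb.core.ReactiveMongoTemplate
import org.springframework.data.mongodb.core.query.Criteria.where
import org.springframework.data.mongodb.core.query.Query.query
import reactor.core.publisher.Flux
import reactor.core.publisher.Mono

class Person(var id: String? = null, var firstname: String? = null)

fun main() {
    val template = ReactiveMongoTemplate(MongoClients.create(), "extensions-demo")

    val saved: Mono<Person> = template.insert(Person(firstname = "luke"))
    val lukes: Flux<Person> = template.find(query(where("firstname").`is`("luke")))  // find(..., Person::class.java)
    val total: Mono<Long> = template.count<Person>()                                 // count(Query(), Person::class.java)

    saved.thenMany(lukes).then(total).block()
}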
|
||||
@@ -0,0 +1,151 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import static org.assertj.core.api.Assertions.*;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.eq;
|
||||
import static org.mockito.Mockito.*;
|
||||
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.MockitoJUnitRunner;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link ExecutableAggregationOperationSupport}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class ExecutableAggregationOperationSupportUnitTests {
|
||||
|
||||
@Mock MongoTemplate template;
|
||||
ExecutableAggregationOperationSupport opSupport;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
opSupport = new ExecutableAggregationOperationSupport(template);
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class) // DATAMONGO-1563
|
||||
public void throwsExceptionOnNullDomainType() {
|
||||
opSupport.aggregateAndReturn(null);
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class) // DATAMONGO-1563
|
||||
public void throwsExceptionOnNullCollectionWhenUsed() {
|
||||
opSupport.aggregateAndReturn(Person.class).inCollection(null);
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class) // DATAMONGO-1563
|
||||
public void throwsExceptionOnEmptyCollectionWhenUsed() {
|
||||
opSupport.aggregateAndReturn(Person.class).inCollection("");
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class) // DATAMONGO-1563
|
||||
public void throwsExceptionOnNullAggregation() {
|
||||
opSupport.aggregateAndReturn(Person.class).by(null);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void aggregateWithUntypedAggregationAndExplicitCollection() {
|
||||
|
||||
opSupport.aggregateAndReturn(Person.class).inCollection("star-wars").by(newAggregation(project("foo"))).all();
|
||||
|
||||
ArgumentCaptor<Class> captor = ArgumentCaptor.forClass(Class.class);
|
||||
verify(template).aggregate(any(Aggregation.class), eq("star-wars"), captor.capture());
|
||||
assertThat(captor.getValue()).isEqualTo(Person.class);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void aggregateWithUntypedAggregation() {
|
||||
|
||||
when(template.determineCollectionName(any(Class.class))).thenReturn("person");
|
||||
|
||||
opSupport.aggregateAndReturn(Person.class).by(newAggregation(project("foo"))).all();
|
||||
|
||||
ArgumentCaptor<Class> captor = ArgumentCaptor.forClass(Class.class);
|
||||
|
||||
verify(template).determineCollectionName(captor.capture());
|
||||
verify(template).aggregate(any(Aggregation.class), eq("person"), captor.capture());
|
||||
|
||||
assertThat(captor.getAllValues()).containsExactly(Person.class, Person.class);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void aggregateWithTypeAggregation() {
|
||||
|
||||
when(template.determineCollectionName(any(Class.class))).thenReturn("person");
|
||||
|
||||
opSupport.aggregateAndReturn(Jedi.class).by(newAggregation(Person.class, project("foo"))).all();
|
||||
|
||||
ArgumentCaptor<Class> captor = ArgumentCaptor.forClass(Class.class);
|
||||
|
||||
verify(template).determineCollectionName(captor.capture());
|
||||
verify(template).aggregate(any(Aggregation.class), eq("person"), captor.capture());
|
||||
|
||||
assertThat(captor.getAllValues()).containsExactly(Person.class, Jedi.class);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void aggregateStreamWithUntypedAggregationAndExplicitCollection() {
|
||||
|
||||
opSupport.aggregateAndReturn(Person.class).inCollection("star-wars").by(newAggregation(project("foo"))).stream();
|
||||
|
||||
ArgumentCaptor<Class> captor = ArgumentCaptor.forClass(Class.class);
|
||||
verify(template).aggregateStream(any(Aggregation.class), eq("star-wars"), captor.capture());
|
||||
assertThat(captor.getValue()).isEqualTo(Person.class);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void aggregateStreamWithUntypedAggregation() {
|
||||
|
||||
when(template.determineCollectionName(any(Class.class))).thenReturn("person");
|
||||
|
||||
opSupport.aggregateAndReturn(Person.class).by(newAggregation(project("foo"))).stream();
|
||||
|
||||
ArgumentCaptor<Class> captor = ArgumentCaptor.forClass(Class.class);
|
||||
|
||||
verify(template).determineCollectionName(captor.capture());
|
||||
verify(template).aggregateStream(any(Aggregation.class), eq("person"), captor.capture());
|
||||
|
||||
assertThat(captor.getAllValues()).containsExactly(Person.class, Person.class);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void aggregateStreamWithTypeAggregation() {
|
||||
|
||||
when(template.determineCollectionName(any(Class.class))).thenReturn("person");
|
||||
|
||||
opSupport.aggregateAndReturn(Jedi.class).by(newAggregation(Person.class, project("foo"))).stream();
|
||||
|
||||
ArgumentCaptor<Class> captor = ArgumentCaptor.forClass(Class.class);
|
||||
|
||||
verify(template).determineCollectionName(captor.capture());
|
||||
verify(template).aggregateStream(any(Aggregation.class), eq("person"), captor.capture());
|
||||
|
||||
assertThat(captor.getAllValues()).containsExactly(Person.class, Jedi.class);
|
||||
}
|
||||
|
||||
static class Person {}
|
||||
|
||||
static class Jedi {}
|
||||
}
|
||||
@@ -0,0 +1,242 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import static org.assertj.core.api.Assertions.*;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
import static org.springframework.data.mongodb.core.query.Query.*;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Data;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.springframework.dao.IncorrectResultSizeDataAccessException;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.geo.GeoResults;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
|
||||
import org.springframework.data.mongodb.core.index.GeospatialIndex;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.util.CloseableIterator;
|
||||
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
/**
|
||||
* Integration tests for {@link ExecutableFindOperationSupport}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public class ExecutableFindOperationSupportTests {
|
||||
|
||||
private static final String STAR_WARS = "star-wars";
|
||||
MongoTemplate template;
|
||||
|
||||
Person han;
|
||||
Person luke;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
|
||||
template = new MongoTemplate(new SimpleMongoDbFactory(new MongoClient(), "ExecutableFindOperationSupportTests"));
|
||||
template.dropCollection(STAR_WARS);
|
||||
|
||||
han = new Person();
|
||||
han.firstname = "han";
|
||||
han.id = "id-1";
|
||||
|
||||
luke = new Person();
|
||||
luke.firstname = "luke";
|
||||
luke.id = "id-2";
|
||||
|
||||
template.save(han);
|
||||
template.save(luke);
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class) // DATAMONGO-1563
|
||||
public void domainTypeIsRequired() {
|
||||
template.query(null);
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class) // DATAMONGO-1563
|
||||
public void returnTypeIsRequiredOnSet() {
|
||||
template.query(Person.class).as(null);
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class) // DATAMONGO-1563
|
||||
public void collectionIsRequiredOnSet() {
|
||||
template.query(Person.class).inCollection(null);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void findAll() {
|
||||
assertThat(template.query(Person.class).all()).containsExactlyInAnyOrder(han, luke);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void findAllWithCollection() {
|
||||
assertThat(template.query(Human.class).inCollection(STAR_WARS).all()).hasSize(2);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void findAllWithProjection() {
|
||||
assertThat(template.query(Person.class).as(Jedi.class).all()).hasOnlyElementsOfType(Jedi.class).hasSize(2);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void findAllBy() {
|
||||
|
||||
assertThat(template.query(Person.class).matching(query(where("firstname").is("luke"))).all())
|
||||
.containsExactlyInAnyOrder(luke);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void findAllByWithCollectionUsingMappingInformation() {
|
||||
|
||||
assertThat(template.query(Jedi.class).inCollection(STAR_WARS).matching(query(where("name").is("luke"))).all())
|
||||
.hasSize(1).hasOnlyElementsOfType(Jedi.class);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void findAllByWithCollection() {
|
||||
assertThat(template.query(Human.class).inCollection(STAR_WARS).matching(query(where("firstname").is("luke"))).all())
|
||||
.hasSize(1);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void findAllByWithProjection() {
|
||||
|
||||
assertThat(template.query(Person.class).as(Jedi.class).matching(query(where("firstname").is("luke"))).all())
|
||||
.hasOnlyElementsOfType(Jedi.class).hasSize(1);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void findBy() {
|
||||
assertThat(template.query(Person.class).matching(query(where("firstname").is("luke"))).one()).contains(luke);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void findByNoMatch() {
|
||||
assertThat(template.query(Person.class).matching(query(where("firstname").is("spock"))).one()).isEmpty();
|
||||
}
|
||||
|
||||
@Test(expected = IncorrectResultSizeDataAccessException.class) // DATAMONGO-1563
|
||||
public void findByTooManyResults() {
|
||||
template.query(Person.class).matching(query(where("firstname").in("han", "luke"))).one();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void streamAll() {
|
||||
|
||||
try (CloseableIterator<Person> stream = template.query(Person.class).stream()) {
|
||||
assertThat(stream).containsExactlyInAnyOrder(han, luke);
|
||||
}
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void streamAllWithCollection() {
|
||||
|
||||
try (CloseableIterator<Human> stream = template.query(Human.class).inCollection(STAR_WARS).stream()) {
|
||||
assertThat(stream).hasSize(2);
|
||||
}
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void streamAllWithProjection() {
|
||||
|
||||
try (CloseableIterator<Jedi> stream = template.query(Person.class).as(Jedi.class).stream()) {
|
||||
assertThat(stream).hasOnlyElementsOfType(Jedi.class).hasSize(2);
|
||||
}
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void streamAllBy() {
|
||||
|
||||
try (CloseableIterator<Person> stream = template.query(Person.class).matching(query(where("firstname").is("luke")))
|
||||
.stream()) {
|
||||
|
||||
assertThat(stream).containsExactlyInAnyOrder(luke);
|
||||
}
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void findAllNearBy() {
|
||||
|
||||
template.indexOps(Planet.class).ensureIndex(
|
||||
new GeospatialIndex("coordinates").typed(GeoSpatialIndexType.GEO_2DSPHERE).named("planet-coordinate-idx"));
|
||||
|
||||
Planet alderan = new Planet("alderan", new Point(-73.9836, 40.7538));
|
||||
Planet dantooine = new Planet("dantooine", new Point(-73.9928, 40.7193));
|
||||
|
||||
template.save(alderan);
|
||||
template.save(dantooine);
|
||||
|
||||
GeoResults<Planet> results = template.query(Planet.class).near(NearQuery.near(-73.9667, 40.78).spherical(true))
|
||||
.all();
|
||||
assertThat(results.getContent()).hasSize(2);
|
||||
assertThat(results.getContent().get(0).getDistance()).isNotNull();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void findAllNearByWithCollectionAndProjection() {
|
||||
|
||||
template.indexOps(Planet.class).ensureIndex(
|
||||
new GeospatialIndex("coordinates").typed(GeoSpatialIndexType.GEO_2DSPHERE).named("planet-coordinate-idx"));
|
||||
|
||||
Planet alderan = new Planet("alderan", new Point(-73.9836, 40.7538));
|
||||
Planet dantooine = new Planet("dantooine", new Point(-73.9928, 40.7193));
|
||||
|
||||
template.save(alderan);
|
||||
template.save(dantooine);
|
||||
|
||||
GeoResults<Human> results = template.query(Object.class).inCollection(STAR_WARS).as(Human.class)
|
||||
.near(NearQuery.near(-73.9667, 40.78).spherical(true)).all();
|
||||
|
||||
assertThat(results.getContent()).hasSize(2);
|
||||
assertThat(results.getContent().get(0).getDistance()).isNotNull();
|
||||
assertThat(results.getContent().get(0).getContent()).isInstanceOf(Human.class);
|
||||
assertThat(results.getContent().get(0).getContent().getId()).isEqualTo("alderan");
|
||||
}
|
||||
|
||||
@Data
|
||||
@org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS)
|
||||
static class Person {
|
||||
@Id String id;
|
||||
String firstname;
|
||||
}
|
||||
|
||||
@Data
|
||||
static class Human {
|
||||
@Id String id;
|
||||
}
|
||||
|
||||
@Data
|
||||
static class Jedi {
|
||||
|
||||
@Field("firstname") String name;
|
||||
}
|
||||
|
||||
@Data
|
||||
@AllArgsConstructor
|
||||
@org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS)
|
||||
static class Planet {
|
||||
|
||||
@Id String name;
|
||||
Point coordinates;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,147 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import static org.assertj.core.api.Assertions.*;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.eq;
|
||||
import static org.mockito.Mockito.*;
|
||||
import static org.mockito.Mockito.anyList;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.MockitoJUnitRunner;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link ExecutableInsertOperationSupport}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class ExecutableInsertOperationSupportUnitTests {
|
||||
|
||||
private static final String STAR_WARS = "star-wars";
|
||||
|
||||
@Mock MongoTemplate template;
|
||||
@Mock BulkOperations bulkOperations;
|
||||
|
||||
ExecutableInsertOperationSupport ops;
|
||||
|
||||
Person luke, han;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
|
||||
when(template.bulkOps(any(), any(), any())).thenReturn(bulkOperations);
|
||||
when(template.determineCollectionName(any(Class.class))).thenReturn(STAR_WARS);
|
||||
when(bulkOperations.insert(anyList())).thenReturn(bulkOperations);
|
||||
|
||||
ops = new ExecutableInsertOperationSupport(template);
|
||||
|
||||
luke = new Person();
|
||||
luke.id = "id-1";
|
||||
luke.firstname = "luke";
|
||||
|
||||
han = new Person();
|
||||
han.firstname = "han";
|
||||
han.id = "id-2";
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class) // DATAMONGO-1563
|
||||
public void nullCollectionShouldThrowException() {
|
||||
ops.insert(Person.class).inCollection(null);
|
||||
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class) // DATAMONGO-1563
|
||||
public void nullBulkModeShouldThrowException() {
|
||||
ops.insert(Person.class).withBulkMode(null);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void insertShouldUseDerivedCollectionName() {
|
||||
|
||||
ops.insert(Person.class).one(luke);
|
||||
|
||||
ArgumentCaptor<Class> captor = ArgumentCaptor.forClass(Class.class);
|
||||
|
||||
verify(template).determineCollectionName(captor.capture());
|
||||
verify(template).insert(eq(luke), eq(STAR_WARS));
|
||||
|
||||
assertThat(captor.getAllValues()).containsExactly(Person.class);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void insertShouldUseExplicitCollectionName() {
|
||||
|
||||
ops.insert(Person.class).inCollection(STAR_WARS).one(luke);
|
||||
|
||||
verify(template, never()).determineCollectionName(any(Class.class));
|
||||
verify(template).insert(eq(luke), eq(STAR_WARS));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void insertCollectionShouldDelegateCorrectly() {
|
||||
|
||||
ops.insert(Person.class).all(Arrays.asList(luke, han));
|
||||
|
||||
verify(template).determineCollectionName(any(Class.class));
|
||||
verify(template).insert(anyList(), eq(STAR_WARS));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void bulkInsertCollectionShouldDelegateCorrectly() {
|
||||
|
||||
ops.insert(Person.class).bulk(Arrays.asList(luke, han));
|
||||
|
||||
ArgumentCaptor<Class> captor = ArgumentCaptor.forClass(Class.class);
|
||||
|
||||
verify(template).determineCollectionName(any(Class.class));
|
||||
verify(template).bulkOps(eq(BulkMode.ORDERED), captor.capture(), eq(STAR_WARS));
|
||||
verify(bulkOperations).insert(anyList());
|
||||
verify(bulkOperations).execute();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void bulkInsertWithBulkModeShouldDelegateCorrectly() {
|
||||
|
||||
ops.insert(Person.class).withBulkMode(BulkMode.UNORDERED).bulk(Arrays.asList(luke, han));
|
||||
|
||||
ArgumentCaptor<Class> captor = ArgumentCaptor.forClass(Class.class);
|
||||
|
||||
verify(template).determineCollectionName(any(Class.class));
|
||||
verify(template).bulkOps(eq(BulkMode.UNORDERED), captor.capture(), eq(STAR_WARS));
|
||||
verify(bulkOperations).insert(anyList());
|
||||
verify(bulkOperations).execute();
|
||||
}
|
||||
|
||||
@Data
|
||||
@org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS)
|
||||
static class Person {
|
||||
@Id String id;
|
||||
String firstname;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,111 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import static org.assertj.core.api.Assertions.*;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
import static org.springframework.data.mongodb.core.query.Query.*;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.client.result.DeleteResult;
|
||||
|
||||
/**
|
||||
* Integration tests for {@link ExecutableRemoveOperationSupport}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public class ExecutableRemoveOperationSupportTests {
|
||||
|
||||
private static final String STAR_WARS = "star-wars";
|
||||
MongoTemplate template;
|
||||
|
||||
Person han;
|
||||
Person luke;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
|
||||
template = new MongoTemplate(new SimpleMongoDbFactory(new MongoClient(), "ExecutableRemoveOperationSupportTests"));
|
||||
template.dropCollection(STAR_WARS);
|
||||
|
||||
han = new Person();
|
||||
han.firstname = "han";
|
||||
han.id = "id-1";
|
||||
|
||||
luke = new Person();
|
||||
luke.firstname = "luke";
|
||||
luke.id = "id-2";
|
||||
|
||||
template.save(han);
|
||||
template.save(luke);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void removeAll() {
|
||||
|
||||
DeleteResult result = template.remove(Person.class).all();
|
||||
|
||||
assertThat(result.getDeletedCount()).isEqualTo(2L);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void removeAllMatching() {
|
||||
|
||||
DeleteResult result = template.remove(Person.class).matching(query(where("firstname").is("han"))).all();
|
||||
|
||||
assertThat(result.getDeletedCount()).isEqualTo(1L);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void removeAllMatchingWithAlternateDomainTypeAndCollection() {
|
||||
|
||||
DeleteResult result = template.remove(Jedi.class).inCollection(STAR_WARS).matching(query(where("name").is("luke")))
|
||||
.all();
|
||||
|
||||
assertThat(result.getDeletedCount()).isEqualTo(1L);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void removeAndReturnAllMatching() {
|
||||
|
||||
List<Person> result = template.remove(Person.class).matching(query(where("firstname").is("han"))).findAndRemove();
|
||||
|
||||
assertThat(result).containsExactly(han);
|
||||
}
|
||||
|
||||
@Data
|
||||
@org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS)
|
||||
static class Person {
|
||||
@Id String id;
|
||||
String firstname;
|
||||
}
|
||||
|
||||
@Data
|
||||
static class Jedi {
|
||||
|
||||
@Field("firstname") String name;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,192 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import static org.assertj.core.api.Assertions.*;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
import static org.springframework.data.mongodb.core.query.Query.*;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import org.bson.BsonString;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.client.result.UpdateResult;
|
||||
|
||||
/**
|
||||
* Integration tests for {@link ExecutableUpdateOperationSupport}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public class ExecutableUpdateOperationSupportTests {
|
||||
|
||||
private static final String STAR_WARS = "star-wars";
|
||||
MongoTemplate template;
|
||||
|
||||
Person han;
|
||||
Person luke;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
|
||||
template = new MongoTemplate(new SimpleMongoDbFactory(new MongoClient(), "ExecutableUpdateOperationSupportTests"));
|
||||
template.dropCollection(STAR_WARS);
|
||||
|
||||
han = new Person();
|
||||
han.firstname = "han";
|
||||
han.id = "id-1";
|
||||
|
||||
luke = new Person();
|
||||
luke.firstname = "luke";
|
||||
luke.id = "id-2";
|
||||
|
||||
template.save(han);
|
||||
template.save(luke);
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class) // DATAMONGO-1563
|
||||
public void domainTypeIsRequired() {
|
||||
template.update(null);
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class) // DATAMONGO-1563
|
||||
public void updateIsRequired() {
|
||||
template.update(Person.class).apply(null);
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class) // DATAMONGO-1563
|
||||
public void collectionIsRequiredOnSet() {
|
||||
template.update(Person.class).inCollection(null);
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class) // DATAMONGO-1563
|
||||
public void findAndModifyOptionsAreRequiredOnSet() {
|
||||
template.update(Person.class).apply(new Update()).withOptions(null);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void updateFirst() {
|
||||
|
||||
UpdateResult result = template.update(Person.class).apply(new Update().set("firstname", "Han")).first();
|
||||
|
||||
assertThat(result.getModifiedCount()).isEqualTo(1L);
|
||||
assertThat(result.getUpsertedId()).isNull();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void updateAll() {
|
||||
|
||||
UpdateResult result = template.update(Person.class).apply(new Update().set("firstname", "Han")).all();
|
||||
|
||||
assertThat(result.getModifiedCount()).isEqualTo(2L);
|
||||
assertThat(result.getUpsertedId()).isNull();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void updateAllMatching() {
|
||||
|
||||
UpdateResult result = template.update(Person.class).matching(queryHan()).apply(new Update().set("firstname", "Han"))
|
||||
.all();
|
||||
|
||||
assertThat(result.getModifiedCount()).isEqualTo(1L);
|
||||
assertThat(result.getUpsertedId()).isNull();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void updateWithDifferentDomainClassAndCollection() {
|
||||
|
||||
UpdateResult result = template.update(Jedi.class).inCollection(STAR_WARS)
|
||||
.matching(query(where("_id").is(han.getId()))).apply(new Update().set("name", "Han")).all();
|
||||
|
||||
assertThat(result.getModifiedCount()).isEqualTo(1L);
|
||||
assertThat(result.getUpsertedId()).isNull();
|
||||
assertThat(template.findOne(queryHan(), Person.class)).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname",
|
||||
"Han");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void findAndModify() {
|
||||
|
||||
Optional<Person> result = template.update(Person.class).matching(queryHan())
|
||||
.apply(new Update().set("firstname", "Han")).findAndModify();
|
||||
|
||||
assertThat(result).contains(han);
|
||||
assertThat(template.findOne(queryHan(), Person.class)).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname",
|
||||
"Han");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void findAndModifyWithDifferentDomainTypeAndCollection() {
|
||||
|
||||
Optional<Jedi> result = template.update(Jedi.class).inCollection(STAR_WARS)
|
||||
.matching(query(where("_id").is(han.getId()))).apply(new Update().set("name", "Han")).findAndModify();
|
||||
|
||||
assertThat(result.get()).hasFieldOrPropertyWithValue("name", "han");
|
||||
assertThat(template.findOne(queryHan(), Person.class)).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname",
|
||||
"Han");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void findAndModifyWithOptions() {
|
||||
|
||||
Optional<Person> result = template.update(Person.class).matching(queryHan())
|
||||
.apply(new Update().set("firstname", "Han")).withOptions(FindAndModifyOptions.options().returnNew(true))
|
||||
.findAndModify();
|
||||
|
||||
assertThat(result.get()).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", "Han");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1563
|
||||
public void upsert() {
|
||||
|
||||
UpdateResult result = template.update(Person.class).matching(query(where("id").is("id-3")))
|
||||
.apply(new Update().set("firstname", "Chewbacca")).upsert();
|
||||
|
||||
assertThat(result.getModifiedCount()).isEqualTo(0L);
|
||||
assertThat(result.getUpsertedId()).isEqualTo(new BsonString("id-3"));
|
||||
}
|
||||
|
||||
private Query queryHan() {
|
||||
return query(where("id").is(han.getId()));
|
||||
}
|
||||
|
||||
@Data
|
||||
@org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS)
|
||||
static class Person {
|
||||
@Id String id;
|
||||
String firstname;
|
||||
}
|
||||
|
||||
@Data
|
||||
static class Human {
|
||||
@Id String id;
|
||||
}
|
||||
|
||||
@Data
|
||||
static class Jedi {
|
||||
|
||||
@Field("firstname") String name;
|
||||
}
|
||||
}
|
||||
@@ -46,6 +46,7 @@ import com.mongodb.DBRef;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public abstract class MongoOperationsUnitTests {
|
||||
@@ -137,7 +138,7 @@ public abstract class MongoOperationsUnitTests {
|
||||
new Execution() {
|
||||
@Override
|
||||
public void doWith(MongoOperations operations) {
|
||||
operations.createCollection("foo", new CollectionOptions(1, 1, true));
|
||||
operations.createCollection("foo", CollectionOptions.empty().size(1).maxDocuments(1).capped());
|
||||
}
|
||||
}.assertDataAccessException();
|
||||
}
|
||||
|
||||
@@ -114,8 +114,6 @@ public class MongoTemplateTests {
|
||||
|
||||
private static final org.springframework.data.util.Version TWO_DOT_FOUR = org.springframework.data.util.Version
|
||||
.parse("2.4");
|
||||
private static final org.springframework.data.util.Version TWO_DOT_EIGHT = org.springframework.data.util.Version
|
||||
.parse("2.8");
|
||||
private static final org.springframework.data.util.Version THREE_DOT_FOUR = org.springframework.data.util.Version
|
||||
.parse("3.4");
|
||||
|
||||
@@ -333,6 +331,26 @@ public class MongoTemplateTests {
|
||||
template.insertAll(records);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1687
|
||||
public void createCappedCollection() {
|
||||
|
||||
template.createCollection(Person.class, CollectionOptions.empty().capped().size(1000).maxDocuments(1000));
|
||||
|
||||
org.bson.Document collectionOptions = getCollectionInfo(template.getCollectionName(Person.class)).get("options",
|
||||
org.bson.Document.class);
|
||||
assertThat(collectionOptions.get("capped"), is(true));
|
||||
}
|
||||
|
||||
private org.bson.Document getCollectionInfo(String collectionName) {
|
||||
|
||||
return template.execute(db -> {
|
||||
|
||||
org.bson.Document result = db.runCommand(new org.bson.Document().append("listCollections", 1).append("filter",
|
||||
new org.bson.Document("name", collectionName)));
|
||||
return (org.bson.Document) result.get("cursor", org.bson.Document.class).get("firstBatch", List.class).get(0);
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("deprecation")
|
||||
public void testEnsureIndex() throws Exception {
|
||||
@@ -1141,7 +1159,7 @@ public class MongoTemplateTests {
|
||||
|
||||
@Test // DATADOC-166
|
||||
public void removingNullIsANoOp() {
|
||||
template.remove(null);
|
||||
template.remove((Object) null);
|
||||
}
|
||||
|
||||
@Test // DATADOC-240, DATADOC-212
|
||||
|
||||
@@ -40,6 +40,7 @@ import com.mongodb.MongoClient;
|
||||
* Integration tests for DATAMONGO-1289.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration
|
||||
@@ -69,7 +70,7 @@ public class NoExplicitIdTests {
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1289
|
||||
public void saveAndRetrieveTypeWithoutIdPorpertyViaTemplate() {
|
||||
public void saveAndRetrieveTypeWithoutIdPropertyViaTemplate() {
|
||||
|
||||
TypeWithoutIdProperty noid = new TypeWithoutIdProperty();
|
||||
noid.someString = "o.O";
|
||||
@@ -83,7 +84,7 @@ public class NoExplicitIdTests {
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1289
|
||||
public void saveAndRetrieveTypeWithoutIdPorpertyViaRepository() {
|
||||
public void saveAndRetrieveTypeWithoutIdPropertyViaRepository() {
|
||||
|
||||
TypeWithoutIdProperty noid = new TypeWithoutIdProperty();
|
||||
noid.someString = "o.O";
|
||||
@@ -96,7 +97,7 @@ public class NoExplicitIdTests {
|
||||
|
||||
@Test // DATAMONGO-1289
|
||||
@SuppressWarnings("unchecked")
|
||||
public void saveAndRetrieveTypeWithoutIdPorpertyViaRepositoryFindOne() {
|
||||
public void saveAndRetrieveTypeWithoutIdPropertyViaRepositoryFindOne() {
|
||||
|
||||
TypeWithoutIdProperty noid = new TypeWithoutIdProperty();
|
||||
noid.someString = "o.O";
|
||||
|
||||
@@ -0,0 +1,105 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import static org.assertj.core.api.Assertions.*;
|
||||
|
||||
import reactor.core.publisher.Flux;
|
||||
import reactor.core.publisher.Mono;
|
||||
import reactor.test.StepVerifier;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.junit.Before;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.mongodb.config.AbstractReactiveMongoConfiguration;
|
||||
import org.springframework.data.mongodb.test.util.MongoVersionRule;
|
||||
import org.springframework.data.util.Version;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
import com.mongodb.reactivestreams.client.MongoClients;
|
||||
|
||||
/**
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
public class ReactiveMongoTemplateCollationTests {
|
||||
|
||||
public static @ClassRule MongoVersionRule REQUIRES_AT_LEAST_3_4_0 = MongoVersionRule.atLeast(Version.parse("3.4.0"));
|
||||
public static final String COLLECTION_NAME = "collation-1";
|
||||
|
||||
@Configuration
|
||||
static class Config extends AbstractReactiveMongoConfiguration {
|
||||
|
||||
@Override
|
||||
public com.mongodb.reactivestreams.client.MongoClient mongoClient() {
|
||||
return MongoClients.create();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getDatabaseName() {
|
||||
return "collation-tests";
|
||||
}
|
||||
}
|
||||
|
||||
@Autowired ReactiveMongoTemplate template;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
StepVerifier.create(template.dropCollection(COLLECTION_NAME)).verifyComplete();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1693
|
||||
public void createCollectionWithCollation() {
|
||||
|
||||
StepVerifier.create(template.createCollection(COLLECTION_NAME, CollectionOptions.just(Collation.of("en_US")))) //
|
||||
.expectNextCount(1) //
|
||||
.verifyComplete();
|
||||
|
||||
Mono<Document> collation = getCollationInfo(COLLECTION_NAME);
|
||||
StepVerifier.create(collation) //
|
||||
.consumeNextWith(document -> assertThat(document.get("locale")).isEqualTo("en_US")) //
|
||||
.verifyComplete();
|
||||
|
||||
}
|
||||
|
||||
private Mono<Document> getCollationInfo(String collectionName) {
|
||||
|
||||
return getCollectionInfo(collectionName) //
|
||||
.map(it -> it.get("options", Document.class)) //
|
||||
.map(it -> it.get("collation", Document.class));
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private Mono<Document> getCollectionInfo(String collectionName) {
|
||||
|
||||
return template.execute(db -> {
|
||||
|
||||
return Flux
|
||||
.from(db.runCommand(new Document() //
|
||||
.append("listCollections", 1) //
|
||||
.append("filter", new Document("name", collectionName)))) //
|
||||
.map(it -> it.get("cursor", Document.class))
|
||||
.flatMapIterable(it -> (List<Document>) it.get("firstBatch", List.class));
|
||||
}).next();
|
||||
}
|
||||
|
||||
}
|
||||
@@ -803,7 +803,7 @@ public class ReactiveMongoTemplateTests {
|
||||
|
||||
StepVerifier.create(template.dropCollection("capped")
|
||||
.then(template.createCollection("capped", //
|
||||
new CollectionOptions(1000, 10, true)))
|
||||
CollectionOptions.empty().size(1000).maxDocuments(10).capped()))
|
||||
.then(template.insert(new Document("random", Math.random()).append("key", "value"), //
|
||||
"capped")))
|
||||
.expectNextCount(1).verifyComplete();
|
||||
@@ -825,7 +825,7 @@ public class ReactiveMongoTemplateTests {
|
||||
|
||||
StepVerifier.create(template.dropCollection("capped")
|
||||
.then(template.createCollection("capped", //
|
||||
new CollectionOptions(1000, 10, true)))
|
||||
CollectionOptions.empty().size(1000).maxDocuments(10).capped()))
|
||||
.then(template.insert(new Document("random", Math.random()).append("key", "value"), //
|
||||
"capped")))
|
||||
.expectNextCount(1).verifyComplete();
|
||||
|
||||
@@ -21,6 +21,8 @@ import static org.mockito.Mockito.*;
|
||||
import static org.mockito.Mockito.any;
|
||||
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
|
||||
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.junit.Before;
|
||||
import org.junit.Ignore;
|
||||
@@ -137,6 +139,8 @@ public class ReactiveMongoTemplateUnitTests {
|
||||
@Test // DATAMONGO-1518
|
||||
public void findAndModfiyShoudUseCollationWhenPresent() {
|
||||
|
||||
when(collection.findOneAndUpdate(any(), any(), any())).thenReturn(Mono.empty());
|
||||
|
||||
template.findAndModify(new BasicQuery("{}").collation(Collation.of("fr")), new Update(), AutogenerateableId.class)
|
||||
.subscribe();
|
||||
|
||||
@@ -149,6 +153,8 @@ public class ReactiveMongoTemplateUnitTests {
|
||||
@Test // DATAMONGO-1518
|
||||
public void findAndRemoveShouldUseCollationWhenPresent() {
|
||||
|
||||
when(collection.findOneAndDelete(any(), any())).thenReturn(Mono.empty());
|
||||
|
||||
template.findAndRemove(new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class).subscribe();
|
||||
|
||||
ArgumentCaptor<FindOneAndDeleteOptions> options = ArgumentCaptor.forClass(FindOneAndDeleteOptions.class);
|
||||
@@ -174,6 +180,8 @@ public class ReactiveMongoTemplateUnitTests {
|
||||
@Test // DATAMONGO-1518
|
||||
public void updateOneShouldUseCollationWhenPresent() {
|
||||
|
||||
when(collection.updateOne(any(), any(), any())).thenReturn(Mono.empty());
|
||||
|
||||
template.updateFirst(new BasicQuery("{}").collation(Collation.of("fr")), new Update().set("foo", "bar"),
|
||||
AutogenerateableId.class).subscribe();
|
||||
|
||||
@@ -186,6 +194,8 @@ public class ReactiveMongoTemplateUnitTests {
|
||||
@Test // DATAMONGO-1518
|
||||
public void updateManyShouldUseCollationWhenPresent() {
|
||||
|
||||
when(collection.updateMany(any(), any(), any())).thenReturn(Mono.empty());
|
||||
|
||||
template.updateMulti(new BasicQuery("{}").collation(Collation.of("fr")), new Update().set("foo", "bar"),
|
||||
AutogenerateableId.class).subscribe();
|
||||
|
||||
@@ -199,6 +209,8 @@ public class ReactiveMongoTemplateUnitTests {
|
||||
@Test // DATAMONGO-1518
|
||||
public void replaceOneShouldUseCollationWhenPresent() {
|
||||
|
||||
when(collection.replaceOne(any(), any(), any())).thenReturn(Mono.empty());
|
||||
|
||||
template.updateFirst(new BasicQuery("{}").collation(Collation.of("fr")), new Update(), AutogenerateableId.class)
|
||||
.subscribe();
|
||||
|
||||
|
||||
@@ -15,8 +15,10 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.gridfs;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.junit.Assert.assertThat;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
import static org.springframework.data.mongodb.core.query.Query.*;
|
||||
import static org.springframework.data.mongodb.gridfs.GridFsCriteria.*;
|
||||
@@ -41,7 +43,6 @@ import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
import com.mongodb.client.gridfs.GridFSFindIterable;
|
||||
import com.mongodb.gridfs.GridFSFile;
|
||||
|
||||
/**
|
||||
* Integration tests for {@link GridFsTemplate}.
|
||||
@@ -210,15 +211,15 @@ public class GridFsTemplateIntegrationTests {
|
||||
assertEquals(((BsonObjectId) files.get(0).getId()).getValue(), reference);
|
||||
}
|
||||
|
||||
private static void assertSame(GridFSFile left, GridFSFile right) {
|
||||
@Test // DATAMONGO-1695
|
||||
public void readsContentTypeCorrectly() throws IOException {
|
||||
|
||||
assertThat(left.getId(), is(right.getId()));
|
||||
assertThat(left.getMD5(), is(right.getMD5()));
|
||||
assertThat(left.getMetaData(), is(right.getMetaData()));
|
||||
operations.store(resource.getInputStream(), "someName", "contentType");
|
||||
|
||||
assertThat(operations.getResource("someName").getContentType()).isEqualTo("contentType");
|
||||
}
|
||||
|
||||
class Metadata {
|
||||
|
||||
String version;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -917,7 +917,7 @@ public abstract class AbstractPersonRepositoryIntegrationTests {
|
||||
dave.setCreator(user);
|
||||
operations.save(dave);
|
||||
|
||||
assertThat(repository.findOne(QPerson.person.creator.eq(user)), is(dave));
|
||||
assertThat(repository.findOne(QPerson.person.creator.eq(user)).get(), is(dave));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-969
|
||||
|
||||
@@ -15,8 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository;
|
||||
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.assertj.core.api.Assertions.*;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
@@ -31,6 +30,7 @@ import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration("config/MongoNamespaceIntegrationTests-context.xml")
|
||||
@@ -49,7 +49,7 @@ public class ContactRepositoryIntegrationTests {
|
||||
Person person = new Person("Oliver", "Gierke");
|
||||
Contact result = repository.save(person);
|
||||
|
||||
assertTrue(repository.findById(result.getId().toString()).get() instanceof Person);
|
||||
assertThat(repository.findById(result.getId().toString())).containsInstanceOf(Person.class);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1245
|
||||
@@ -57,6 +57,6 @@ public class ContactRepositoryIntegrationTests {
|
||||
|
||||
Person person = repository.save(new Person("Oliver", "Gierke"));
|
||||
|
||||
assertThat(repository.findOne(Example.of(person)), instanceOf(Person.class));
|
||||
assertThat(repository.findOne(Example.of(person))).containsInstanceOf(Person.class);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -63,6 +63,7 @@ import org.springframework.util.ClassUtils;
|
||||
* Test for {@link ReactiveMongoRepository} query methods.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration("classpath:reactive-infrastructure.xml")
|
||||
@@ -175,7 +176,7 @@ public class ReactiveMongoRepositoryTests implements BeanClassLoaderAware, BeanF
|
||||
StepVerifier
|
||||
.create(template.dropCollection(Capped.class) //
|
||||
.then(template.createCollection(Capped.class, //
|
||||
new CollectionOptions(1000, 100, true)))) //
|
||||
CollectionOptions.empty().size(1000).maxDocuments(100).capped()))) //
|
||||
.expectNextCount(1) //
|
||||
.verifyComplete();
|
||||
|
||||
@@ -200,7 +201,7 @@ public class ReactiveMongoRepositoryTests implements BeanClassLoaderAware, BeanF
|
||||
StepVerifier
|
||||
.create(template.dropCollection(Capped.class) //
|
||||
.then(template.createCollection(Capped.class, //
|
||||
new CollectionOptions(1000, 100, true)))) //
|
||||
CollectionOptions.empty().size(1000).maxDocuments(100).capped()))) //
|
||||
.expectNextCount(1) //
|
||||
.verifyComplete();
|
||||
|
||||
|
||||
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.repository;
|
||||
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.springframework.data.domain.ExampleMatcher.*;
|
||||
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
@@ -34,7 +35,9 @@ import org.springframework.beans.factory.BeanClassLoaderAware;
|
||||
import org.springframework.beans.factory.BeanFactory;
|
||||
import org.springframework.beans.factory.BeanFactoryAware;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.dao.IncorrectResultSizeDataAccessException;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.domain.Example;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.domain.Sort.Order;
|
||||
@@ -47,9 +50,10 @@ import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
/**
|
||||
* Test for {@link ReactiveMongoRepository}.
|
||||
* Tests for {@link ReactiveMongoRepository}.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration("classpath:reactive-infrastructure.xml")
|
||||
@@ -115,28 +119,38 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
|
||||
StepVerifier.create(repository.existsById(Mono.just(dave.id))).expectNext(true).verifyComplete();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1712
|
||||
public void existsByFluxOfIdShouldReturnTrueForExistingObject() {
|
||||
StepVerifier.create(repository.existsById(Flux.just(dave.id, oliver.id))).expectNext(true).verifyComplete();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1444
|
||||
public void existsByEmptyMonoOfIdShouldReturnEmptyMono() {
|
||||
StepVerifier.create(repository.existsById(Mono.empty())).verifyComplete();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1444
|
||||
public void findOneShouldReturnObject() {
|
||||
public void findByIdShouldReturnObject() {
|
||||
StepVerifier.create(repository.findById(dave.id)).expectNext(dave).verifyComplete();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1444
|
||||
public void findOneShouldCompleteWithoutValueForAbsentObject() {
|
||||
public void findByIdShouldCompleteWithoutValueForAbsentObject() {
|
||||
StepVerifier.create(repository.findById("unknown")).verifyComplete();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1444
|
||||
public void findOneByMonoOfIdShouldReturnTrueForExistingObject() {
|
||||
public void findByIdByMonoOfIdShouldReturnTrueForExistingObject() {
|
||||
StepVerifier.create(repository.findById(Mono.just(dave.id))).expectNext(dave).verifyComplete();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1712
|
||||
public void findByIdByFluxOfIdShouldReturnTrueForExistingObject() {
|
||||
StepVerifier.create(repository.findById(Flux.just(dave.id, oliver.id))).expectNext(dave).verifyComplete();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1444
|
||||
public void findOneByEmptyMonoOfIdShouldReturnEmptyMono() {
|
||||
public void findByIdByEmptyMonoOfIdShouldReturnEmptyMono() {
|
||||
StepVerifier.create(repository.findById(Mono.empty())).verifyComplete();
|
||||
}
|
||||
|
||||
@@ -324,6 +338,23 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
|
||||
StepVerifier.create(repository.findById(dave.id)).verifyComplete();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1712
|
||||
public void deleteByIdUsingMonoShouldRemoveEntity() {
|
||||
|
||||
StepVerifier.create(repository.deleteById(Mono.just(dave.id))).verifyComplete();
|
||||
|
||||
StepVerifier.create(repository.existsById(dave.id)).expectNext(false).verifyComplete();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1712
|
||||
public void deleteByIdUsingFluxShouldRemoveEntity() {
|
||||
|
||||
StepVerifier.create(repository.deleteById(Flux.just(dave.id, oliver.id))).verifyComplete();
|
||||
|
||||
StepVerifier.create(repository.existsById(dave.id)).expectNext(false).verifyComplete();
|
||||
StepVerifier.create(repository.existsById(oliver.id)).expectNext(true).verifyComplete();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1444
|
||||
public void deleteShouldRemoveEntity() {
|
||||
|
||||
@@ -353,6 +384,63 @@ public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware,
|
||||
StepVerifier.create(repository.findByLastname("Matthews")).expectNext(oliver).verifyComplete();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1619
|
||||
public void findOneByExampleShouldReturnObject() {
|
||||
|
||||
Example<ReactivePerson> example = Example.of(dave);
|
||||
|
||||
StepVerifier.create(repository.findOne(example)).expectNext(dave).verifyComplete();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1619
|
||||
public void findAllByExampleShouldReturnObjects() {
|
||||
|
||||
Example<ReactivePerson> example = Example.of(dave, matching().withIgnorePaths("id", "age", "firstname"));
|
||||
|
||||
StepVerifier.create(repository.findAll(example)).expectNextCount(2).verifyComplete();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1619
|
||||
public void findAllByExampleAndSortShouldReturnObjects() {
|
||||
|
||||
Example<ReactivePerson> example = Example.of(dave, matching().withIgnorePaths("id", "age", "firstname"));
|
||||
|
||||
StepVerifier.create(repository.findAll(example, Sort.by("firstname"))).expectNext(dave, oliver).verifyComplete();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1619
|
||||
public void countByExampleShouldCountObjects() {
|
||||
|
||||
Example<ReactivePerson> example = Example.of(dave, matching().withIgnorePaths("id", "age", "firstname"));
|
||||
|
||||
StepVerifier.create(repository.count(example)).expectNext(2L).verifyComplete();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1619
|
||||
public void existsByExampleShouldReturnExisting() {
|
||||
|
||||
Example<ReactivePerson> example = Example.of(dave, matching().withIgnorePaths("id", "age", "firstname"));
|
||||
|
||||
StepVerifier.create(repository.exists(example)).expectNext(true).verifyComplete();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1619
|
||||
public void existsByExampleShouldReturnNonExisting() {
|
||||
|
||||
Example<ReactivePerson> example = Example.of(new ReactivePerson("foo", "bar", -1));
|
||||
|
||||
StepVerifier.create(repository.exists(example)).expectNext(false).verifyComplete();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1619
|
||||
public void findOneShouldEmitIncorrectResultSizeDataAccessExceptionWhenMoreThanOneElementFound() {
|
||||
|
||||
Example<ReactivePerson> example = Example.of(new ReactivePerson(null, "Matthews", -1),
|
||||
matching().withIgnorePaths("age"));
|
||||
|
||||
StepVerifier.create(repository.findOne(example)).expectError(IncorrectResultSizeDataAccessException.class);
|
||||
}
|
||||
|
||||
interface ReactivePersonRepostitory extends ReactiveMongoRepository<ReactivePerson, String> {
|
||||
|
||||
Flux<ReactivePerson> findByLastname(String lastname);
|
||||
|
||||
@@ -15,8 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.support;
|
||||
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.assertj.core.api.Assertions.*;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
@@ -25,6 +24,7 @@ import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.dao.IncorrectResultSizeDataAccessException;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
@@ -35,10 +35,11 @@ import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
/**
|
||||
* Integration test for {@link QueryDslMongoRepository}.
|
||||
* Integration test for {@link QuerydslMongoRepository}.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@ContextConfiguration(
|
||||
locations = "/org/springframework/data/mongodb/repository/PersonRepositoryIntegrationTests-context.xml")
|
||||
@@ -46,7 +47,7 @@ import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
public class QueryDslMongoRepositoryIntegrationTests {
|
||||
|
||||
@Autowired MongoOperations operations;
|
||||
QueryDslMongoRepository<Person, String> repository;
|
||||
QuerydslMongoRepository<Person, String> repository;
|
||||
|
||||
Person dave, oliver, carter;
|
||||
QPerson person;
|
||||
@@ -56,7 +57,7 @@ public class QueryDslMongoRepositoryIntegrationTests {
|
||||
|
||||
MongoRepositoryFactory factory = new MongoRepositoryFactory(operations);
|
||||
MongoEntityInformation<Person, String> entityInformation = factory.getEntityInformation(Person.class);
|
||||
repository = new QueryDslMongoRepository<Person, String>(entityInformation, operations);
|
||||
repository = new QuerydslMongoRepository<>(entityInformation, operations);
|
||||
|
||||
operations.dropCollection(Person.class);
|
||||
|
||||
@@ -72,8 +73,8 @@ public class QueryDslMongoRepositoryIntegrationTests {
|
||||
@Test // DATAMONGO-1146
|
||||
public void shouldSupportExistsWithPredicate() throws Exception {
|
||||
|
||||
assertThat(repository.exists(person.firstname.eq("Dave")), is(true));
|
||||
assertThat(repository.exists(person.firstname.eq("Unknown")), is(false));
|
||||
assertThat(repository.exists(person.firstname.eq("Dave"))).isTrue();
|
||||
assertThat(repository.exists(person.firstname.eq("Unknown"))).isFalse();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1167
|
||||
@@ -81,9 +82,21 @@ public class QueryDslMongoRepositoryIntegrationTests {
|
||||
|
||||
List<Person> users = repository.findAll(person.lastname.isNotNull(), Sort.by(Direction.ASC, "firstname"));
|
||||
|
||||
assertThat(users, hasSize(3));
|
||||
assertThat(users.get(0).getFirstname(), is(carter.getFirstname()));
|
||||
assertThat(users.get(2).getFirstname(), is(oliver.getFirstname()));
|
||||
assertThat(users, hasItems(carter, dave, oliver));
|
||||
assertThat(users).containsExactly(carter, dave, oliver);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1690
|
||||
public void findOneWithPredicateReturnsResultCorrectly() {
|
||||
assertThat(repository.findOne(person.firstname.eq(dave.getFirstname()))).contains(dave);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1690
|
||||
public void findOneWithPredicateReturnsOptionalEmptyWhenNoDataFound() {
|
||||
assertThat(repository.findOne(person.firstname.eq("batman"))).isNotPresent();
|
||||
}
|
||||
|
||||
@Test(expected = IncorrectResultSizeDataAccessException.class) // DATAMONGO-1690
|
||||
public void findOneWithPredicateThrowsExceptionForNonUniqueResults() {
|
||||
repository.findOne(person.firstname.contains("e"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,8 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.support;
|
||||
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.assertj.core.api.Assertions.*;
|
||||
import static org.springframework.data.domain.ExampleMatcher.*;
|
||||
|
||||
import java.util.ArrayList;
|
||||
@@ -34,9 +33,9 @@ import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.domain.Example;
|
||||
import org.springframework.data.domain.ExampleMatcher.StringMatcher;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.ExampleMatcher.*;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.core.geo.GeoJsonPoint;
|
||||
@@ -86,32 +85,28 @@ public class SimpleMongoRepositoryTests {
|
||||
|
||||
@Test
|
||||
public void findALlFromCustomCollectionName() {
|
||||
List<Person> result = repository.findAll();
|
||||
assertThat(result, hasSize(all.size()));
|
||||
assertThat(repository.findAll()).hasSize(all.size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void findOneFromCustomCollectionName() {
|
||||
Person result = repository.findById(dave.getId()).get();
|
||||
assertThat(result, is(dave));
|
||||
assertThat(repository.findById(dave.getId()).get()).isEqualTo(dave);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void deleteFromCustomCollectionName() {
|
||||
repository.delete(dave);
|
||||
List<Person> result = repository.findAll();
|
||||
|
||||
assertThat(result, hasSize(all.size() - 1));
|
||||
assertThat(result, not(hasItem(dave)));
|
||||
repository.delete(dave);
|
||||
|
||||
assertThat(repository.findAll()).hasSize(all.size() - 1).doesNotContain(dave);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void deleteByIdFromCustomCollectionName() {
|
||||
repository.deleteById(dave.getId());
|
||||
List<Person> result = repository.findAll();
|
||||
|
||||
assertThat(result, hasSize(all.size() - 1));
|
||||
assertThat(result, not(hasItem(dave)));
|
||||
repository.deleteById(dave.getId());
|
||||
|
||||
assertThat(repository.findAll()).hasSize(all.size() - 1).doesNotContain(dave);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1054
|
||||
@@ -122,9 +117,7 @@ public class SimpleMongoRepositoryTests {
|
||||
Person person1 = new Person("First1" + randomId, "Last2" + randomId, 42);
|
||||
person1 = repository.insert(person1);
|
||||
|
||||
Person saved = repository.findById(person1.getId()).get();
|
||||
|
||||
assertThat(saved, is(equalTo(person1)));
|
||||
assertThat(repository.findById(person1.getId())).contains(person1);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1054
|
||||
@@ -142,7 +135,7 @@ public class SimpleMongoRepositoryTests {
|
||||
|
||||
List<Person> saved = repository.insert(persons);

assertThat(saved, hasSize(persons.size()));
assertThat(saved).hasSize(persons.size());
assertThatAllReferencePersonsWereStoredCorrectly(idToPerson, saved);
}

@@ -161,7 +154,7 @@ public class SimpleMongoRepositoryTests {
List<Person> saved = repository.insert(persons);

assertThat(saved, hasSize(persons.size()));
assertThat(saved).hasSize(persons.size());
assertThatAllReferencePersonsWereStoredCorrectly(idToPerson, saved);
}

@@ -174,9 +167,8 @@ public class SimpleMongoRepositoryTests {
Page<Person> result = repository.findAll(Example.of(sample), PageRequest.of(0, 10));

assertThat(result.getContent(), hasItems(dave, oliver));
assertThat(result.getContent(), hasSize(2));
assertThat(result.getTotalPages(), is(1));
assertThat(result.getContent()).hasSize(2).contains(dave, oliver);
assertThat(result.getTotalPages()).isEqualTo(1);
}

@Test // DATAMONGO-1464
@@ -188,8 +180,8 @@ public class SimpleMongoRepositoryTests {
Page<Person> result = repository.findAll(Example.of(sample), PageRequest.of(0, 1));

assertThat(result.getContent(), hasSize(1));
assertThat(result.getTotalPages(), is(2));
assertThat(result.getContent()).hasSize(1);
assertThat(result.getTotalPages()).isEqualTo(2);
}

@Test // DATAMONGO-1245
@@ -199,10 +191,7 @@ public class SimpleMongoRepositoryTests {
sample.setLastname("Matthews");
trimDomainType(sample, "id", "createdAt", "email");

List<Person> result = repository.findAll(Example.of(sample));

assertThat(result, containsInAnyOrder(dave, oliver));
assertThat(result, hasSize(2));
assertThat(repository.findAll(Example.of(sample))).hasSize(2).contains(dave, oliver);
}

@Test // DATAMONGO-1245
@@ -218,10 +207,7 @@ public class SimpleMongoRepositoryTests {
sample.setAddress(dave.getAddress());
trimDomainType(sample, "id", "createdAt", "email");

List<Person> result = repository.findAll(Example.of(sample));

assertThat(result, hasItem(dave));
assertThat(result, hasSize(1));
assertThat(repository.findAll(Example.of(sample))).hasSize(1).contains(dave);
}

@Test // DATAMONGO-1245
@@ -237,10 +223,7 @@ public class SimpleMongoRepositoryTests {
sample.setAddress(new Address(null, null, "Washington"));
trimDomainType(sample, "id", "createdAt", "email");

List<Person> result = repository.findAll(Example.of(sample));

assertThat(result, hasItems(dave, oliver));
assertThat(result, hasSize(2));
assertThat(repository.findAll(Example.of(sample))).hasSize(2).contains(dave, oliver);
}

@Test // DATAMONGO-1245
@@ -254,9 +237,8 @@ public class SimpleMongoRepositoryTests {
trimDomainType(sample, "id", "createdAt", "email");

Example<Person> example = Example.of(sample, matching().withIncludeNullValues());
List<Person> result = repository.findAll(example);

assertThat(result, empty());
assertThat(repository.findAll(example)).isEmpty();
}

@Test // DATAMONGO-1245
@@ -270,10 +252,8 @@ public class SimpleMongoRepositoryTests {
trimDomainType(sample, "id", "createdAt", "email");

Example<Person> example = Example.of(sample, matching().withIncludeNullValues());
List<Person> result = repository.findAll(example);

assertThat(result, hasItem(dave));
assertThat(result, hasSize(1));
assertThat(repository.findAll(example)).hasSize(1).contains(dave);
}

@Test // DATAMONGO-1245
@@ -284,10 +264,8 @@ public class SimpleMongoRepositoryTests {
trimDomainType(sample, "id", "createdAt", "email");

Example<Person> example = Example.of(sample, matching().withStringMatcher(StringMatcher.STARTING));
List<Person> result = repository.findAll(example);

assertThat(result, hasItems(dave, oliver));
assertThat(result, hasSize(2));
assertThat(repository.findAll(example)).hasSize(2).contains(dave, oliver);
}

@Test // DATAMONGO-1245
@@ -307,10 +285,7 @@ public class SimpleMongoRepositoryTests {
sample.setCreator(user);
trimDomainType(sample, "id", "createdAt", "email");

List<Person> result = repository.findAll(Example.of(sample));

assertThat(result, hasItem(megan));
assertThat(result, hasSize(1));
assertThat(repository.findAll(Example.of(sample))).hasSize(1).contains(megan);
}

@Test // DATAMONGO-1245
@@ -325,10 +300,7 @@ public class SimpleMongoRepositoryTests {
sample.setLocation(megan.getLocation());
trimDomainType(sample, "id", "createdAt", "email");

List<Person> result = repository.findAll(Example.of(sample));

assertThat(result, hasItem(megan));
assertThat(result, hasSize(1));
assertThat(repository.findAll(Example.of(sample))).hasSize(1).contains(megan);
}

@Test // DATAMONGO-1245
@@ -343,10 +315,7 @@ public class SimpleMongoRepositoryTests {
sample.setLocation(megan.getLocation());
trimDomainType(sample, "id", "createdAt", "email");

List<Person> result = repository.findAll(Example.of(sample));

assertThat(result, hasItem(megan));
assertThat(result, hasSize(1));
assertThat(repository.findAll(Example.of(sample))).hasSize(1).contains(megan);
}

@Test // DATAMONGO-1245
@@ -362,10 +331,7 @@ public class SimpleMongoRepositoryTests {

trimDomainType(sample, "id", "createdAt", "email");

List<PersonExtended> result = repository.findAll(Example.of(sample));

assertThat(result, hasSize(1));
assertThat(result, hasItem(reference));
assertThat(repository.findAll(Example.of(sample))).hasSize(1).contains(reference);
}

@Test // DATAMONGO-1245
@@ -376,9 +342,7 @@ public class SimpleMongoRepositoryTests {
sample.setLastname("Matthews");
trimDomainType(sample, "id", "createdAt", "email");

Person result = repository.findOne(Example.of(sample));

assertThat(result, is(equalTo(dave)));
assertThat(repository.findOne(Example.of(sample))).isPresent().contains(dave);
}

@Test // DATAMONGO-1245
@@ -389,9 +353,7 @@ public class SimpleMongoRepositoryTests {
sample.setLastname("Matthews");
trimDomainType(sample, "id", "createdAt", "email");

boolean result = repository.exists(Example.of(sample));

assertThat(result, is(true));
assertThat(repository.exists(Example.of(sample))).isTrue();
}

@Test // DATAMONGO-1245
@@ -401,16 +363,14 @@ public class SimpleMongoRepositoryTests {
sample.setLastname("Matthews");
trimDomainType(sample, "id", "createdAt", "email");

long result = repository.count(Example.of(sample));

assertThat(result, is(equalTo(2L)));
assertThat(repository.count(Example.of(sample))).isEqualTo(2L);
}

private void assertThatAllReferencePersonsWereStoredCorrectly(Map<String, Person> references, List<Person> saved) {

for (Person person : saved) {
Person reference = references.get(person.getId());
assertThat(person, is(equalTo(reference)));
assertThat(person).isEqualTo(reference);
}
}
@@ -0,0 +1,32 @@
package org.springframework.data.mongodb.core

import example.first.First
import org.junit.Test
import org.junit.runner.RunWith
import org.mockito.Answers
import org.mockito.Mock
import org.mockito.Mockito
import org.mockito.junit.MockitoJUnitRunner

/**
 * @author Sebastien Deleuze
 */
@RunWith(MockitoJUnitRunner::class)
class ExecutableAggregationOperationExtensionsTests {

	@Mock(answer = Answers.RETURNS_MOCKS)
	lateinit var operation: ExecutableAggregationOperation

	@Test // DATAMONGO-1689
	fun `aggregateAndReturn(KClass) extension should call its Java counterpart`() {
		operation.aggregateAndReturn(First::class)
		Mockito.verify(operation, Mockito.times(1)).aggregateAndReturn(First::class.java)
	}

	@Test // DATAMONGO-1689
	fun `aggregateAndReturn() with reified type parameter extension should call its Java counterpart`() {
		operation.aggregateAndReturn<First>()
		Mockito.verify(operation, Mockito.times(1)).aggregateAndReturn(First::class.java)
	}

}

@@ -0,0 +1,47 @@
package org.springframework.data.mongodb.core

import example.first.First
import org.junit.Test
import org.junit.runner.RunWith
import org.mockito.Answers
import org.mockito.Mock
import org.mockito.Mockito
import org.mockito.junit.MockitoJUnitRunner

/**
 * @author Sebastien Deleuze
 */
@RunWith(MockitoJUnitRunner::class)
class ExecutableFindOperationExtensionsTests {

	@Mock(answer = Answers.RETURNS_MOCKS)
	lateinit var operation: ExecutableFindOperation

	@Mock(answer = Answers.RETURNS_MOCKS)
	lateinit var operationWithProjection: ExecutableFindOperation.FindOperationWithProjection<First>

	@Test // DATAMONGO-1689
	fun `ExecutableFindOperation#query(KClass) extension should call its Java counterpart`() {
		operation.query(First::class)
		Mockito.verify(operation, Mockito.times(1)).query(First::class.java)
	}

	@Test // DATAMONGO-1689
	fun `ExecutableFindOperation#query() with reified type parameter extension should call its Java counterpart`() {
		operation.query<First>()
		Mockito.verify(operation, Mockito.times(1)).query(First::class.java)
	}

	@Test // DATAMONGO-1689
	fun `ExecutableFindOperation#FindOperationWithProjection#asType(KClass) extension should call its Java counterpart`() {
		operationWithProjection.asType(First::class)
		Mockito.verify(operationWithProjection, Mockito.times(1)).`as`(First::class.java)
	}

	@Test // DATAMONGO-1689
	fun `ExecutableFindOperation#FindOperationWithProjection#asType() with reified type parameter extension should call its Java counterpart`() {
		operationWithProjection.asType()
		Mockito.verify(operationWithProjection, Mockito.times(1)).`as`(First::class.java)
	}

}

@@ -0,0 +1,32 @@
package org.springframework.data.mongodb.core

import example.first.First
import org.junit.Test
import org.junit.runner.RunWith
import org.mockito.Answers
import org.mockito.Mock
import org.mockito.Mockito
import org.mockito.junit.MockitoJUnitRunner

/**
 * @author Sebastien Deleuze
 */
@RunWith(MockitoJUnitRunner::class)
class ExecutableInsertOperationExtensionsTests {

	@Mock(answer = Answers.RETURNS_MOCKS)
	lateinit var operation: ExecutableInsertOperation

	@Test // DATAMONGO-1689
	fun `insert(KClass) extension should call its Java counterpart`() {
		operation.insert(First::class)
		Mockito.verify(operation, Mockito.times(1)).insert(First::class.java)
	}

	@Test // DATAMONGO-1689
	fun `insert() with reified type parameter extension should call its Java counterpart`() {
		operation.insert<First>()
		Mockito.verify(operation, Mockito.times(1)).insert(First::class.java)
	}

}

@@ -0,0 +1,32 @@
package org.springframework.data.mongodb.core

import example.first.First
import org.junit.Test
import org.junit.runner.RunWith
import org.mockito.Answers
import org.mockito.Mock
import org.mockito.Mockito
import org.mockito.junit.MockitoJUnitRunner

/**
 * @author Sebastien Deleuze
 */
@RunWith(MockitoJUnitRunner::class)
class ExecutableRemoveOperationExtensionsTests {

	@Mock(answer = Answers.RETURNS_MOCKS)
	lateinit var operation: ExecutableRemoveOperation

	@Test // DATAMONGO-1689
	fun `remove(KClass) extension should call its Java counterpart`() {
		operation.remove(First::class)
		Mockito.verify(operation, Mockito.times(1)).remove(First::class.java)
	}

	@Test // DATAMONGO-1689
	fun `remove() with reified type parameter extension should call its Java counterpart`() {
		operation.remove<First>()
		Mockito.verify(operation, Mockito.times(1)).remove(First::class.java)
	}

}
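The new Kotlin test classes in this commit all check the same contract: each extension delegates to the existing Class-based Java method, either by accepting a `KClass` argument or by using a reified type parameter. As a rough illustration of that delegation pattern, here is a minimal sketch against the `MongoOperations` API (an illustrative sketch, not the exact extension file added in this commit):

```kotlin
package org.springframework.data.mongodb.core

import kotlin.reflect.KClass

// Sketch: a KClass-accepting extension that forwards to the Class-based Java method.
fun <T : Any> MongoOperations.getCollectionName(entityClass: KClass<T>): String =
		getCollectionName(entityClass.java)

// Sketch: a reified variant, so callers can write operations.findAll<Person>()
// instead of passing Person::class.java explicitly.
inline fun <reified T : Any> MongoOperations.findAll(): List<T> =
		findAll(T::class.java)

// Sketch: the same pattern with an additional collection-name argument.
inline fun <reified T : Any> MongoOperations.findAll(collectionName: String): List<T> =
		findAll(T::class.java, collectionName)
```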
@@ -0,0 +1,680 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core
|
||||
|
||||
import com.nhaarman.mockito_kotlin.mock
|
||||
import example.first.First
|
||||
import example.second.Second
|
||||
import org.junit.Test
|
||||
import org.junit.runner.RunWith
|
||||
import org.mockito.Answers
|
||||
import org.mockito.Mock
|
||||
import org.mockito.Mockito.*
|
||||
import org.mockito.junit.MockitoJUnitRunner
|
||||
import org.springframework.data.mongodb.core.BulkOperations.BulkMode
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation
|
||||
import org.springframework.data.mongodb.core.mapreduce.GroupBy
|
||||
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions
|
||||
import org.springframework.data.mongodb.core.query.Criteria
|
||||
import org.springframework.data.mongodb.core.query.NearQuery
|
||||
import org.springframework.data.mongodb.core.query.Query
|
||||
import org.springframework.data.mongodb.core.query.Update
|
||||
|
||||
/**
|
||||
* @author Sebastien Deleuze
|
||||
*/
|
||||
@RunWith(MockitoJUnitRunner::class)
|
||||
class MongoOperationsExtensionsTests {
|
||||
|
||||
@Mock(answer = Answers.RETURNS_MOCKS)
|
||||
lateinit var operations: MongoOperations
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `getCollectionName(KClass) extension should call its Java counterpart`() {
|
||||
|
||||
operations.getCollectionName(First::class)
|
||||
verify(operations, times(1)).getCollectionName(First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `getCollectionName() with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
operations.getCollectionName<First>()
|
||||
verify(operations, times(1)).getCollectionName(First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `execute(CollectionCallback) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val collectionCallback = mock<CollectionCallback<First>>()
|
||||
operations.execute(collectionCallback)
|
||||
verify(operations, times(1)).execute(First::class.java, collectionCallback)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `stream(Query) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
operations.stream<First>(query)
|
||||
verify(operations, times(1)).stream(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `stream(Query, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val collectionName = "foo"
|
||||
operations.stream<First>(query, collectionName)
|
||||
verify(operations, times(1)).stream(query, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `createCollection(KClass) extension should call its Java counterpart`() {
|
||||
|
||||
operations.createCollection(First::class)
|
||||
verify(operations, times(1)).createCollection(First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `createCollection(KClass, CollectionOptions) extension should call its Java counterpart`() {
|
||||
|
||||
val collectionOptions = mock<CollectionOptions>()
|
||||
operations.createCollection(First::class, collectionOptions)
|
||||
verify(operations, times(1)).createCollection(First::class.java, collectionOptions)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `createCollection() with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
operations.createCollection<First>()
|
||||
verify(operations, times(1)).createCollection(First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `createCollection(CollectionOptions) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val collectionOptions = mock<CollectionOptions>()
|
||||
operations.createCollection<First>(collectionOptions)
|
||||
verify(operations, times(1)).createCollection(First::class.java, collectionOptions)
|
||||
}
|
||||
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `collectionExists(KClass) extension should call its Java counterpart`() {
|
||||
|
||||
operations.collectionExists(First::class)
|
||||
verify(operations, times(1)).collectionExists(First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `collectionExists() with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
operations.collectionExists<First>()
|
||||
verify(operations, times(1)).collectionExists(First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `dropCollection(KClass) extension should call its Java counterpart`() {
|
||||
|
||||
operations.dropCollection(First::class)
|
||||
verify(operations, times(1)).dropCollection(First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `dropCollection() with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
operations.dropCollection<First>()
|
||||
verify(operations, times(1)).dropCollection(First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `indexOps(KClass) extension should call its Java counterpart`() {
|
||||
|
||||
operations.indexOps(First::class)
|
||||
verify(operations, times(1)).indexOps(First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `indexOps() with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
operations.indexOps<First>()
|
||||
verify(operations, times(1)).indexOps(First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `bulkOps(BulkMode, KClass) extension should call its Java counterpart`() {
|
||||
|
||||
val bulkMode = BulkMode.ORDERED
|
||||
|
||||
operations.bulkOps(bulkMode, First::class)
|
||||
verify(operations, times(1)).bulkOps(bulkMode, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `bulkOps(BulkMode, KClass, String) extension should call its Java counterpart`() {
|
||||
|
||||
val bulkMode = BulkMode.ORDERED
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.bulkOps(bulkMode, First::class, collectionName)
|
||||
verify(operations, times(1)).bulkOps(bulkMode, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `bulkOps(BulkMode) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val bulkMode = BulkMode.ORDERED
|
||||
|
||||
operations.bulkOps<First>(bulkMode)
|
||||
verify(operations, times(1)).bulkOps(bulkMode, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `bulkOps(BulkMode, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val bulkMode = BulkMode.ORDERED
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.bulkOps<First>(bulkMode, collectionName)
|
||||
verify(operations, times(1)).bulkOps(bulkMode, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `findAll() with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
operations.findAll<First>()
|
||||
verify(operations, times(1)).findAll(First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `findAll(String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.findAll<First>(collectionName)
|
||||
verify(operations, times(1)).findAll(First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `group(String, GroupBy) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val collectionName = "foo"
|
||||
val groupBy = mock<GroupBy>()
|
||||
|
||||
operations.group<First>(collectionName, groupBy)
|
||||
verify(operations, times(1)).group(collectionName, groupBy, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `group(Criteria, String, GroupBy) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val criteria = mock<Criteria>()
|
||||
val collectionName = "foo"
|
||||
val groupBy = mock<GroupBy>()
|
||||
|
||||
operations.group<First>(criteria, collectionName, groupBy)
|
||||
verify(operations, times(1)).group(criteria, collectionName, groupBy, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `aggregate(Aggregation, KClass) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val aggregation = mock<Aggregation>()
|
||||
|
||||
operations.aggregate<First>(aggregation, Second::class)
|
||||
verify(operations, times(1)).aggregate(aggregation, Second::class.java, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `aggregate(Aggregation, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val aggregation = mock<Aggregation>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.aggregate<First>(aggregation, collectionName)
|
||||
verify(operations, times(1)).aggregate(aggregation, collectionName, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `aggregateStream(Aggregation, KClass) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val aggregation = mock<Aggregation>()
|
||||
|
||||
operations.aggregateStream<First>(aggregation, Second::class)
|
||||
verify(operations, times(1)).aggregateStream(aggregation, Second::class.java, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `aggregateStream(Aggregation, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val aggregation = mock<Aggregation>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.aggregateStream<First>(aggregation, collectionName)
|
||||
verify(operations, times(1)).aggregateStream(aggregation, collectionName, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `mapReduce(String, String, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val collectionName = "foo"
|
||||
val mapFunction = "bar"
|
||||
val reduceFunction = "baz"
|
||||
|
||||
operations.mapReduce<First>(collectionName, mapFunction, reduceFunction)
|
||||
verify(operations, times(1)).mapReduce(collectionName, mapFunction, reduceFunction, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `mapReduce(String, String, String, MapReduceOptions) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val collectionName = "foo"
|
||||
val mapFunction = "bar"
|
||||
val reduceFunction = "baz"
|
||||
val options = mock<MapReduceOptions>()
|
||||
|
||||
operations.mapReduce<First>(collectionName, mapFunction, reduceFunction, options)
|
||||
verify(operations, times(1)).mapReduce(collectionName, mapFunction, reduceFunction, options, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `mapReduce(Query, String, String, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val collectionName = "foo"
|
||||
val mapFunction = "bar"
|
||||
val reduceFunction = "baz"
|
||||
|
||||
operations.mapReduce<First>(query, collectionName, mapFunction, reduceFunction)
|
||||
verify(operations, times(1)).mapReduce(query, collectionName, mapFunction, reduceFunction, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `mapReduce(Query, String, String, String, MapReduceOptions) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val collectionName = "foo"
|
||||
val mapFunction = "bar"
|
||||
val reduceFunction = "baz"
|
||||
val options = mock<MapReduceOptions>()
|
||||
|
||||
operations.mapReduce<First>(query, collectionName, mapFunction, reduceFunction, options)
|
||||
verify(operations, times(1)).mapReduce(query, collectionName, mapFunction, reduceFunction, options, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `geoNear(Query) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = NearQuery.near(0.0, 0.0)
|
||||
|
||||
operations.geoNear<First>(query)
|
||||
verify(operations, times(1)).geoNear(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `geoNear(Query, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val collectionName = "foo"
|
||||
val query = NearQuery.near(0.0, 0.0)
|
||||
|
||||
operations.geoNear<First>(query, collectionName)
|
||||
verify(operations, times(1)).geoNear(query, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `findOne(Query) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.findOne<First>(query)
|
||||
verify(operations, times(1)).findOne(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `findOne(Query, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val collectionName = "foo"
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.findOne<First>(query, collectionName)
|
||||
verify(operations, times(1)).findOne(query, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `exists(Query, KClass) extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.exists(query, First::class)
|
||||
verify(operations, times(1)).exists(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `exists(Query) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.exists<First>(query)
|
||||
verify(operations, times(1)).exists(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `find(Query) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.find<First>(query)
|
||||
verify(operations, times(1)).find(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `find(Query, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val collectionName = "foo"
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.find<First>(query, collectionName)
|
||||
verify(operations, times(1)).find(query, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `findById(Any) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val id = 1L
|
||||
|
||||
operations.findById<First>(id)
|
||||
verify(operations, times(1)).findById(id, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `findById(Any, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val collectionName = "foo"
|
||||
val id = 1L
|
||||
|
||||
operations.findById<First>(id, collectionName)
|
||||
verify(operations, times(1)).findById(id, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `findAndModify(Query, Update, FindAndModifyOptions) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
val options = mock<FindAndModifyOptions>()
|
||||
|
||||
operations.findAndModify<First>(query, update, options)
|
||||
verify(operations, times(1)).findAndModify(query, update, options, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `findAndModify(Query, Update, FindAndModifyOptions, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val collectionName = "foo"
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
val options = mock<FindAndModifyOptions>()
|
||||
|
||||
operations.findAndModify<First>(query, update, options, collectionName)
|
||||
verify(operations, times(1)).findAndModify(query, update, options, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `findAndRemove(Query) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.findAndRemove<First>(query)
|
||||
verify(operations, times(1)).findAndRemove(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `findAndRemove(Query, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.findAndRemove<First>(query, collectionName)
|
||||
verify(operations, times(1)).findAndRemove(query, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `count() with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
operations.count<First>()
|
||||
verify(operations, times(1)).count(any<Query>(), eq(First::class.java))
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `count(Query) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.count<First>(query)
|
||||
verify(operations, times(1)).count(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `count(Query, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.count<First>(query, collectionName)
|
||||
verify(operations, times(1)).count(query, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `count(Query, KClass) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.count(query, First::class)
|
||||
verify(operations, times(1)).count(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `count(Query, KClass, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.count(query, First::class, collectionName)
|
||||
verify(operations, times(1)).count(query, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `insert(Collection, KClass) extension should call its Java counterpart`() {
|
||||
|
||||
val collection = listOf(First(), First())
|
||||
|
||||
operations.insert(collection, First::class)
|
||||
verify(operations, times(1)).insert(collection, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `upsert(Query, Update, KClass) extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
|
||||
operations.upsert(query, update, First::class)
|
||||
verify(operations, times(1)).upsert(query, update, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `upsert(Query, Update, KClass, String) extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.upsert(query, update, First::class, collectionName)
|
||||
verify(operations, times(1)).upsert(query, update, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `upsert(Query, Update) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
|
||||
operations.upsert<First>(query, update)
|
||||
verify(operations, times(1)).upsert(query, update, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `upsert(Query, Update, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.upsert<First>(query, update, collectionName)
|
||||
verify(operations, times(1)).upsert(query, update, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `updateFirst(Query, Update, KClass) extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
|
||||
operations.updateFirst(query, update, First::class)
|
||||
verify(operations, times(1)).updateFirst(query, update, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `updateFirst(Query, Update, KClass, String) extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.updateFirst(query, update, First::class, collectionName)
|
||||
verify(operations, times(1)).updateFirst(query, update, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `updateFirst(Query, Update) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
|
||||
operations.updateFirst<First>(query, update)
|
||||
verify(operations, times(1)).updateFirst(query, update, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `updateFirst(Query, Update, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.updateFirst<First>(query, update, collectionName)
|
||||
verify(operations, times(1)).updateFirst(query, update, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `updateMulti(Query, Update, KClass) extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
|
||||
operations.updateMulti(query, update, First::class)
|
||||
verify(operations, times(1)).updateMulti(query, update, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `updateMulti(Query, Update, KClass, String) extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.updateMulti(query, update, First::class, collectionName)
|
||||
verify(operations, times(1)).updateMulti(query, update, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `updateMulti(Query, Update) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
|
||||
operations.updateMulti<First>(query, update)
|
||||
verify(operations, times(1)).updateMulti(query, update, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `updateMulti(Query, Update, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.updateMulti<First>(query, update, collectionName)
|
||||
verify(operations, times(1)).updateMulti(query, update, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `remove(Query, KClass) extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.remove(query, First::class)
|
||||
verify(operations, times(1)).remove(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `remove(Query, KClass, String) extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.remove(query, First::class, collectionName)
|
||||
verify(operations, times(1)).remove(query, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `remove(Query) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.remove<First>(query)
|
||||
verify(operations, times(1)).remove(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `remove(Query, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.remove<First>(query, collectionName)
|
||||
verify(operations, times(1)).remove(query, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `findAllAndRemove(Query) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.findAllAndRemove<First>(query)
|
||||
verify(operations, times(1)).findAllAndRemove(query, First::class.java)
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,529 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core
|
||||
|
||||
import com.nhaarman.mockito_kotlin.mock
|
||||
import example.first.First
|
||||
import org.junit.Test
|
||||
import org.junit.runner.RunWith
|
||||
import org.mockito.Answers
|
||||
import org.mockito.Mock
|
||||
import org.mockito.Mockito.*
|
||||
import org.mockito.junit.MockitoJUnitRunner
|
||||
import org.springframework.data.mongodb.core.query.NearQuery
|
||||
import org.springframework.data.mongodb.core.query.Query
|
||||
import org.springframework.data.mongodb.core.query.Update
|
||||
import reactor.core.publisher.Mono
|
||||
|
||||
/**
|
||||
* @author Sebastien Deleuze
|
||||
*/
|
||||
@RunWith(MockitoJUnitRunner::class)
|
||||
class ReactiveMongoOperationsExtensionsTests {
|
||||
|
||||
@Mock(answer = Answers.RETURNS_MOCKS)
|
||||
lateinit var operations: ReactiveMongoOperations
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `indexOps(KClass) extension should call its Java counterpart`() {
|
||||
|
||||
operations.indexOps(First::class)
|
||||
verify(operations, times(1)).indexOps(First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `indexOps() with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
operations.indexOps<First>()
|
||||
verify(operations, times(1)).indexOps(First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `execute(ReactiveCollectionCallback) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val collectionCallback = mock<ReactiveCollectionCallback<First>>()
|
||||
|
||||
operations.execute(collectionCallback)
|
||||
verify(operations, times(1)).execute(First::class.java, collectionCallback)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `createCollection(KClass) extension should call its Java counterpart`() {
|
||||
|
||||
operations.createCollection(First::class)
|
||||
verify(operations, times(1)).createCollection(First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `createCollection(KClass, CollectionOptions) extension should call its Java counterpart`() {
|
||||
|
||||
val collectionOptions = mock<CollectionOptions>()
|
||||
|
||||
operations.createCollection(First::class, collectionOptions)
|
||||
verify(operations, times(1)).createCollection(First::class.java, collectionOptions)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `createCollection() with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
operations.createCollection<First>()
|
||||
verify(operations, times(1)).createCollection(First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `createCollection(CollectionOptions) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val collectionOptions = mock<CollectionOptions>()
|
||||
|
||||
operations.createCollection<First>(collectionOptions)
|
||||
verify(operations, times(1)).createCollection(First::class.java, collectionOptions)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `collectionExists(KClass) extension should call its Java counterpart`() {
|
||||
|
||||
operations.collectionExists(First::class)
|
||||
verify(operations, times(1)).collectionExists(First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `collectionExists() with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
operations.collectionExists<First>()
|
||||
verify(operations, times(1)).collectionExists(First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `dropCollection(KClass) extension should call its Java counterpart`() {
|
||||
|
||||
operations.dropCollection(First::class)
|
||||
verify(operations, times(1)).dropCollection(First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `dropCollection() with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
operations.dropCollection<First>()
|
||||
verify(operations, times(1)).dropCollection(First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `findAll() with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
operations.findAll<First>()
|
||||
verify(operations, times(1)).findAll(First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `findAll(String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.findAll<First>(collectionName)
|
||||
verify(operations, times(1)).findAll(First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `findOne(Query) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.findOne<First>(query)
|
||||
verify(operations, times(1)).findOne(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `findOne(Query, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val collectionName = "foo"
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.findOne<First>(query, collectionName)
|
||||
verify(operations, times(1)).findOne(query, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `exists(Query, KClass) extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.exists(query, First::class)
|
||||
verify(operations, times(1)).exists(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `exists(Query) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.exists<First>(query)
|
||||
verify(operations, times(1)).exists(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `find(Query) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.find<First>(query)
|
||||
verify(operations, times(1)).find(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `find(Query, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val collectionName = "foo"
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.find<First>(query, collectionName)
|
||||
verify(operations, times(1)).find(query, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `findById(Any) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val id = 1L
|
||||
|
||||
operations.findById<First>(id)
|
||||
verify(operations, times(1)).findById(id, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `findById(Any, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val collectionName = "foo"
|
||||
val id = 1L
|
||||
|
||||
operations.findById<First>(id, collectionName)
|
||||
verify(operations, times(1)).findById(id, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `geoNear(Query) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = NearQuery.near(0.0, 0.0)
|
||||
|
||||
operations.geoNear<First>(query)
|
||||
verify(operations, times(1)).geoNear(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `geoNear(Query, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val collectionName = "foo"
|
||||
val query = NearQuery.near(0.0, 0.0)
|
||||
|
||||
operations.geoNear<First>(query, collectionName)
|
||||
verify(operations, times(1)).geoNear(query, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `findAndModify(Query, Update, FindAndModifyOptions) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
val options = mock<FindAndModifyOptions>()
|
||||
|
||||
operations.findAndModify<First>(query, update, options)
|
||||
verify(operations, times(1)).findAndModify(query, update, options, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `findAndModify(Query, Update, FindAndModifyOptions, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val collectionName = "foo"
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
val options = mock<FindAndModifyOptions>()
|
||||
|
||||
operations.findAndModify<First>(query, update, options, collectionName)
|
||||
verify(operations, times(1)).findAndModify(query, update, options, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `findAndRemove(Query) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.findAndRemove<First>(query)
|
||||
verify(operations, times(1)).findAndRemove(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `findAndRemove(Query, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.findAndRemove<First>(query, collectionName)
|
||||
verify(operations, times(1)).findAndRemove(query, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `count() with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
operations.count<First>()
|
||||
verify(operations, times(1)).count(any<Query>(), eq(First::class.java))
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `count(Query) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.count<First>(query)
|
||||
verify(operations, times(1)).count(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `count(Query, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.count<First>(query, collectionName)
|
||||
verify(operations, times(1)).count(query, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `count(Query, KClass) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.count(query, First::class)
|
||||
verify(operations, times(1)).count(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `count(Query, KClass, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.count(query, First::class, collectionName)
|
||||
verify(operations, times(1)).count(query, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `insert(Collection, KClass) extension should call its Java counterpart`() {
|
||||
|
||||
val collection = listOf(First(), First())
|
||||
|
||||
operations.insert(collection, First::class)
|
||||
verify(operations, times(1)).insert(collection, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `insertAll(Mono, KClass) extension should call its Java counterpart`() {
|
||||
|
||||
val collection = Mono.just(listOf(First(), First()))
|
||||
|
||||
operations.insertAll(collection, First::class)
|
||||
verify(operations, times(1)).insertAll(collection, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `upsert(Query, Update, KClass) extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
|
||||
operations.upsert(query, update, First::class)
|
||||
verify(operations, times(1)).upsert(query, update, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `upsert(Query, Update, KClass, String) extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.upsert(query, update, First::class, collectionName)
|
||||
verify(operations, times(1)).upsert(query, update, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `upsert(Query, Update) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
|
||||
operations.upsert<First>(query, update)
|
||||
verify(operations, times(1)).upsert(query, update, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `upsert(Query, Update, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.upsert<First>(query, update, collectionName)
|
||||
verify(operations, times(1)).upsert(query, update, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `updateFirst(Query, Update, KClass) extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
|
||||
operations.updateFirst(query, update, First::class)
|
||||
verify(operations, times(1)).updateFirst(query, update, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `updateFirst(Query, Update, KClass, String) extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.updateFirst(query, update, First::class, collectionName)
|
||||
verify(operations, times(1)).updateFirst(query, update, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `updateFirst(Query, Update) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
|
||||
operations.updateFirst<First>(query, update)
|
||||
verify(operations, times(1)).updateFirst(query, update, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `updateFirst(Query, Update, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.updateFirst<First>(query, update, collectionName)
|
||||
verify(operations, times(1)).updateFirst(query, update, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `updateMulti(Query, Update, KClass) extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
|
||||
operations.updateMulti(query, update, First::class)
|
||||
verify(operations, times(1)).updateMulti(query, update, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `updateMulti(Query, Update, KClass, String) extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.updateMulti(query, update, First::class, collectionName)
|
||||
verify(operations, times(1)).updateMulti(query, update, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `updateMulti(Query, Update) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
|
||||
operations.updateMulti<First>(query, update)
|
||||
verify(operations, times(1)).updateMulti(query, update, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `updateMulti(Query, Update, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val update = mock<Update>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.updateMulti<First>(query, update, collectionName)
|
||||
verify(operations, times(1)).updateMulti(query, update, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `remove(Query, KClass) extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.remove(query, First::class)
|
||||
verify(operations, times(1)).remove(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `remove(Query, KClass, String) extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.remove(query, First::class, collectionName)
|
||||
verify(operations, times(1)).remove(query, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `remove(Query) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.remove<First>(query)
|
||||
verify(operations, times(1)).remove(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `remove(Query, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.remove<First>(query, collectionName)
|
||||
verify(operations, times(1)).remove(query, First::class.java, collectionName)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `findAllAndRemove(Query) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.findAllAndRemove<First>(query)
|
||||
verify(operations, times(1)).findAllAndRemove(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `tail(Query) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
|
||||
operations.tail<First>(query)
|
||||
verify(operations, times(1)).tail(query, First::class.java)
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1689
|
||||
fun `tail(Query, String) with reified type parameter extension should call its Java counterpart`() {
|
||||
|
||||
val query = mock<Query>()
|
||||
val collectionName = "foo"
|
||||
|
||||
operations.tail<First>(query, collectionName)
|
||||
verify(operations, times(1)).tail(query, First::class.java, collectionName)
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,39 @@
Spring Data MongoDB Changelog
=============================

Changes in version 2.0.0.M4 (2017-06-14)
----------------------------------------
* DATAMONGO-1716 - Upgrade to Reactive Streams driver 1.5.0.
* DATAMONGO-1714 - Deprecate MongoLog4jAppender.
* DATAMONGO-1712 - Adopt to ReactiveCrudRepository.findById(Publisher) and existsById(Publisher).
* DATAMONGO-1710 - Adopt to changed AnnotationUtils.getValue(…) and OperatorNode.getRightOperand() behavior.
* DATAMONGO-1707 - Upgrade to Reactor 3.1 M2.
* DATAMONGO-1699 - Upgrade travis.yml to use MongoDB 3.4.
* DATAMONGO-1695 - Make sure GridFsResource.getContentType() reads type from new location within file metadata.
* DATAMONGO-1693 - Support collation in ReactiveMongoTemplate.createCollection.
* DATAMONGO-1690 - Adapt to QuerydslPredicateExecutor API changes.
* DATAMONGO-1689 - Provide Kotlin extensions for Class based methods in MongoOperations / ReactiveMongoOperations.
* DATAMONGO-1688 - Release 2.0 M4 (Kay).
* DATAMONGO-1687 - Creating capped collection with CollectionOptions.empty().capped(…) causes NPE.
* DATAMONGO-1686 - Upgrade to MongoDB reactive streams driver 1.4.
* DATAMONGO-1685 - Adapt QueryByExampleExecutor API changes.
* DATAMONGO-1619 - Use ReactiveQueryByExampleExecutor in ReactiveMongoRepository.
* DATAMONGO-1563 - Add TemplateWrapper to reduce method overloads on MongoTemplate.


Changes in version 1.10.4.RELEASE (2017-06-08)
----------------------------------------------
* DATAMONGO-1699 - Upgrade travis.yml to use MongoDB 3.4.
* DATAMONGO-1672 - Release 1.10.4 (Ingalls SR4).
* DATAMONGO-1205 - CyclicPropertyReferenceException logged with stack trace.


Changes in version 1.9.11.RELEASE (2017-06-07)
----------------------------------------------
* DATAMONGO-1671 - Release 1.9.11 (Hopper SR11).
* DATAMONGO-1205 - CyclicPropertyReferenceException logged with stack trace.


Changes in version 2.0.0.M3 (2017-05-09)
----------------------------------------
* DATAMONGO-1684 - Adopt documentation to removed JodaTime DateMidnight support.
@@ -1,4 +1,4 @@
Spring Data MongoDB 2.0 M3
Spring Data MongoDB 2.0 M4
Copyright (c) [2010-2015] Pivotal Software, Inc.

This product is licensed to you under the Apache License, Version 2.0 (the "License").