Browse Source

DATAMONGO-1311 - Polishing.

Update Javadoc and add reference documentation.
Alter @Meta batchSize default to zero, as negative values bear a special meaning.
Along the lines remove deprecated driver method usage and add deprecations for options about to be removed in subsequent MongoDB server releases.

Original Pull Request: #575
pull/577/head
Christoph Strobl 8 years ago
parent
commit
30b86e7612
  1. 174
      spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java
  2. 24
      spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java
  3. 7
      spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoExampleMapper.java
  4. 29
      spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Meta.java
  5. 51
      spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Query.java
  6. 9
      spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Meta.java
  7. 5
      spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParametersParameterAccessor.java
  8. 5
      spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryMethod.java
  9. 4
      spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java
  10. 51
      spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryCursorPreparerUnitTests.java
  11. 22
      spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java
  12. 12
      spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryMethodUnitTests.java
  13. 29
      src/main/asciidoc/reference/mongodb.adoc

174
spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java

@ -3281,8 +3281,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware, @@ -3281,8 +3281,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
Meta meta = query.getMeta();
if (query.getSkip() <= 0 && query.getLimit() <= 0 && ObjectUtils.isEmpty(query.getSortObject())
&& !StringUtils.hasText(query.getHint()) && !meta.hasValues()
&& !query.getCollation().isPresent()) {
&& !StringUtils.hasText(query.getHint()) && !meta.hasValues() && !query.getCollation().isPresent()) {
return cursor;
}
@ -3302,15 +3301,30 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware, @@ -3302,15 +3301,30 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
cursorToUse = cursorToUse.sort(sort);
}
Document metaDocument = new Document();
if (StringUtils.hasText(query.getHint())) {
metaDocument.put("$hint", query.getHint());
cursorToUse = cursorToUse.hint(Document.parse(query.getHint()));
}
if (meta.hasValues()) {
for (Entry<String, Object> entry : meta.values()) {
metaDocument.put(entry.getKey(), entry.getValue());
if (StringUtils.hasText(meta.getComment())) {
cursorToUse = cursorToUse.comment(meta.getComment());
}
if (meta.getSnapshot()) {
cursorToUse = cursorToUse.snapshot(meta.getSnapshot());
}
if (meta.getMaxScan() != null) {
cursorToUse = cursorToUse.maxScan(meta.getMaxScan());
}
if (meta.getMaxTimeMsec() != null) {
cursorToUse = cursorToUse.maxTime(meta.getMaxTimeMsec(), TimeUnit.MILLISECONDS);
}
if (meta.getCursorBatchSize() != null) {
cursorToUse = cursorToUse.batchSize(meta.getCursorBatchSize());
}
for (Meta.CursorOption option : meta.getFlags()) {
@ -3327,13 +3341,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware, @@ -3327,13 +3341,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
throw new IllegalArgumentException(String.format("%s is no supported flag.", option));
}
}
if (meta.getCursorBatchSize() != null) {
cursorToUse = cursorToUse.batchSize(meta.getCursorBatchSize());
}
}
cursorToUse = cursorToUse.modifiers(metaDocument);
} catch (RuntimeException e) {
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
}
@ -3464,149 +3473,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware, @@ -3464,149 +3473,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
return mongoDbFactory;
}
/**
* {@link BatchAggregationLoader} is a little helper that can process cursor results returned by an aggregation
* command execution. On presence of a {@literal nextBatch} indicated by presence of an {@code id} field in the
* {@code cursor} another {@code getMore} command gets executed reading the next batch of documents until all results
* are loaded.
*
* @author Christoph Strobl
* @since 1.10
*/
static class BatchAggregationLoader {
// Field names used in the MongoDB aggregate / getMore command documents and their replies.
private static final String CURSOR_FIELD = "cursor";
private static final String RESULT_FIELD = "result";
private static final String BATCH_SIZE_FIELD = "batchSize";
private static final String FIRST_BATCH = "firstBatch";
private static final String NEXT_BATCH = "nextBatch";
private static final String SERVER_USED = "serverUsed";
private static final String OK = "ok";
// Template used to execute the aggregate and getMore commands against the server.
private final MongoTemplate template;
private final ReadPreference readPreference;
private final int batchSize;
BatchAggregationLoader(MongoTemplate template, ReadPreference readPreference, int batchSize) {
this.template = template;
this.readPreference = readPreference;
this.batchSize = batchSize;
}
/**
* Run aggregation command and fetch all results.
*/
Document aggregate(String collectionName, Aggregation aggregation, AggregationOperationContext context) {
Document command = prepareAggregationCommand(collectionName, aggregation, context, batchSize);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Executing aggregation: {}", serializeToJsonSafely(command));
}
return mergeAggregationResults(aggregateBatched(command, collectionName, batchSize));
}
/**
* Pre process the aggregation command sent to the server by adding {@code cursor} options to match execution on
* different server versions.
*/
private static Document prepareAggregationCommand(String collectionName, Aggregation aggregation,
@Nullable AggregationOperationContext context, int batchSize) {
AggregationOperationContext rootContext = context == null ? Aggregation.DEFAULT_CONTEXT : context;
Document command = aggregation.toDocument(collectionName, rootContext);
// explain runs do not return a cursor, so only request cursor-based batching for regular execution
if (!aggregation.getOptions().isExplain()) {
command.put(CURSOR_FIELD, new Document(BATCH_SIZE_FIELD, batchSize));
}
return command;
}
/**
* Execute the initial {@code aggregate} command, then keep issuing {@code getMore} commands while the server
* reports a non-zero cursor id, collecting the post-processed result of every batch.
*/
private List<Document> aggregateBatched(Document command, String collectionName, int batchSize) {
List<Document> results = new ArrayList<>();
Document commandResult = template.executeCommand(command, readPreference);
results.add(postProcessResult(commandResult));
while (hasNext(commandResult)) {
Document getMore = new Document("getMore", getNextBatchId(commandResult)) //
.append("collection", collectionName) //
.append(BATCH_SIZE_FIELD, batchSize);
commandResult = template.executeCommand(getMore, this.readPreference);
results.add(postProcessResult(commandResult));
}
return results;
}
/**
* Normalize a single command reply: copy {@code serverUsed} and {@code ok}, and expose the documents found in
* either {@code cursor.firstBatch} or {@code cursor.nextBatch} under the common {@code result} key. Replies
* without a {@code cursor} field are returned unchanged.
*/
private static Document postProcessResult(Document commandResult) {
if (!commandResult.containsKey(CURSOR_FIELD)) {
return commandResult;
}
Document resultObject = new Document(SERVER_USED, commandResult.get(SERVER_USED));
resultObject.put(OK, commandResult.get(OK));
Document cursor = (Document) commandResult.get(CURSOR_FIELD);
if (cursor.containsKey(FIRST_BATCH)) {
resultObject.put(RESULT_FIELD, cursor.get(FIRST_BATCH));
} else {
resultObject.put(RESULT_FIELD, cursor.get(NEXT_BATCH));
}
return resultObject;
}
/**
* Merge the per-batch replies into one command-result shaped {@link Document}, concatenating all {@code result}
* entries. A single batch is returned as-is.
*/
private static Document mergeAggregationResults(List<Document> batchResults) {
if (batchResults.size() == 1) {
return batchResults.iterator().next();
}
Document commandResult = new Document();
List<Object> allResults = new ArrayList<>();
for (Document batchResult : batchResults) {
Collection documents = (Collection<?>) batchResult.get(RESULT_FIELD);
if (!CollectionUtils.isEmpty(documents)) {
allResults.addAll(documents);
}
}
// take general info from first batch
commandResult.put(SERVER_USED, batchResults.iterator().next().get(SERVER_USED));
commandResult.put(OK, batchResults.iterator().next().get(OK));
// and append the merged batchResults
commandResult.put(RESULT_FIELD, allResults);
return commandResult;
}
/**
* A non-zero {@code cursor.id} in the reply signals that the server holds more batches to fetch via
* {@code getMore}; an id of {@literal 0} means the cursor is exhausted.
*/
private static boolean hasNext(Document commandResult) {
if (!commandResult.containsKey(CURSOR_FIELD)) {
return false;
}
Object next = getNextBatchId(commandResult);
return next != null && ((Number) next).longValue() != 0L;
}
// Read the server-assigned cursor id from the reply; used as the argument to the getMore command.
@Nullable
private static Object getNextBatchId(Document commandResult) {
return ((Document) commandResult.get(CURSOR_FIELD)).get("id");
}
}
/**
* {@link MongoTemplate} extension bound to a specific {@link ClientSession} that is applied when interacting with the
* server through the driver API.

24
spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java

@ -116,7 +116,6 @@ import org.springframework.util.ObjectUtils; @@ -116,7 +116,6 @@ import org.springframework.util.ObjectUtils;
import org.springframework.util.ResourceUtils;
import org.springframework.util.StringUtils;
import com.mongodb.BasicDBObject;
import com.mongodb.ClientSessionOptions;
import com.mongodb.CursorType;
import com.mongodb.DBCollection;
@ -3237,15 +3236,27 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati @@ -3237,15 +3236,27 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
Document sort = type != null ? getMappedSortObject(query, type) : query.getSortObject();
findPublisherToUse = findPublisherToUse.sort(sort);
}
BasicDBObject modifiers = new BasicDBObject();
if (StringUtils.hasText(query.getHint())) {
modifiers.append("$hint", query.getHint());
findPublisherToUse = findPublisherToUse.hint(Document.parse(query.getHint()));
}
if (meta.hasValues()) {
for (Entry<String, Object> entry : meta.values()) {
modifiers.append(entry.getKey(), entry.getValue());
if (StringUtils.hasText(meta.getComment())) {
findPublisherToUse = findPublisherToUse.comment(meta.getComment());
}
if (meta.getSnapshot()) {
findPublisherToUse = findPublisherToUse.snapshot(meta.getSnapshot());
}
if (meta.getMaxScan() != null) {
findPublisherToUse = findPublisherToUse.maxScan(meta.getMaxScan());
}
if (meta.getMaxTimeMsec() != null) {
findPublisherToUse = findPublisherToUse.maxTime(meta.getMaxTimeMsec(), TimeUnit.MILLISECONDS);
}
if (meta.getCursorBatchSize() != null) {
@ -3253,9 +3264,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati @@ -3253,9 +3264,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
}
}
if (!modifiers.isEmpty()) {
findPublisherToUse = findPublisherToUse.modifiers(modifiers);
}
} catch (RuntimeException e) {
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
}

7
spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoExampleMapper.java

@ -164,13 +164,14 @@ public class MongoExampleMapper { @@ -164,13 +164,14 @@ public class MongoExampleMapper {
if (exampleSpecAccessor.hasPropertySpecifier(mappedPropertyPath)) {
PropertyValueTransformer valueTransformer = exampleSpecAccessor.getValueTransformerForPath(mappedPropertyPath);
value = valueTransformer.convert(value);
if (value == null) {
Optional converted = valueTransformer.apply(Optional.ofNullable(value));
if(!converted.isPresent()) {
iter.remove();
continue;
}
entry.setValue(value);
entry.setValue(converted.get());
}
if (entry.getValue() instanceof String) {

29
spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Meta.java

@ -15,6 +15,7 @@ @@ -15,6 +15,7 @@
*/
package org.springframework.data.mongodb.core.query;
import java.time.Duration;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
@ -66,7 +67,7 @@ public class Meta { @@ -66,7 +67,7 @@ public class Meta {
* @param maxTimeMsec
*/
public void setMaxTimeMsec(long maxTimeMsec) {
setMaxTime(maxTimeMsec, TimeUnit.MILLISECONDS);
setMaxTime(Duration.ofMillis(maxTimeMsec));
}
/**
@ -74,11 +75,25 @@ public class Meta { @@ -74,11 +75,25 @@ public class Meta {
*
* @param timeout
* @param timeUnit
* @deprecated since 2.1. Use {@link #setMaxTime(Duration)} instead.
*/
@Deprecated
public void setMaxTime(long timeout, @Nullable TimeUnit timeUnit) {
setValue(MetaKey.MAX_TIME_MS.key, (timeUnit != null ? timeUnit : TimeUnit.MILLISECONDS).toMillis(timeout));
}
/**
* Set the maximum time limit for processing operations.
*
* @param timeout must not be {@literal null}.
* @since 2.1
*/
public void setMaxTime(Duration timeout) {
Assert.notNull(timeout, "Timeout must not be null!");
setValue(MetaKey.MAX_TIME_MS.key, timeout.toMillis());
}
/**
* @return {@literal null} if not set.
*/
@ -91,13 +106,15 @@ public class Meta { @@ -91,13 +106,15 @@ public class Meta {
* Only scan the specified number of documents.
*
* @param maxScan
* @deprecated since 2.1 due to deprecation in MongoDB 4.0.
*/
@Deprecated
public void setMaxScan(long maxScan) {
setValue(MetaKey.MAX_SCAN.key, maxScan);
}
/**
* Add a comment to the query.
* Add a comment to the query that is propagated to the profile log.
*
* @param comment
*/
@ -117,7 +134,9 @@ public class Meta { @@ -117,7 +134,9 @@ public class Meta {
* Using snapshot prevents the cursor from returning a document more than once.
*
* @param useSnapshot
* @deprecated since 2.1 due to deprecation as of MongoDB 3.6
*/
@Deprecated
public void setSnapshot(boolean useSnapshot) {
setValue(MetaKey.SNAPSHOT.key, useSnapshot);
}
@ -139,9 +158,11 @@ public class Meta { @@ -139,9 +158,11 @@ public class Meta {
}
/**
* Apply the batch size for a query.
* Apply the batch size (number of documents to return in each response) for a query. <br />
* Use {@literal 0 (zero)} for no limit. A <strong>negative limit</strong> closes the cursor after returning a single
* batch indicating to the server that the client will not ask for a subsequent one.
*
* @param cursorBatchSize
* @param cursorBatchSize The number of documents to return per batch.
* @since 2.1
*/
public void setCursorBatchSize(int cursorBatchSize) {

51
spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Query.java

@ -18,6 +18,7 @@ package org.springframework.data.mongodb.core.query; @@ -18,6 +18,7 @@ package org.springframework.data.mongodb.core.query;
import static org.springframework.data.mongodb.core.query.SerializationUtils.*;
import static org.springframework.util.ObjectUtils.*;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
@ -289,7 +290,7 @@ public class Query { @@ -289,7 +290,7 @@ public class Query {
/**
* @param maxTimeMsec
* @return
* @return this.
* @see Meta#setMaxTimeMsec(long)
* @since 1.6
*/
@ -302,22 +303,38 @@ public class Query { @@ -302,22 +303,38 @@ public class Query {
/**
* @param timeout
* @param timeUnit
* @return
* @return this.
* @see Meta#setMaxTime(long, TimeUnit)
* @since 1.6
* @deprecated since 2.1. Use {@link #maxTime(Duration)} instead.
*/
@Deprecated
public Query maxTime(long timeout, TimeUnit timeUnit) {
meta.setMaxTime(timeout, timeUnit);
return this;
}
/**
* @param timeout
* @return this.
* @see Meta#setMaxTime(Duration)
* @since 2.1
*/
public Query maxTime(Duration timeout) {
meta.setMaxTime(timeout);
return this;
}
/**
* @param maxScan
* @return
* @return this.
* @see Meta#setMaxScan(long)
* @since 1.6
* @deprecated since 2.1 due to deprecation in MongoDB 4.0.
*/
@Deprecated
public Query maxScan(long maxScan) {
meta.setMaxScan(maxScan);
@ -325,8 +342,10 @@ public class Query { @@ -325,8 +342,10 @@ public class Query {
}
/**
* Add a comment to the query that is propagated to the profile log.
*
* @param comment
* @return
* @return this.
* @see Meta#setComment(String)
* @since 1.6
*/
@ -337,10 +356,12 @@ public class Query { @@ -337,10 +356,12 @@ public class Query {
}
/**
* @return
* @return this.
* @see Meta#setSnapshot(boolean)
* @since 1.6
* @deprecated since 2.1 due to deprecation as of MongoDB 3.6
*/
@Deprecated
public Query useSnapshot() {
meta.setSnapshot(true);
@ -348,8 +369,12 @@ public class Query { @@ -348,8 +369,12 @@ public class Query {
}
/**
* @param batchSize
* @return
* Set the number of documents to return in each response batch. <br />
* Use {@literal 0 (zero)} for no limit. A <strong>negative limit</strong> closes the cursor after returning a single
* batch indicating to the server that the client will not ask for a subsequent one.
*
* @param batchSize The number of documents to return per batch.
* @return this.
* @see Meta#setCursorBatchSize(int)
* @since 2.1
*/
@ -360,7 +385,7 @@ public class Query { @@ -360,7 +385,7 @@ public class Query {
}
/**
* @return
* @return this.
* @see org.springframework.data.mongodb.core.query.Meta.CursorOption#NO_TIMEOUT
* @since 1.10
*/
@ -371,7 +396,7 @@ public class Query { @@ -371,7 +396,7 @@ public class Query {
}
/**
* @return
* @return this.
* @see org.springframework.data.mongodb.core.query.Meta.CursorOption#EXHAUST
* @since 1.10
*/
@ -382,7 +407,9 @@ public class Query { @@ -382,7 +407,9 @@ public class Query {
}
/**
* @return
* Allows querying of a replica slave.
*
* @return this.
* @see org.springframework.data.mongodb.core.query.Meta.CursorOption#SLAVE_OK
* @since 1.10
*/
@ -393,7 +420,7 @@ public class Query { @@ -393,7 +420,7 @@ public class Query {
}
/**
* @return
* @return this.
* @see org.springframework.data.mongodb.core.query.Meta.CursorOption#PARTIAL
* @since 1.10
*/
@ -404,7 +431,7 @@ public class Query { @@ -404,7 +431,7 @@ public class Query {
}
/**
* @return never {@literal null}.
* @return never {@literal null}.
* @since 1.6
*/
public Meta getMeta() {

9
spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Meta.java

@ -21,7 +21,6 @@ import java.lang.annotation.Retention; @@ -21,7 +21,6 @@ import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.springframework.core.annotation.AliasFor;
import org.springframework.data.annotation.QueryAnnotation;
/**
@ -51,12 +50,14 @@ public @interface Meta { @@ -51,12 +50,14 @@ public @interface Meta {
long maxScanDocuments() default -1;
/**
* Sets the number of documents to return per batch.
* Sets the number of documents to return per batch. <br />
* Use {@literal 0 (zero)} for no limit. A <strong>negative limit</strong> closes the cursor after returning a single
* batch indicating to the server that the client will not ask for a subsequent one.
*
* @return
* @return {@literal 0 (zero)} by default.
* @since 2.1
*/
int cursorBatchSize() default -1;
int cursorBatchSize() default 0;
/**
* Add a comment to the query.

5
spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParametersParameterAccessor.java

@ -19,6 +19,7 @@ import java.util.Arrays; @@ -19,6 +19,7 @@ import java.util.Arrays;
import java.util.List;
import org.springframework.data.domain.Range;
import org.springframework.data.domain.Range.Bound;
import org.springframework.data.geo.Distance;
import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.query.Term;
@ -66,9 +67,9 @@ public class MongoParametersParameterAccessor extends ParametersParameterAccesso @@ -66,9 +67,9 @@ public class MongoParametersParameterAccessor extends ParametersParameterAccesso
}
int maxDistanceIndex = mongoParameters.getMaxDistanceIndex();
Distance maxDistance = maxDistanceIndex == -1 ? null : (Distance) getValue(maxDistanceIndex);
Bound<Distance> maxDistance = maxDistanceIndex == -1 ? Bound.unbounded() : Bound.inclusive((Distance) getValue(maxDistanceIndex));
return new Range<Distance>(null, maxDistance);
return Range.of(Bound.unbounded(), maxDistance);
}
/*

5
spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryMethod.java

@ -160,8 +160,7 @@ public class MongoQueryMethod extends QueryMethod { @@ -160,8 +160,7 @@ public class MongoQueryMethod extends QueryMethod {
MongoPersistentEntity<?> collectionEntity = domainClass.isAssignableFrom(returnedObjectType) ? returnedEntity
: managedEntity;
this.metadata = new SimpleMongoEntityMetadata<>((Class<Object>) returnedEntity.getType(),
collectionEntity);
this.metadata = new SimpleMongoEntityMetadata<>((Class<Object>) returnedEntity.getType(), collectionEntity);
}
}
@ -274,7 +273,7 @@ public class MongoQueryMethod extends QueryMethod { @@ -274,7 +273,7 @@ public class MongoQueryMethod extends QueryMethod {
metaAttributes.setMaxScan(meta.maxScanDocuments());
}
if (meta.cursorBatchSize() > 0) {
if (meta.cursorBatchSize() != 0) {
metaAttributes.setCursorBatchSize(meta.cursorBatchSize());
}

4
spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java

@ -138,7 +138,6 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests { @@ -138,7 +138,6 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
when(collection.withReadPreference(any())).thenReturn(collection);
when(findIterable.projection(any())).thenReturn(findIterable);
when(findIterable.sort(any(org.bson.Document.class))).thenReturn(findIterable);
when(findIterable.modifiers(any(org.bson.Document.class))).thenReturn(findIterable);
when(findIterable.collation(any())).thenReturn(findIterable);
when(findIterable.limit(anyInt())).thenReturn(findIterable);
when(mapReduceIterable.collation(any())).thenReturn(mapReduceIterable);
@ -733,7 +732,8 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests { @@ -733,7 +732,8 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
@Test // DATAMONGO-1518
public void findAndRemoveManyShouldUseCollationWhenPresent() {
template.doRemove("collection-1", new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class, true);
template.doRemove("collection-1", new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class,
true);
ArgumentCaptor<DeleteOptions> options = ArgumentCaptor.forClass(DeleteOptions.class);
verify(collection).deleteMany(any(), options.capture());

51
spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryCursorPreparerUnitTests.java

@ -15,8 +15,6 @@ @@ -15,8 +15,6 @@
*/
package org.springframework.data.mongodb.core;
import static org.hamcrest.core.IsEqual.*;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import static org.springframework.data.mongodb.core.query.Criteria.*;
import static org.springframework.data.mongodb.core.query.Query.*;
@ -27,13 +25,12 @@ import org.bson.Document; @@ -27,13 +25,12 @@ import org.bson.Document;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.MongoTemplate.QueryCursorPreparer;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Meta;
import org.springframework.data.mongodb.core.query.Query;
@ -53,13 +50,16 @@ public class QueryCursorPreparerUnitTests { @@ -53,13 +50,16 @@ public class QueryCursorPreparerUnitTests {
@Mock MongoExceptionTranslator exceptionTranslatorMock;
@Mock FindIterable<Document> cursor;
@Mock FindIterable<Document> cursorToUse;
@Before
public void setUp() {
when(factory.getExceptionTranslator()).thenReturn(exceptionTranslatorMock);
when(cursor.modifiers(any(Document.class))).thenReturn(cursor);
when(cursor.batchSize(anyInt())).thenReturn(cursor);
when(cursor.comment(anyString())).thenReturn(cursor);
when(cursor.maxTime(anyLong(), any())).thenReturn(cursor);
when(cursor.maxScan(anyLong())).thenReturn(cursor);
when(cursor.hint(any())).thenReturn(cursor);
when(cursor.snapshot(anyBoolean())).thenReturn(cursor);
when(cursor.noCursorTimeout(anyBoolean())).thenReturn(cursor);
when(cursor.collation(any())).thenReturn(cursor);
}
@ -67,13 +67,10 @@ public class QueryCursorPreparerUnitTests { @@ -67,13 +67,10 @@ public class QueryCursorPreparerUnitTests {
@Test // DATAMONGO-185
public void appliesHintsCorrectly() {
Query query = query(where("foo").is("bar")).withHint("hint");
Query query = query(where("foo").is("bar")).withHint("{ age: 1 }");
prepare(query);
ArgumentCaptor<Document> captor = ArgumentCaptor.forClass(Document.class);
verify(cursor).modifiers(captor.capture());
assertThat(captor.getValue(), equalTo(new Document("$hint", "hint")));
verify(cursor).hint(new Document("age", 1));
}
@Test // DATAMONGO-957
@ -84,55 +81,43 @@ public class QueryCursorPreparerUnitTests { @@ -84,55 +81,43 @@ public class QueryCursorPreparerUnitTests {
prepare(query);
verify(cursorToUse, never()).modifiers(any(Document.class));
verify(cursor, never()).modifiers(any(Document.class));
}
@Test // DATAMONGO-957
public void appliesMaxScanCorrectly() {
Query query = query(where("foo").is("bar")).maxScan(100);
prepare(query);
ArgumentCaptor<Document> captor = ArgumentCaptor.forClass(Document.class);
verify(cursor).modifiers(captor.capture());
assertThat(captor.getValue(), equalTo(new Document("$maxScan", 100L)));
verify(cursor).maxScan(100);
}
@Test // DATAMONGO-957
public void appliesMaxTimeCorrectly() {
Query query = query(where("foo").is("bar")).maxTime(1, TimeUnit.SECONDS);
prepare(query);
ArgumentCaptor<Document> captor = ArgumentCaptor.forClass(Document.class);
verify(cursor).modifiers(captor.capture());
assertThat(captor.getValue(), equalTo(new Document("$maxTimeMS", 1000L)));
verify(cursor).maxTime(1000, TimeUnit.MILLISECONDS);
}
@Test // DATAMONGO-957
public void appliesCommentCorrectly() {
Query query = query(where("foo").is("bar")).comment("spring data");
prepare(query);
ArgumentCaptor<Document> captor = ArgumentCaptor.forClass(Document.class);
verify(cursor).modifiers(captor.capture());
assertThat(captor.getValue(), equalTo(new Document("$comment", "spring data")));
verify(cursor).comment("spring data");
}
@Test // DATAMONGO-957
public void appliesSnapshotCorrectly() {
Query query = query(where("foo").is("bar")).useSnapshot();
prepare(query);
ArgumentCaptor<Document> captor = ArgumentCaptor.forClass(Document.class);
verify(cursor).modifiers(captor.capture());
assertThat(captor.getValue(), equalTo(new Document("$snapshot", true)));
verify(cursor).snapshot(true);
}
@Test // DATAMONGO-1480
@ -153,6 +138,14 @@ public class QueryCursorPreparerUnitTests { @@ -153,6 +138,14 @@ public class QueryCursorPreparerUnitTests {
verify(cursor).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build()));
}
@Test // DATAMONGO-1311
public void appliesBatchSizeCorrectly() {
prepare(new BasicQuery("{}").cursorBatchSize(100));
verify(cursor).batchSize(100);
}
private FindIterable<Document> prepare(Query query) {
CursorPreparer preparer = new MongoTemplate(factory).new QueryCursorPreparer(query, null);

22
spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java

@ -19,12 +19,8 @@ import static org.hamcrest.Matchers.*; @@ -19,12 +19,8 @@ import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import static org.mockito.Mockito.any;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
import com.mongodb.client.model.ReplaceOptions;
import com.mongodb.reactivestreams.client.AggregatePublisher;
import lombok.Data;
import org.springframework.data.mongodb.core.query.Query;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
@ -46,20 +42,22 @@ import org.springframework.beans.factory.annotation.Value; @@ -46,20 +42,22 @@ import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.MongoTemplateUnitTests.AutogenerateableId;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate.NoOpDbRefResolver;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.test.util.ReflectionTestUtils;
import com.mongodb.client.model.DeleteOptions;
import com.mongodb.client.model.FindOneAndDeleteOptions;
import com.mongodb.client.model.FindOneAndUpdateOptions;
import com.mongodb.client.model.ReplaceOptions;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.reactivestreams.client.AggregatePublisher;
import com.mongodb.reactivestreams.client.FindPublisher;
import com.mongodb.reactivestreams.client.MongoClient;
import com.mongodb.reactivestreams.client.MongoCollection;
@ -264,20 +262,6 @@ public class ReactiveMongoTemplateUnitTests { @@ -264,20 +262,6 @@ public class ReactiveMongoTemplateUnitTests {
assertThat(options.getValue().getCollation().getLocale(), is("fr"));
}
@Ignore("currently no aggregation")
@Test // DATAMONGO-1518
public void aggregateShouldUseCollationWhenPresent() {
Aggregation aggregation = newAggregation(project("id"))
.withOptions(newAggregationOptions().collation(Collation.of("fr")).build());
// template.aggregate(aggregation, AutogenerateableId.class, Document.class).subscribe();
ArgumentCaptor<Document> cmd = ArgumentCaptor.forClass(Document.class);
verify(db).runCommand(cmd.capture(), any(Class.class));
assertThat(cmd.getValue().get("collation", Document.class), equalTo(new Document("locale", "fr")));
}
@Ignore("currently no mapReduce")
@Test // DATAMONGO-1518
public void mapReduceShouldUseCollationWhenPresent() {

12
spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryMethodUnitTests.java

@ -155,6 +155,15 @@ public class MongoQueryMethodUnitTests { @@ -155,6 +155,15 @@ public class MongoQueryMethodUnitTests {
assertThat(method.getQueryMetaAttributes().getCursorBatchSize(), is(100));
}
@Test // DATAMONGO-1311
public void createsMongoQueryMethodWithNegativeBatchSizeCorrectly() throws Exception {
MongoQueryMethod method = queryMethod(PersonRepository.class, "negativeBatchSize");
assertThat(method.hasQueryMetaAttributes(), is(true));
assertThat(method.getQueryMetaAttributes().getCursorBatchSize(), is(-200));
}
@Test // DATAMONGO-1403
public void createsMongoQueryMethodWithSpellFixedMaxExecutionTimeCorrectly() throws Exception {
@ -245,6 +254,9 @@ public class MongoQueryMethodUnitTests { @@ -245,6 +254,9 @@ public class MongoQueryMethodUnitTests {
@Meta(cursorBatchSize = 100)
List<User> batchSize();
@Meta(cursorBatchSize = -200)
List<User> negativeBatchSize();
@Meta(maxExecutionTimeMs = 100)
List<User> metaWithMaxExecutionTime();

29
src/main/asciidoc/reference/mongodb.adoc

@ -1761,6 +1761,35 @@ GeoResults<Jedi> results = mongoOps.query(SWCharacter.class) @@ -1761,6 +1761,35 @@ GeoResults<Jedi> results = mongoOps.query(SWCharacter.class)
----
====
[[mongo.query.additional-query-options]]
=== Additional Query Options
MongoDB offers various ways of applying meta information, like a comment or a batch size, to a query. When using the `Query` API
directly, there are several methods for setting those options.
====
[source,java]
----
Query query = query(where("firstname").is("luke"))
.comment("find luke") <1>
.batchSize(100) <2>
.slaveOk(); <3>
----
<1> The comment propagated to the MongoDB profile log.
<2> The number of documents to return in each response batch.
<3> Allows querying a replica slave.
====
On the repository level the `@Meta` annotation provides means to add query options in a declarative way.
====
[source,java]
----
@Meta(comment = "find luke", batchSize = 100, flags = { SLAVE_OK })
List<Person> findByFirstname(String firstname);
----
====
include::../{spring-data-commons-docs}/query-by-example.adoc[leveloffset=+1]
include::query-by-example.adoc[leveloffset=+1]

Loading…
Cancel
Save