Browse Source

DATAMONGO-934 - Polishing.

Polished JavaDoc and implementation as well as tests. Extracted Tuple to Spring Data Commons. Moved exception translation into MongoExceptionTranslator.

Changed implementation of DefaultBulkOperations to consider the WriteConcernResolver of the underlying MongoTemplate to avoid exposing the WriteConcern on execution.

Original pull request: #327.
Related tickets: DATACMNS-790.
pull/337/head
Oliver Gierke 10 years ago
parent
commit
7862841b48
  1. 23
      spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BulkOperationException.java
  2. 106
      spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperations.java
  3. 268
      spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultBulkOperations.java
  4. 34
      spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultWriteConcernResolver.java
  5. 4
      spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoActionOperation.java
  6. 16
      spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoExceptionTranslator.java
  7. 64
      spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java
  8. 137
      spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java
  9. 218
      spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultBulkOperationsIntegrationTests.java

23
spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BulkOperationException.java

@ -27,16 +27,28 @@ import com.mongodb.BulkWriteResult; @@ -27,16 +27,28 @@ import com.mongodb.BulkWriteResult;
* Is thrown when errors occur during bulk operations.
*
* @author Tobias Trelle
* @author Oliver Gierke
* @since 1.9
*/
public class BulkOperationException extends DataAccessException {
private static final long serialVersionUID = 73929601661154421L;
private final List<BulkWriteError> errors;
private final BulkWriteResult result;
public BulkOperationException(String msg, BulkWriteException e) {
super(msg, e);
this.errors = e.getWriteErrors();
this.result = e.getWriteResult();
/**
* Creates a new {@link BulkOperationException} with the given message and source {@link BulkWriteException}.
*
* @param message must not be {@literal null}.
* @param source must not be {@literal null}.
*/
public BulkOperationException(String message, BulkWriteException source) {
super(message, source);
this.errors = source.getWriteErrors();
this.result = source.getWriteResult();
}
public List<BulkWriteError> getErrors() {
@ -46,5 +58,4 @@ public class BulkOperationException extends DataAccessException { @@ -46,5 +58,4 @@ public class BulkOperationException extends DataAccessException {
public BulkWriteResult getResult() {
return result;
}
}

106
spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperations.java

@ -17,26 +17,27 @@ package org.springframework.data.mongodb.core; @@ -17,26 +17,27 @@ package org.springframework.data.mongodb.core;
import java.util.List;
import org.springframework.data.mongodb.BulkOperationException;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.util.Tuple;
import org.springframework.data.util.Tuple;
import com.mongodb.BulkWriteResult;
import com.mongodb.WriteConcern;
/**
* Bulk operations for insert/update/remove actions on a collection.
* <p/>
* These bulk operations are available since MongoDB 2.6 and make use of low level bulk commands on the protocol level.
* <p/>
* This interface defines a fluent API to add multiple single operations or lists of similar operations in sequence.
* Bulk operations for insert/update/remove actions on a collection. These bulk operations are available since MongoDB
* 2.6 and make use of low level bulk commands on the protocol level. This interface defines a fluent API to add
* multiple single operations or list of similar operations in sequence which can then eventually be executed by calling
* {@link #execute()}.
*
* @author Tobias Trelle
* @author Oliver Gierke
* @since 1.9
*/
public interface BulkOperations {
/** Mode for bulk operation. */
/**
* Mode for bulk operation.
**/
public enum BulkMode {
/** Perform bulk operations in sequence. The first error will cancel processing. */
@ -45,44 +46,37 @@ public interface BulkOperations { @@ -45,44 +46,37 @@ public interface BulkOperations {
/** Perform bulk operations in parallel. Processing will continue on errors. */
UNORDERED
};
/**
* Add a single insert to the bulk operation.
*
* @param documents List of documents to insert.
*
* @return The bulk operation.
*
* @throws BulkOperationException if an error occurred during bulk processing.
* @param documents the document to insert, must not be {@literal null}.
* @return the current {@link BulkOperations} instance with the insert added, will never be {@literal null}.
*/
BulkOperations insert(Object documents);
BulkOperations insert(Object documents);
/**
* Add a list of inserts to the bulk operation.
*
* @param documents List of documents to insert.
*
* @return The bulk operation.
*
* @throws BulkOperationException if an error occurred during bulk processing.
* @param documents List of documents to insert, must not be {@literal null}.
* @return the current {@link BulkOperations} instance with the insert added, will never be {@literal null}.
*/
BulkOperations insert(List<? extends Object> documents);
/**
* Add a single update to the bulk operation. For the update request, only the first matching document is updated.
* Add a single update to the bulk operation. For the update request, only the first matching document is updated.
*
* @param query Update criteria.
* @param update Update operation to perform.
*
* @return The bulk operation.
* @param query update criteria, must not be {@literal null}.
* @param update {@link Update} operation to perform, must not be {@literal null}.
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
*/
BulkOperations updateOne(Query query, Update update);
BulkOperations updateOne(Query query, Update update);
/**
* Add a list of updates to the bulk operation. For each update request, only the first matching document is updated.
* Add a list of updates to the bulk operation. For each update request, only the first matching document is updated.
*
* @param updates Update operations to perform.
*
* @return The bulk operation.
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
*/
BulkOperations updateOne(List<Tuple<Query, Update>> updates);
@ -91,17 +85,16 @@ public interface BulkOperations { @@ -91,17 +85,16 @@ public interface BulkOperations {
*
* @param query Update criteria.
* @param update Update operation to perform.
*
* @return The bulk operation.
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
*/
BulkOperations updateMulti(Query query, Update update);
/**
* Add a list of updates to the bulk operation. For each update request, all matching documents are updated.
*
* @param updates Update operations to perform.
*
* @return The bulk operation.
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
*/
BulkOperations updateMulti(List<Tuple<Query, Update>> updates);
@ -111,36 +104,34 @@ public interface BulkOperations { @@ -111,36 +104,34 @@ public interface BulkOperations {
*
* @param query Update criteria.
* @param update Update operation to perform.
*
* @return The bulk operation.
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
*/
BulkOperations upsert(Query query, Update update);
BulkOperations upsert(Query query, Update update);
/**
* Add a list of upserts to the bulk operation. An upsert is an update if the set of matching documents is not empty, else an
* insert.
* Add a list of upserts to the bulk operation. An upsert is an update if the set of matching documents is not empty,
* else an insert.
*
* @param updates Updates/insert operations to perform.
*
* @return The bulk operation.
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
*/
BulkOperations upsert(List<Tuple<Query, Update>> updates);
/**
* Add a single remove operation to the bulk operation.
*
* @param remove operations to perform.
*
* @return The bulk operation.
* @param remove the {@link Query} to select the documents to be removed, must not be {@literal null}.
* @return the current {@link BulkOperations} instance with the removal added, will never be {@literal null}.
*/
BulkOperations remove(Query remove);
BulkOperations remove(Query remove);
/**
* Add a list of remove operations to the bulk operation.
*
* @param remove operations to perform.
*
* @return The bulk operation.
* @param removes the remove operations to perform, must not be {@literal null}.
* @return the current {@link BulkOperations} instance with the removal added, will never be {@literal null}.
*/
BulkOperations remove(List<Query> removes);
@ -148,20 +139,7 @@ public interface BulkOperations { @@ -148,20 +139,7 @@ public interface BulkOperations {
* Execute all bulk operations using the default write concern.
*
* @return Result of the bulk operation providing counters for inserts/updates etc.
*
* @throws BulkOperationException if errors occur during the execution of the bulk operations.
* @throws {@link BulkOperationException} if an error occurred during bulk processing.
*/
BulkWriteResult executeBulk();
/**
* Execute all bulk operations using the given write concern.
*
* @param writeConcern Write concern to use.
*
* @return Result of the bulk operation providing counters for inserts/updates etc.
*
* @throws BulkOperationException if errors occur during the execution of the bulk operations.
*/
BulkWriteResult executeBulk(WriteConcern writeConcern);
BulkWriteResult execute();
}

268
spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultBulkOperations.java

@ -15,16 +15,19 @@ @@ -15,16 +15,19 @@
*/
package org.springframework.data.mongodb.core;
import java.util.Arrays;
import java.util.List;
import org.springframework.data.mongodb.BulkOperationException;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.util.Tuple;
import org.springframework.data.util.Tuple;
import org.springframework.util.Assert;
import com.mongodb.BulkWriteException;
import com.mongodb.BulkWriteOperation;
import com.mongodb.BulkWriteRequestBuilder;
import com.mongodb.BulkWriteResult;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
@ -34,45 +37,99 @@ import com.mongodb.WriteConcern; @@ -34,45 +37,99 @@ import com.mongodb.WriteConcern;
* Default implementation for {@link BulkOperations}.
*
* @author Tobias Trelle
* @author Oliver Gierke
* @since 1.9
*/
public class DefaultBulkOperations implements BulkOperations {
class DefaultBulkOperations implements BulkOperations {
private final MongoOperations mongoOperations;
private final BulkMode bulkMode;
private final String collectionName;
private final WriteConcern writeConcernDefault;
private final Class<?> entityType;
private PersistenceExceptionTranslator exceptionTranslator;
private WriteConcernResolver writeConcernResolver;
private WriteConcern defaultWriteConcern;
private BulkWriteOperation bulk;
/**
* Creates a new {@link DefaultBulkOperations}.
* Creates a new {@link DefaultBulkOperations} for the given {@link MongoOperations}, {@link BulkMode}, collection
* name and {@link WriteConcern}.
*
* @param mongoOperations The underlying Mongo operations.
* @param bulkMode The bulk mode (ordered or unordered).
* @param collectionName Name of the collection to work on.
* @param writeConcernDefault The default write concern for all executions.
* @param mongoOperations The underlying {@link MongoOperations}, must not be {@literal null}.
* @param bulkMode must not be {@literal null}.
* @param collectionName Name of the collection to work on, must not be {@literal null} or empty.
* @param entityType the entity type, can be {@literal null}.
*/
public DefaultBulkOperations(MongoOperations mongoOperations, BulkMode bulkMode, String collectionName,
WriteConcern writeConcernDefault) {
DefaultBulkOperations(MongoOperations mongoOperations, BulkMode bulkMode, String collectionName,
Class<?> entityType) {
Assert.notNull(mongoOperations, "MongoOperations must not be null!");
Assert.notNull(collectionName, "Collection name can not be null!");
Assert.notNull(bulkMode, "BulkMode must not be null!");
Assert.hasText(collectionName, "Collection name must not be null or empty!");
this.mongoOperations = mongoOperations;
this.bulkMode = bulkMode;
this.collectionName = collectionName;
this.writeConcernDefault = writeConcernDefault;
initBulkOp();
this.entityType = entityType;
this.exceptionTranslator = new MongoExceptionTranslator();
this.writeConcernResolver = DefaultWriteConcernResolver.INSTANCE;
this.bulk = initBulkOperation();
}
/**
* Configures the {@link PersistenceExceptionTranslator} to be used. Defaults to {@link MongoExceptionTranslator}.
*
* @param exceptionTranslator can be {@literal null}.
*/
public void setExceptionTranslator(PersistenceExceptionTranslator exceptionTranslator) {
this.exceptionTranslator = exceptionTranslator == null ? new MongoExceptionTranslator() : exceptionTranslator;
}
/**
* Configures the {@link WriteConcernResolver} to be used. Defaults to {@link DefaultWriteConcernResolver}.
*
* @param writeConcernResolver can be {@literal null}.
*/
public void setWriteConcernResolver(WriteConcernResolver writeConcernResolver) {
this.writeConcernResolver = writeConcernResolver == null ? DefaultWriteConcernResolver.INSTANCE
: writeConcernResolver;
}
/**
* Configures the default {@link WriteConcern} to be used. Defaults to {@literal null}.
*
* @param defaultWriteConcern can be {@literal null}.
*/
public void setDefaultWriteConcern(WriteConcern defaultWriteConcern) {
this.defaultWriteConcern = defaultWriteConcern;
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#insert(java.lang.Object)
*/
@Override
public BulkOperations insert(Object document) {
bulk.insert((DBObject) mongoOperations.getConverter().convertToMongoType(document));
Assert.notNull(document, "Document must not be null!");
bulk.insert((DBObject) mongoOperations.getConverter().convertToMongoType(document));
return this;
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#insert(java.util.List)
*/
@Override
public BulkOperations insert(List<? extends Object> documents) {
Assert.notNull(documents, "Documents must not be null!");
for (Object document : documents) {
insert(document);
}
@ -80,126 +137,191 @@ public class DefaultBulkOperations implements BulkOperations { @@ -80,126 +137,191 @@ public class DefaultBulkOperations implements BulkOperations {
return this;
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#updateOne(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update)
*/
@Override
@SuppressWarnings("unchecked")
public BulkOperations updateOne(Query query, Update update) {
return update(query, update, false, false);
Assert.notNull(query, "Query must not be null!");
Assert.notNull(update, "Update must not be null!");
return updateOne(Arrays.asList(Tuple.of(query, update)));
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#updateOne(java.util.List)
*/
@Override
public BulkOperations updateOne(List<Tuple<Query, Update>> updates) {
Assert.notNull(updates, "Updates must not be null!");
for (Tuple<Query, Update> update : updates) {
update(update.getFirst(), update.getSecond(), false, false);
}
return this;
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update)
*/
@Override
@SuppressWarnings("unchecked")
public BulkOperations updateMulti(Query query, Update update) {
return update(query, update, false, true);
Assert.notNull(query, "Query must not be null!");
Assert.notNull(update, "Update must not be null!");
return updateMulti(Arrays.asList(Tuple.of(query, update)));
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#updateMulti(java.util.List)
*/
@Override
public BulkOperations updateMulti(List<Tuple<Query, Update>> updates) {
Assert.notNull(updates, "Updates must not be null!");
for (Tuple<Query, Update> update : updates) {
update(update.getFirst(), update.getSecond(), false, true);
}
return this;
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update)
*/
@Override
public BulkOperations upsert(Query query, Update update) {
return update(query, update, true, true);
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#upsert(java.util.List)
*/
@Override
public BulkOperations upsert(List<Tuple<Query, Update>> updates) {
for (Tuple<Query, Update> update : updates) {
upsert(update.getFirst(), update.getSecond());
}
return this;
}
/**
* Performs update and upsert bulk operations.
*
* @param query Criteria to match documents.
* @param update Update to perform.
* @param upsert Upsert flag.
* @param multi Multi update flag.
* @param writeConcern The write concern to use.
*
* @return Self reference.
*
* @throws BulkOperationException if an error occurred during bulk processing.
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#remove(org.springframework.data.mongodb.core.query.Query)
*/
protected BulkOperations update(Query query, Update pdate, boolean upsert, boolean multi) {
if (upsert) {
if (multi) {
bulk.find(query.getQueryObject()).upsert().update(pdate.getUpdateObject());
} else {
bulk.find(query.getQueryObject()).upsert().updateOne(pdate.getUpdateObject());
}
} else {
if (multi) {
bulk.find(query.getQueryObject()).update(pdate.getUpdateObject());
} else {
bulk.find(query.getQueryObject()).updateOne(pdate.getUpdateObject());
}
}
@Override
public BulkOperations remove(Query query) {
return this;
}
Assert.notNull(query, "Query must not be null!");
@Override
public BulkOperations remove(Query remove) {
bulk.find(remove.getQueryObject()).remove();
bulk.find(query.getQueryObject()).remove();
return this;
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#remove(java.util.List)
*/
@Override
public BulkOperations remove(List<Query> removes) {
Assert.notNull(removes, "Removals must not be null!");
for (Query query : removes) {
remove(query);
}
return this;
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#executeBulk()
*/
@Override
public BulkWriteResult executeBulk() {
return executeBulk(writeConcernDefault);
}
public BulkWriteResult execute() {
MongoAction action = new MongoAction(defaultWriteConcern, MongoActionOperation.BULK, collectionName, entityType,
null, null);
WriteConcern writeConcern = writeConcernResolver.resolve(action);
@Override
public BulkWriteResult executeBulk(WriteConcern writeConcern) {
try {
if (writeConcern != null) {
return bulk.execute(writeConcern);
return writeConcern == null ? bulk.execute() : bulk.execute(writeConcern);
} catch (BulkWriteException o_O) {
DataAccessException toThrow = exceptionTranslator.translateExceptionIfPossible(o_O);
throw toThrow == null ? o_O : toThrow;
} finally {
this.bulk = initBulkOperation();
}
}
/**
* Performs update and upsert bulk operations.
*
* @param query the {@link Query} to determine documents to update.
* @param update the {@link Update} to perform, must not be {@literal null}.
* @param upsert whether to upsert.
* @param multi whether to issue a multi-update.
* @return the {@link BulkOperations} with the update registered.
*/
private BulkOperations update(Query query, Update update, boolean upsert, boolean multi) {
Assert.notNull(query, "Query must not be null!");
Assert.notNull(update, "Update must not be null!");
BulkWriteRequestBuilder builder = bulk.find(query.getQueryObject());
if (upsert) {
if (multi) {
builder.upsert().update(update.getUpdateObject());
} else {
return bulk.execute();
builder.upsert().updateOne(update.getUpdateObject());
}
} else {
if (multi) {
builder.update(update.getUpdateObject());
} else {
builder.updateOne(update.getUpdateObject());
}
} catch (BulkWriteException e) {
throw new BulkOperationException("Bulk operation did not complete", e);
} finally {
// reset bulk for future use
initBulkOp();
}
return this;
}
private void initBulkOp() {
this.bulk = createBulkOperation(bulkMode, mongoOperations.getCollection(collectionName));
}
private final BulkWriteOperation initBulkOperation() {
private BulkWriteOperation createBulkOperation(BulkMode mode, DBCollection collection) {
switch (mode) {
case ORDERED:
return collection.initializeOrderedBulkOperation();
case UNORDERED:
return collection.initializeUnorderedBulkOperation();
default:
return null;
DBCollection collection = mongoOperations.getCollection(collectionName);
switch (bulkMode) {
case ORDERED:
return collection.initializeOrderedBulkOperation();
case UNORDERED:
return collection.initializeUnorderedBulkOperation();
}
}
throw new IllegalStateException("BulkMode was null!");
}
}

34
spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/Tuple.java → spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultWriteConcernResolver.java

@ -13,32 +13,20 @@ @@ -13,32 +13,20 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.util;
package org.springframework.data.mongodb.core;
import com.mongodb.WriteConcern;
/**
* A tuple of things.
*
* @author Tobias Trelle
* Default {@link WriteConcernResolver} resolving the {@link WriteConcern} from the given {@link MongoAction}.
*
* @param <T> Type of the first thing.
* @param <S> Type of the second thing.
* @author Oliver Gierke
*/
public class Tuple<T,S> {
private T t;
private S s;
public Tuple(T t, S s) {
this.t = t;
this.s = s;
}
public T getFirst() {
return t;
}
public S getSecond() {
return s;
enum DefaultWriteConcernResolver implements WriteConcernResolver {
INSTANCE;
public WriteConcern resolve(MongoAction action) {
return action.getDefaultWriteConcern();
}
}

4
spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoActionOperation.java

@ -1,5 +1,5 @@ @@ -1,5 +1,5 @@
/*
* Copyright 2011-2012 the original author or authors.
* Copyright 2011-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -25,5 +25,5 @@ package org.springframework.data.mongodb.core; @@ -25,5 +25,5 @@ package org.springframework.data.mongodb.core;
*/
public enum MongoActionOperation {
REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE
REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE, BULK;
}

16
spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoExceptionTranslator.java

@ -27,10 +27,12 @@ import org.springframework.dao.InvalidDataAccessApiUsageException; @@ -27,10 +27,12 @@ import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.dao.InvalidDataAccessResourceUsageException;
import org.springframework.dao.PermissionDeniedDataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.BulkOperationException;
import org.springframework.data.mongodb.UncategorizedMongoDbException;
import org.springframework.data.mongodb.util.MongoDbErrorCodes;
import org.springframework.util.ClassUtils;
import com.mongodb.BulkWriteException;
import com.mongodb.MongoException;
/**
@ -44,12 +46,12 @@ import com.mongodb.MongoException; @@ -44,12 +46,12 @@ import com.mongodb.MongoException;
*/
public class MongoExceptionTranslator implements PersistenceExceptionTranslator {
private static final Set<String> DULICATE_KEY_EXCEPTIONS = new HashSet<String>(Arrays.asList(
"MongoException.DuplicateKey", "DuplicateKeyException"));
private static final Set<String> DULICATE_KEY_EXCEPTIONS = new HashSet<String>(
Arrays.asList("MongoException.DuplicateKey", "DuplicateKeyException"));
private static final Set<String> RESOURCE_FAILURE_EXCEPTIONS = new HashSet<String>(Arrays.asList(
"MongoException.Network", "MongoSocketException", "MongoException.CursorNotFound",
"MongoCursorNotFoundException", "MongoServerSelectionException", "MongoTimeoutException"));
private static final Set<String> RESOURCE_FAILURE_EXCEPTIONS = new HashSet<String>(
Arrays.asList("MongoException.Network", "MongoSocketException", "MongoException.CursorNotFound",
"MongoCursorNotFoundException", "MongoServerSelectionException", "MongoTimeoutException"));
private static final Set<String> RESOURCE_USAGE_EXCEPTIONS = new HashSet<String>(
Arrays.asList("MongoInternalException"));
@ -83,6 +85,10 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator @@ -83,6 +85,10 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
return new DataIntegrityViolationException(ex.getMessage(), ex);
}
if (ex instanceof BulkWriteException) {
return new BulkOperationException(ex.getMessage(), (BulkWriteException) ex);
}
// All other MongoExceptions
if (ex instanceof MongoException) {

64
spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java

@ -294,25 +294,33 @@ public interface MongoOperations { @@ -294,25 +294,33 @@ public interface MongoOperations {
ScriptOperations scriptOps();
/**
* Returns the bulk operations.
* Returns a new {@link BulkOperations} for the given collection.
*
* @param bulkMode Mode to use for bulk operations (ordered, unordered).
* @param collectionsName Name of the collection to work on.
*
* @return index operations on the named collection
* @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}.
* @param collectionName the name of the collection to work on, must not be {@literal null} or empty.
* @return {@link BulkOperations} on the named collection
*/
BulkOperations bulkOps(BulkMode bulkMode, String collectionsName);
BulkOperations bulkOps(BulkMode mode, String collectionName);
/**
* Returns the bulk operations.
* Returns a new {@link BulkOperations} for the given entity type.
*
* @param bulkMode Mode to use for bulk operations (ordered, unordered).
* @param entityClass Name of the entity class.
*
* @return index operations on the named collection associated with the given entity class
* @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}.
* @param entityType the name of the entity class, must not be {@literal null}.
* @return {@link BulkOperations} on the named collection associated of the given entity class.
*/
BulkOperations bulkOps(BulkMode mode, Class<?> entityType);
/**
* Returns a new {@link BulkOperations} for the given entity type and collection name.
*
* @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}.
* @param entityClass the name of the entity class, must not be {@literal null}.
* @param collectionName the name of the collection to work on, must not be {@literal null} or empty.
* @return {@link BulkOperations} on the named collection associated with the given entity class.
*/
BulkOperations bulkOps(BulkMode bulkMode, Class<?> entityClass);
BulkOperations bulkOps(BulkMode mode, Class<?> entityType, String collectionName);
/**
* Query for a list of objects of type T from the collection used by the entity class.
* <p/>
@ -621,8 +629,8 @@ v * @@ -621,8 +629,8 @@ v *
<T> T findById(Object id, Class<T> entityClass, String collectionName);
/**
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify
* <a/> to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification.
@ -633,8 +641,8 @@ v * @@ -633,8 +641,8 @@ v *
<T> T findAndModify(Query query, Update update, Class<T> entityClass);
/**
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify
* <a/> to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification.
@ -646,8 +654,8 @@ v * @@ -646,8 +654,8 @@ v *
<T> T findAndModify(Query query, Update update, Class<T> entityClass, String collectionName);
/**
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify
* <a/> to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
* {@link FindAndModifyOptions} into account.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
@ -660,8 +668,8 @@ v * @@ -660,8 +668,8 @@ v *
<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass);
/**
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify
* <a/> to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
* {@link FindAndModifyOptions} into account.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
@ -749,9 +757,9 @@ v * @@ -749,9 +757,9 @@ v *
* <p/>
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
* href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert"
* >Spring's Type Conversion"</a> for more details.
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
* <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert" >
* Spring's Type Conversion"</a> for more details.
* <p/>
* <p/>
* Insert is used to initially store the object into the database. To update an existing object use the save method.
@ -806,9 +814,9 @@ v * @@ -806,9 +814,9 @@ v *
* <p/>
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
* href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert"
* >Spring's Type Conversion"</a> for more details.
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
* <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert">
* Spring's Type Conversion</a> for more details.
*
* @param objectToSave the object to store in the collection
*/

137
spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java

@ -339,8 +339,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -339,8 +339,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
DBCursor cursor = collection.find(mappedQuery, mappedFields);
QueryCursorPreparer cursorPreparer = new QueryCursorPreparer(query, entityType);
ReadDbObjectCallback<T> readCallback = new ReadDbObjectCallback<T>(mongoConverter, entityType, collection
.getName());
ReadDbObjectCallback<T> readCallback = new ReadDbObjectCallback<T>(mongoConverter, entityType,
collection.getName());
return new CloseableIterableCursorAdapter<T>(cursorPreparer.prepare(cursor), exceptionTranslator, readCallback);
}
@ -373,8 +373,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -373,8 +373,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
*/
@Deprecated
public CommandResult executeCommand(final DBObject command, final int options) {
return executeCommand(command, (options & Bytes.QUERYOPTION_SLAVEOK) != 0 ? ReadPreference.secondaryPreferred()
: ReadPreference.primary());
return executeCommand(command,
(options & Bytes.QUERYOPTION_SLAVEOK) != 0 ? ReadPreference.secondaryPreferred() : ReadPreference.primary());
}
/*
@ -422,7 +422,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -422,7 +422,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
* @param preparer allows for customization of the {@link DBCursor} used when iterating over the result set, (apply
* limits, skips and so on).
*/
protected void executeQuery(Query query, String collectionName, DocumentCallbackHandler dch, CursorPreparer preparer) {
protected void executeQuery(Query query, String collectionName, DocumentCallbackHandler dch,
CursorPreparer preparer) {
Assert.notNull(query);
@ -546,13 +547,27 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -546,13 +547,27 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
}
public BulkOperations bulkOps(BulkMode bulkMode, String collectionName) {
return new DefaultBulkOperations(this, bulkMode, collectionName, writeConcern);
return bulkOps(bulkMode, null, collectionName);
}
public BulkOperations bulkOps(BulkMode bulkMode, Class<?> entityClass) {
return new DefaultBulkOperations(this, bulkMode, determineCollectionName(entityClass), writeConcern);
return bulkOps(bulkMode, entityClass, determineCollectionName(entityClass));
}
public BulkOperations bulkOps(BulkMode mode, Class<?> entityType, String collectionName) {
Assert.notNull(mode, "BulkMode must not be null!");
Assert.hasText(collectionName, "Collection name must not be null or empty!");
DefaultBulkOperations operations = new DefaultBulkOperations(this, mode, collectionName, entityType);
operations.setExceptionTranslator(exceptionTranslator);
operations.setWriteConcernResolver(writeConcernResolver);
operations.setDefaultWriteConcern(writeConcern);
return operations;
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.MongoOperations#scriptOps()
@ -646,8 +661,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -646,8 +661,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
List<Object> results = (List<Object>) commandResult.get("results");
results = results == null ? Collections.emptyList() : results;
DbObjectCallback<GeoResult<T>> callback = new GeoNearResultDbObjectCallback<T>(new ReadDbObjectCallback<T>(
mongoConverter, entityClass, collectionName), near.getMetric());
DbObjectCallback<GeoResult<T>> callback = new GeoNearResultDbObjectCallback<T>(
new ReadDbObjectCallback<T>(mongoConverter, entityClass, collectionName), near.getMetric());
List<GeoResult<T>> result = new ArrayList<GeoResult<T>>(results.size());
int index = 0;
@ -724,8 +739,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -724,8 +739,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
public long count(Query query, Class<?> entityClass, String collectionName) {
Assert.hasText(collectionName);
final DBObject dbObject = query == null ? null : queryMapper.getMappedObject(query.getQueryObject(),
entityClass == null ? null : mappingContext.getPersistentEntity(entityClass));
final DBObject dbObject = query == null ? null
: queryMapper.getMappedObject(query.getQueryObject(),
entityClass == null ? null : mappingContext.getPersistentEntity(entityClass));
return execute(collectionName, new CollectionCallback<Long>() {
public Long doInCollection(DBCollection collection) throws MongoException, DataAccessException {
@ -996,8 +1012,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -996,8 +1012,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT, collectionName,
entityClass, dbDoc, null);
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
WriteResult writeResult = writeConcernToUse == null ? collection.insert(dbDoc) : collection.insert(dbDoc,
writeConcernToUse);
WriteResult writeResult = writeConcernToUse == null ? collection.insert(dbDoc)
: collection.insert(dbDoc, writeConcernToUse);
handleAnyWriteResultErrors(writeResult, dbDoc, MongoActionOperation.INSERT);
return dbDoc.get(ID_FIELD);
}
@ -1017,8 +1033,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -1017,8 +1033,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT_LIST, collectionName, null,
null, null);
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
WriteResult writeResult = writeConcernToUse == null ? collection.insert(dbDocList) : collection.insert(
dbDocList.toArray((DBObject[]) new BasicDBObject[dbDocList.size()]), writeConcernToUse);
WriteResult writeResult = writeConcernToUse == null ? collection.insert(dbDocList)
: collection.insert(dbDocList.toArray((DBObject[]) new BasicDBObject[dbDocList.size()]), writeConcernToUse);
handleAnyWriteResultErrors(writeResult, null, MongoActionOperation.INSERT_LIST);
return null;
}
@ -1046,8 +1062,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -1046,8 +1062,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.SAVE, collectionName, entityClass,
dbDoc, null);
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
WriteResult writeResult = writeConcernToUse == null ? collection.save(dbDoc) : collection.save(dbDoc,
writeConcernToUse);
WriteResult writeResult = writeConcernToUse == null ? collection.save(dbDoc)
: collection.save(dbDoc, writeConcernToUse);
handleAnyWriteResultErrors(writeResult, dbDoc, MongoActionOperation.SAVE);
return dbDoc.get(ID_FIELD);
}
@ -1100,10 +1116,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -1100,10 +1116,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
increaseVersionForUpdateIfNecessary(entity, update);
DBObject queryObj = query == null ? new BasicDBObject() : queryMapper.getMappedObject(query.getQueryObject(),
entity);
DBObject updateObj = update == null ? new BasicDBObject() : updateMapper.getMappedObject(
update.getUpdateObject(), entity);
DBObject queryObj = query == null ? new BasicDBObject()
: queryMapper.getMappedObject(query.getQueryObject(), entity);
DBObject updateObj = update == null ? new BasicDBObject()
: updateMapper.getMappedObject(update.getUpdateObject(), entity);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s",
@ -1244,9 +1260,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -1244,9 +1260,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
Object idValue = persistentEntity.getPropertyAccessor(entity).getProperty(idProperty);
if (idValue == null && !MongoSimpleTypes.AUTOGENERATED_ID_TYPES.contains(idProperty.getType())) {
throw new InvalidDataAccessApiUsageException(String.format(
"Cannot autogenerate id of type %s for entity of type %s!", idProperty.getType().getName(), entity.getClass()
.getName()));
throw new InvalidDataAccessApiUsageException(
String.format("Cannot autogenerate id of type %s for entity of type %s!", idProperty.getType().getName(),
entity.getClass().getName()));
}
}
@ -1285,12 +1301,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -1285,12 +1301,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Remove using query: {} in collection: {}.", new Object[] { serializeToJsonSafely(dboq),
collection.getName() });
LOGGER.debug("Remove using query: {} in collection: {}.",
new Object[] { serializeToJsonSafely(dboq), collection.getName() });
}
WriteResult wr = writeConcernToUse == null ? collection.remove(dboq) : collection.remove(dboq,
writeConcernToUse);
WriteResult wr = writeConcernToUse == null ? collection.remove(dboq)
: collection.remove(dboq, writeConcernToUse);
handleAnyWriteResultErrors(wr, dboq, MongoActionOperation.REMOVE);
@ -1306,8 +1322,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -1306,8 +1322,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
}
public <T> List<T> findAll(Class<T> entityClass, String collectionName) {
return executeFindMultiInternal(new FindCallback(null), null, new ReadDbObjectCallback<T>(mongoConverter,
entityClass, collectionName), collectionName);
return executeFindMultiInternal(new FindCallback(null), null,
new ReadDbObjectCallback<T>(mongoConverter, entityClass, collectionName), collectionName);
}
public <T> MapReduceResults<T> mapReduce(String inputCollectionName, String mapFunction, String reduceFunction,
@ -1323,8 +1339,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -1323,8 +1339,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
public <T> MapReduceResults<T> mapReduce(Query query, String inputCollectionName, String mapFunction,
String reduceFunction, Class<T> entityClass) {
return mapReduce(query, inputCollectionName, mapFunction, reduceFunction,
new MapReduceOptions().outputTypeInline(), entityClass);
return mapReduce(query, inputCollectionName, mapFunction, reduceFunction, new MapReduceOptions().outputTypeInline(),
entityClass);
}
public <T> MapReduceResults<T> mapReduce(Query query, String inputCollectionName, String mapFunction,
@ -1335,8 +1351,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -1335,8 +1351,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
DBCollection inputCollection = getCollection(inputCollectionName);
MapReduceCommand command = new MapReduceCommand(inputCollection, mapFunc, reduceFunc,
mapReduceOptions.getOutputCollection(), mapReduceOptions.getOutputType(), query == null
|| query.getQueryObject() == null ? null : queryMapper.getMappedObject(query.getQueryObject(), null));
mapReduceOptions.getOutputCollection(), mapReduceOptions.getOutputType(),
query == null || query.getQueryObject() == null ? null
: queryMapper.getMappedObject(query.getQueryObject(), null));
copyMapReduceOptionsToCommand(query, mapReduceOptions, command);
@ -1669,8 +1686,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -1669,8 +1686,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
serializeToJsonSafely(query), mappedFields, entityClass, collectionName));
}
return executeFindOneInternal(new FindOneCallback(mappedQuery, mappedFields), new ReadDbObjectCallback<T>(
this.mongoConverter, entityClass, collectionName), collectionName);
return executeFindOneInternal(new FindOneCallback(mappedQuery, mappedFields),
new ReadDbObjectCallback<T>(this.mongoConverter, entityClass, collectionName), collectionName);
}
/**
@ -1684,8 +1701,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -1684,8 +1701,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
* @return the List of converted objects.
*/
protected <T> List<T> doFind(String collectionName, DBObject query, DBObject fields, Class<T> entityClass) {
return doFind(collectionName, query, fields, entityClass, null, new ReadDbObjectCallback<T>(this.mongoConverter,
entityClass, collectionName));
return doFind(collectionName, query, fields, entityClass, null,
new ReadDbObjectCallback<T>(this.mongoConverter, entityClass, collectionName));
}
/**
@ -1703,8 +1720,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -1703,8 +1720,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
*/
protected <T> List<T> doFind(String collectionName, DBObject query, DBObject fields, Class<T> entityClass,
CursorPreparer preparer) {
return doFind(collectionName, query, fields, entityClass, preparer, new ReadDbObjectCallback<T>(mongoConverter,
entityClass, collectionName));
return doFind(collectionName, query, fields, entityClass, preparer,
new ReadDbObjectCallback<T>(mongoConverter, entityClass, collectionName));
}
protected <S, T> List<T> doFind(String collectionName, DBObject query, DBObject fields, Class<S> entityClass,
@ -1780,9 +1797,13 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -1780,9 +1797,13 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
DBObject mappedUpdate = updateMapper.getMappedObject(update.getUpdateObject(), entity);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(String.format("findAndModify using query: %s fields: %s sort: %s for class: %s and update: %s "
+ "in collection: %s", serializeToJsonSafely(mappedQuery), fields, sort, entityClass,
serializeToJsonSafely(mappedUpdate), collectionName));
LOGGER
.debug(
String.format(
"findAndModify using query: %s fields: %s sort: %s for class: %s and update: %s "
+ "in collection: %s",
serializeToJsonSafely(mappedQuery), fields, sort, entityClass, serializeToJsonSafely(mappedUpdate),
collectionName));
}
return executeFindOneInternal(new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate, options),
@ -1852,8 +1873,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -1852,8 +1873,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
DbObjectCallback<T> objectCallback, String collectionName) {
try {
T result = objectCallback.doWith(collectionCallback.doInCollection(getAndPrepareCollection(getDb(),
collectionName)));
T result = objectCallback
.doWith(collectionCallback.doInCollection(getAndPrepareCollection(getDb(), collectionName)));
return result;
} catch (RuntimeException e) {
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
@ -1878,8 +1899,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -1878,8 +1899,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
* @param collectionName the collection to be queried
* @return
*/
private <T> List<T> executeFindMultiInternal(CollectionCallback<DBCursor> collectionCallback,
CursorPreparer preparer, DbObjectCallback<T> objectCallback, String collectionName) {
private <T> List<T> executeFindMultiInternal(CollectionCallback<DBCursor> collectionCallback, CursorPreparer preparer,
DbObjectCallback<T> objectCallback, String collectionName) {
try {
@ -1969,8 +1990,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -1969,8 +1990,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
if (entity == null) {
throw new InvalidDataAccessApiUsageException("No Persistent Entity information found for the class "
+ entityClass.getName());
throw new InvalidDataAccessApiUsageException(
"No Persistent Entity information found for the class " + entityClass.getName());
}
return entity.getCollection();
}
@ -2034,8 +2055,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -2034,8 +2055,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
String error = result.getErrorMessage();
error = error == null ? "NO MESSAGE" : error;
throw new InvalidDataAccessApiUsageException("Command execution failed: Error [" + error + "], Command = "
+ source, ex);
throw new InvalidDataAccessApiUsageException(
"Command execution failed: Error [" + error + "], Command = " + source, ex);
}
}
@ -2231,7 +2252,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -2231,7 +2252,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
class UnwrapAndReadDbObjectCallback<T> extends ReadDbObjectCallback<T> {
public UnwrapAndReadDbObjectCallback(EntityReader<? super T, DBObject> reader, Class<T> type, String collectionName) {
public UnwrapAndReadDbObjectCallback(EntityReader<? super T, DBObject> reader, Class<T> type,
String collectionName) {
super(reader, type, collectionName);
}
@ -2258,15 +2280,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { @@ -2258,15 +2280,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
}
}
private enum DefaultWriteConcernResolver implements WriteConcernResolver {
INSTANCE;
public WriteConcern resolve(MongoAction action) {
return action.getDefaultWriteConcern();
}
}
class QueryCursorPreparer implements CursorPreparer {
private final Query query;

218
spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultBulkOperationsIntegrationTests.java

@ -15,10 +15,8 @@ @@ -15,10 +15,8 @@
*/
package org.springframework.data.mongodb.core;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.Arrays;
@ -33,7 +31,7 @@ import org.springframework.data.mongodb.core.BulkOperations.BulkMode; @@ -33,7 +31,7 @@ import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.util.Tuple;
import org.springframework.data.util.Tuple;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
@ -47,148 +45,187 @@ import com.mongodb.WriteConcern; @@ -47,148 +45,187 @@ import com.mongodb.WriteConcern;
* Integration tests for {@link DefaultBulkOperations}.
*
* @author Tobias Trelle
* @author Oliver Gierke
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath:infrastructure.xml")
public class DefaultBulkOperationsIntegrationTests {
private static final String COLLECTION_NAME = "bulk_ops";
static final String COLLECTION_NAME = "bulk_ops";
@Autowired
private MongoTemplate template;
@Autowired MongoOperations operations;
private DBCollection collection;
private BulkOperations bulkOps;
DBCollection collection;
@Before
public void setUp() {
this.collection = this.template.getDb().getCollection(COLLECTION_NAME);
this.collection = this.operations.getCollection(COLLECTION_NAME);
this.collection.remove(new BasicDBObject());
}
/**
* @see DATAMONGO-934
*/
@Test(expected = IllegalArgumentException.class)
public void rejectsNullMongoOperations() {
new DefaultBulkOperations(null, null, COLLECTION_NAME, null);
}
/**
* @see DATAMONGO-934
*/
@Test(expected = IllegalArgumentException.class)
public void rejectsNullCollectionName() {
new DefaultBulkOperations(operations, null, null, null);
}
/**
* @see DATAMONGO-934
*/
@Test(expected = IllegalArgumentException.class)
public void rejectsEmptyCollectionName() {
new DefaultBulkOperations(operations, null, "", null);
}
/**
* @see DATAMONGO-934
*/
@Test
public void insertOrdered() {
// given
List<BaseDoc> documents = Arrays.asList(newDoc("1"), newDoc("2"));
bulkOps = createBulkOps(BulkMode.ORDERED);
// when
int n = bulkOps.insert(documents).executeBulk().getInsertedCount();
List<BaseDoc> documents = Arrays.asList(newDoc("1"), newDoc("2"));
// then
assertThat(n, is(2));
assertThat(createBulkOps(BulkMode.ORDERED).insert(documents).execute().getInsertedCount(), is(2));
}
/**
* @see DATAMONGO-934
*/
@Test
public void insertOrderedFails() {
// given
List<BaseDoc> documents = Arrays.asList(newDoc("1"), newDoc("1"), newDoc("2"));
bulkOps = createBulkOps(BulkMode.ORDERED);
// when
try {
bulkOps.insert(documents).executeBulk();
createBulkOps(BulkMode.ORDERED).insert(documents).execute();
fail();
} catch (BulkOperationException e) {
// then
assertThat(e.getResult().getInsertedCount(), is(1)); // fails after first error
assertThat(e.getErrors(), notNullValue());
assertThat(e.getErrors().size(), is(1));
}
}
/**
* @see DATAMONGO-934
*/
@Test
public void insertUnOrdered() {
// given
List<BaseDoc> documents = Arrays.asList(newDoc("1"), newDoc("2"));
bulkOps = createBulkOps(BulkMode.UNORDERED);
// when
int n = bulkOps.insert(documents).executeBulk().getInsertedCount();
List<BaseDoc> documents = Arrays.asList(newDoc("1"), newDoc("2"));
// then
assertThat(n, is(2));
assertThat(createBulkOps(BulkMode.UNORDERED).insert(documents).execute().getInsertedCount(), is(2));
}
/**
* @see DATAMONGO-934
*/
@Test
public void insertUnOrderedContinuesOnError() {
// given
List<BaseDoc> documents = Arrays.asList(newDoc("1"), newDoc("1"), newDoc("2"));
bulkOps = createBulkOps(BulkMode.UNORDERED);
// when
try {
bulkOps.insert(documents).executeBulk();
createBulkOps(BulkMode.UNORDERED).insert(documents).execute();
fail();
} catch (BulkOperationException e) {
// then
assertThat(e.getResult().getInsertedCount(), is(2)); // two docs were inserted
assertThat(e.getErrors(), notNullValue());
assertThat(e.getErrors().size(), is(1));
}
}
/**
* @see DATAMONGO-934
*/
@Test
public void upsertDoesUpdate() {
// given
bulkOps = createBulkOps(BulkMode.ORDERED);
insertSomeDocuments();
// when
BulkWriteResult result = bulkOps.upsert(where("value", "value1"), set("value", "value2")).executeBulk();
BulkWriteResult result = createBulkOps(BulkMode.ORDERED).//
upsert(where("value", "value1"), set("value", "value2")).//
execute();
// then
assertThat(result, notNullValue());
assertThat(result.getMatchedCount(), is(2));
assertThat(result.getModifiedCount(), is(2));
assertThat(result.getInsertedCount(), is(0));
assertThat(result.getUpserts(), notNullValue());
assertThat(result.getUpserts(), is(notNullValue()));
assertThat(result.getUpserts().size(), is(0));
}
/**
* @see DATAMONGO-934
*/
@Test
public void upsertDoesInsert() {
// given
bulkOps = createBulkOps(BulkMode.ORDERED);
// when
BulkWriteResult result = bulkOps.upsert(where("_id", "1"), set("value", "v1")).executeBulk();
BulkWriteResult result = createBulkOps(BulkMode.ORDERED).//
upsert(where("_id", "1"), set("value", "v1")).//
execute();
// then
assertThat(result, notNullValue());
assertThat(result.getMatchedCount(), is(0));
assertThat(result.getModifiedCount(), is(0));
assertThat(result.getUpserts(), notNullValue());
assertThat(result.getUpserts(), is(notNullValue()));
assertThat(result.getUpserts().size(), is(1));
}
/**
* @see DATAMONGO-934
*/
@Test
public void updateOneOrdered() {
testUpdate(BulkMode.ORDERED, false, 2);
}
/**
* @see DATAMONGO-934
*/
@Test
public void updateMultiOrdered() {
testUpdate(BulkMode.ORDERED, true, 4);
}
/**
* @see DATAMONGO-934
*/
@Test
public void updateOneUnOrdered() {
testUpdate(BulkMode.UNORDERED, false, 2);
}
/**
* @see DATAMONGO-934
*/
@Test
public void updateMultiUnOrdered() {
testUpdate(BulkMode.UNORDERED, true, 4);
}
/**
* @see DATAMONGO-934
*/
@Test
public void removeOrdered() {
testRemove(BulkMode.ORDERED);
}
/**
* @see DATAMONGO-934
*/
@Test
public void removeUnordered() {
testRemove(BulkMode.UNORDERED);
@ -196,17 +233,17 @@ public class DefaultBulkOperationsIntegrationTests { @@ -196,17 +233,17 @@ public class DefaultBulkOperationsIntegrationTests {
/**
* If working on the same set of documents, only an ordered bulk operation will yield predictable results.
*
* @see DATAMONGO-934
*/
@Test
public void mixedBulkOrdered() {
// given
bulkOps = createBulkOps(BulkMode.ORDERED);
// when
BulkWriteResult result = bulkOps.insert(newDoc("1", "v1")).updateOne(where("_id", "1"), set("value", "v2"))
.remove(where("value", "v2")).executeBulk();
BulkWriteResult result = createBulkOps(BulkMode.ORDERED).insert(newDoc("1", "v1")).//
updateOne(where("_id", "1"), set("value", "v2")).//
remove(where("value", "v2")).//
execute();
// then
assertThat(result, notNullValue());
assertThat(result.getInsertedCount(), is(1));
assertThat(result.getModifiedCount(), is(1));
@ -217,18 +254,16 @@ public class DefaultBulkOperationsIntegrationTests { @@ -217,18 +254,16 @@ public class DefaultBulkOperationsIntegrationTests {
* If working on the same set of documents, only an ordered bulk operation will yield predictable results.
*/
@Test
@SuppressWarnings("unchecked")
public void mixedBulkOrderedWithList() {
// given
bulkOps = createBulkOps(BulkMode.ORDERED);
List<BaseDoc> inserts = Arrays.asList(newDoc("1", "v1"), newDoc("2", "v2"), newDoc("3", "v2"));
List<Tuple<Query, Update>> updates = new ArrayList<Tuple<Query, Update>>();
updates.add(new Tuple<Query, Update>(where("value", "v2"), set("value", "v3")));
List<Tuple<Query, Update>> updates = Arrays.asList(Tuple.of(where("value", "v2"), set("value", "v3")));
List<Query> removes = Arrays.asList(where("_id", "1"));
// when
BulkWriteResult result = bulkOps.insert(inserts).updateMulti(updates).remove(removes).executeBulk();
BulkWriteResult result = createBulkOps(BulkMode.ORDERED).insert(inserts).updateMulti(updates).remove(removes)
.execute();
// then
assertThat(result, notNullValue());
assertThat(result.getInsertedCount(), is(3));
assertThat(result.getModifiedCount(), is(2));
@ -236,44 +271,41 @@ public class DefaultBulkOperationsIntegrationTests { @@ -236,44 +271,41 @@ public class DefaultBulkOperationsIntegrationTests {
}
private void testUpdate(BulkMode mode, boolean multi, int expectedUpdates) {
// given
bulkOps = createBulkOps(mode);
BulkOperations bulkOps = createBulkOps(mode);
insertSomeDocuments();
List<Tuple<Query, Update>> updates = new ArrayList<Tuple<Query, Update>>();
updates.add(new Tuple<Query, Update>(where("value", "value1"), set("value", "value3")));
updates.add(new Tuple<Query, Update>(where("value", "value2"), set("value", "value4")));
// when
int n;
if (multi) {
n = bulkOps.updateMulti(updates).executeBulk().getModifiedCount();
} else {
n = bulkOps.updateOne(updates).executeBulk().getModifiedCount();
}
updates.add(Tuple.of(where("value", "value1"), set("value", "value3")));
updates.add(Tuple.of(where("value", "value2"), set("value", "value4")));
int modifiedCount = multi ? bulkOps.updateMulti(updates).execute().getModifiedCount()
: bulkOps.updateOne(updates).execute().getModifiedCount();
// then
assertThat(n, is(expectedUpdates));
assertThat(modifiedCount, is(expectedUpdates));
}
private void testRemove(BulkMode mode) {
// given
bulkOps = createBulkOps(mode);
insertSomeDocuments();
List<Query> removes = Arrays.asList(where("_id", "1"), where("value", "value2"));
// when
int n = bulkOps.remove(removes).executeBulk().getRemovedCount();
List<Query> removes = Arrays.asList(where("_id", "1"), where("value", "value2"));
// then
assertThat(n, is(3));
assertThat(createBulkOps(mode).remove(removes).execute().getRemovedCount(), is(3));
}
private BulkOperations createBulkOps(BulkMode mode) {
return new DefaultBulkOperations(template, mode, COLLECTION_NAME, WriteConcern.ACKNOWLEDGED);
DefaultBulkOperations operations = new DefaultBulkOperations(this.operations, mode, COLLECTION_NAME, null);
operations.setDefaultWriteConcern(WriteConcern.ACKNOWLEDGED);
return operations;
}
private void insertSomeDocuments() {
final DBCollection coll = template.getCollection(COLLECTION_NAME);
final DBCollection coll = operations.getCollection(COLLECTION_NAME);
coll.insert(rawDoc("1", "value1"));
coll.insert(rawDoc("2", "value1"));
@ -282,15 +314,16 @@ public class DefaultBulkOperationsIntegrationTests { @@ -282,15 +314,16 @@ public class DefaultBulkOperationsIntegrationTests {
}
private static BaseDoc newDoc(String id) {
final BaseDoc doc = new BaseDoc();
BaseDoc doc = new BaseDoc();
doc.id = id;
return doc;
}
private static BaseDoc newDoc(String id, String value) {
final BaseDoc doc = newDoc(id);
BaseDoc doc = newDoc(id);
doc.value = value;
return doc;
@ -301,19 +334,10 @@ public class DefaultBulkOperationsIntegrationTests { @@ -301,19 +334,10 @@ public class DefaultBulkOperationsIntegrationTests {
}
private static Update set(String field, String value) {
Update u = new Update();
u.set(field, value);
return u;
return new Update().set(field, value);
}
private static DBObject rawDoc(String id, String value) {
final DBObject o = new BasicDBObject();
o.put("_id", id);
o.put("value", value);
return o;
return new BasicDBObject("_id", id).append("value", value);
}
}

Loading…
Cancel
Save