diff --git a/android/examples/todo-sync/src/main/java/com/mongodb/stitch/android/examples/todosync/TodoListActivity.java b/android/examples/todo-sync/src/main/java/com/mongodb/stitch/android/examples/todosync/TodoListActivity.java
index 15bb20d32..fd3c0974b 100644
--- a/android/examples/todo-sync/src/main/java/com/mongodb/stitch/android/examples/todosync/TodoListActivity.java
+++ b/android/examples/todo-sync/src/main/java/com/mongodb/stitch/android/examples/todosync/TodoListActivity.java
@@ -39,9 +39,9 @@
 import com.mongodb.stitch.android.services.mongodb.remote.RemoteMongoCollection;
 import com.mongodb.stitch.core.auth.providers.serverapikey.ServerApiKeyCredential;
 import com.mongodb.stitch.core.internal.common.BsonUtils;
-import com.mongodb.stitch.core.services.mongodb.remote.RemoteDeleteResult;
 import com.mongodb.stitch.core.services.mongodb.remote.sync.ChangeEventListener;
 import com.mongodb.stitch.core.services.mongodb.remote.sync.DefaultSyncConflictResolvers;
+import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncDeleteResult;
 import com.mongodb.stitch.core.services.mongodb.remote.sync.internal.ChangeEvent;
 
 import java.util.ArrayList;
@@ -53,7 +53,6 @@
 import org.bson.BsonObjectId;
 import org.bson.BsonRegularExpression;
-import org.bson.BsonString;
 import org.bson.BsonValue;
 import org.bson.Document;
 import org.bson.codecs.configuration.CodecRegistries;
@@ -127,7 +126,7 @@ public void updateChecked(final ObjectId itemId, final boolean isChecked) {
       updateDoc.append("$unset", new Document(TodoItem.Fields.DONE_DATE, ""));
     }
 
-    items.sync().updateOneById(new BsonObjectId(itemId), updateDoc);
+    items.sync().updateOne(new Document("_id", itemId), updateDoc);
   }
 
   @Override
@@ -358,8 +357,8 @@ private void addTodoItem(final String task) {
 
   private void updateTodoItemTask(final ObjectId itemId, final String newTask) {
     final BsonObjectId docId = new BsonObjectId(itemId);
-    items.sync().updateOneById(
-        docId,
+    items.sync().updateOne(
+        new Document("_id", docId),
         new Document("$set", new Document(TodoItem.Fields.TASK, newTask)))
         .addOnSuccessListener(result -> {
           items.sync().find(new Document("_id", docId)).first()
@@ -375,11 +374,11 @@ private void updateTodoItemTask(final ObjectId itemId, final String newTask) {
   }
 
   private void clearCheckedTodoItems() {
-    final List<Task<RemoteDeleteResult>> tasks = new ArrayList<>();
+    final List<Task<SyncDeleteResult>> tasks = new ArrayList<>();
     getItems().addOnSuccessListener(todoItems -> {
       for (final TodoItem item : todoItems) {
         if (item.isChecked()) {
-          tasks.add(items.sync().deleteOneById(new BsonObjectId(item.getId())));
+          tasks.add(items.sync().deleteOne(new Document("_id", item.getId())));
         }
       }
       Tasks.whenAllComplete(tasks)
@@ -388,10 +387,10 @@ private void clearCheckedTodoItems() {
   }
 
   private void clearAllTodoItems() {
-    final List<Task<RemoteDeleteResult>> tasks = new ArrayList<>();
+    final List<Task<SyncDeleteResult>> tasks = new ArrayList<>();
     getItems().addOnSuccessListener(todoItems -> {
       for (final TodoItem item : todoItems) {
-        tasks.add(items.sync().deleteOneById(new BsonObjectId(item.getId())));
+        tasks.add(items.sync().deleteOne(new Document("_id", item.getId())));
       }
       Tasks.whenAllComplete(tasks)
          .addOnCompleteListener(task -> todoAdapter.clearItems());
@@ -399,6 +398,6 @@ private void clearAllTodoItems() {
   }
 
   private void touchList() {
-    lists.sync().updateOneById(new BsonString(userId), new Document("$inc", new Document("i", 1)));
+    lists.sync().updateOne(new Document("_id", userId), new Document("$inc", new Document("i", 1)));
   }
 }
diff --git
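The shape of the example-app migration above is that the id-addressed sync calls (updateOneById, deleteOneById) become filter-based calls that take an ordinary query document. A minimal sketch of the pattern, assuming a synced collection handle named items as in the example app; the itemId variable, updateDoc contents, and "checked" field are illustrative only:

// Before: sync operations addressed a document directly by its _id value.
//   items.sync().updateOneById(new BsonObjectId(itemId), updateDoc);
//   items.sync().deleteOneById(new BsonObjectId(itemId));

// After: sync operations take a query filter, so the _id match is written
// as a plain filter document.
final ObjectId itemId = new ObjectId();
final Document updateDoc = new Document("$set", new Document("checked", true));
items.sync().updateOne(new Document("_id", itemId), updateDoc);
items.sync().deleteOne(new Document("_id", itemId));
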
a/android/services/mongodb-remote/src/androidTest/java/com/mongodb/stitch/android/services/mongodb/remote/internal/SyncMongoClientIntTests.kt b/android/services/mongodb-remote/src/androidTest/java/com/mongodb/stitch/android/services/mongodb/remote/internal/SyncMongoClientIntTests.kt index 9675e4b08..956a769cf 100644 --- a/android/services/mongodb-remote/src/androidTest/java/com/mongodb/stitch/android/services/mongodb/remote/internal/SyncMongoClientIntTests.kt +++ b/android/services/mongodb-remote/src/androidTest/java/com/mongodb/stitch/android/services/mongodb/remote/internal/SyncMongoClientIntTests.kt @@ -20,6 +20,11 @@ import com.mongodb.stitch.core.services.mongodb.remote.RemoteUpdateResult import com.mongodb.stitch.core.services.mongodb.remote.sync.ChangeEventListener import com.mongodb.stitch.core.services.mongodb.remote.sync.ConflictHandler import com.mongodb.stitch.core.services.mongodb.remote.sync.ErrorListener +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncDeleteResult +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncInsertManyResult +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncInsertOneResult +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateOptions +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateResult import com.mongodb.stitch.core.services.mongodb.remote.sync.internal.DataSynchronizer import com.mongodb.stitch.core.testutils.BaseStitchIntTest import com.mongodb.stitch.core.testutils.sync.ProxyRemoteMethods @@ -71,20 +76,36 @@ class SyncMongoClientIntTests : BaseStitchAndroidIntTest(), SyncIntTestRunner { sync.syncOne(id) } - override fun insertOneAndSync(document: Document): RemoteInsertOneResult { + override fun count(filter: Bson): Long { + return Tasks.await(sync.count(filter)) + } + + override fun aggregate(pipeline: List): Iterable { + return Tasks.await(sync.aggregate(pipeline).into(mutableListOf())) + } + + override fun insertOneAndSync(document: Document): SyncInsertOneResult { return Tasks.await(sync.insertOneAndSync(document)) } - override fun findOneById(id: BsonValue): Document? 
{ - return Tasks.await(sync.findOneById(id)) + override fun insertManyAndSync(documents: List): SyncInsertManyResult { + return Tasks.await(sync.insertManyAndSync(documents)) + } + + override fun updateOne(filter: Bson, update: Bson, updateOptions: SyncUpdateOptions): SyncUpdateResult { + return Tasks.await(sync.updateOne(filter, update, updateOptions)) } - override fun updateOneById(documentId: BsonValue, update: Bson): RemoteUpdateResult { - return Tasks.await(sync.updateOneById(documentId, update)) + override fun updateMany(filter: Bson, update: Bson, updateOptions: SyncUpdateOptions): SyncUpdateResult { + return Tasks.await(sync.updateMany(filter, update, updateOptions)) } - override fun deleteOneById(documentId: BsonValue): RemoteDeleteResult { - return Tasks.await(sync.deleteOneById(documentId)) + override fun deleteOne(filter: Bson): SyncDeleteResult { + return Tasks.await(sync.deleteOne(filter)) + } + + override fun deleteMany(filter: Bson): SyncDeleteResult { + return Tasks.await(sync.deleteMany(filter)) } override fun desyncOne(id: BsonValue) { @@ -277,11 +298,6 @@ class SyncMongoClientIntTests : BaseStitchAndroidIntTest(), SyncIntTestRunner { testProxy.testInsertInsertConflict() } - @Test - override fun testPausedDocumentConfig() { - testProxy.testPausedDocumentConfig() - } - @Test override fun testConfigure() { } @@ -321,6 +337,26 @@ class SyncMongoClientIntTests : BaseStitchAndroidIntTest(), SyncIntTestRunner { testProxy.testResumeSyncForDocumentResumesSync() } + @Test + override fun testReadsBeforeAndAfterSync() { + testProxy.testReadsBeforeAndAfterSync() + } + + @Test + override fun testInsertManyNoConflicts() { + testProxy.testInsertManyNoConflicts() + } + + @Test + override fun testUpdateManyNoConflicts() { + testProxy.testUpdateManyNoConflicts() + } + + @Test + override fun testDeleteManyNoConflicts() { + testProxy.testDeleteManyNoConflicts() + } + /** * Get the uri for where mongodb is running locally. 
*/ diff --git a/android/services/mongodb-remote/src/main/java/com/mongodb/stitch/android/services/mongodb/remote/Sync.java b/android/services/mongodb-remote/src/main/java/com/mongodb/stitch/android/services/mongodb/remote/Sync.java index b55e8b60d..fd16cb17c 100644 --- a/android/services/mongodb-remote/src/main/java/com/mongodb/stitch/android/services/mongodb/remote/Sync.java +++ b/android/services/mongodb-remote/src/main/java/com/mongodb/stitch/android/services/mongodb/remote/Sync.java @@ -20,13 +20,17 @@ import android.support.annotation.Nullable; import com.google.android.gms.tasks.Task; -import com.mongodb.stitch.core.services.mongodb.remote.RemoteDeleteResult; -import com.mongodb.stitch.core.services.mongodb.remote.RemoteInsertOneResult; -import com.mongodb.stitch.core.services.mongodb.remote.RemoteUpdateResult; import com.mongodb.stitch.core.services.mongodb.remote.sync.ChangeEventListener; import com.mongodb.stitch.core.services.mongodb.remote.sync.ConflictHandler; import com.mongodb.stitch.core.services.mongodb.remote.sync.ErrorListener; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncCountOptions; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncDeleteResult; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncInsertManyResult; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncInsertOneResult; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateOptions; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateResult; +import java.util.List; import java.util.Set; import org.bson.BsonValue; @@ -103,14 +107,40 @@ void configure(@NonNull final ConflictHandler conflictHandler, boolean resumeSyncForDocument(@NonNull final BsonValue documentId); /** - * Finds all documents in the collection that have been synchronized from the remote. + * Counts the number of documents in the collection that have been synchronized with the remote. + * + * @return the number of documents in the collection + */ + Task count(); + + /** + * Counts the number of documents in the collection that have been synchronized with the remote + * according to the given options. + * + * @param filter the query filter + * @return the number of documents in the collection + */ + Task count(final Bson filter); + + /** + * Counts the number of documents in the collection that have been synchronized with the remote + * according to the given options. + * + * @param filter the query filter + * @param options the options describing the count + * @return the number of documents in the collection + */ + Task count(final Bson filter, final SyncCountOptions options); + + /** + * Finds all documents in the collection that have been synchronized with the remote. * * @return the find iterable interface */ SyncFindIterable find(); /** - * Finds all documents in the collection that have been synchronized from the remote. + * Finds all documents in the collection that have been synchronized with the remote. * * @param resultClass the class to decode each document into * @param the target document type of the iterable. @@ -119,7 +149,7 @@ void configure(@NonNull final ConflictHandler conflictHandler, SyncFindIterable find(final Class resultClass); /** - * Finds all documents in the collection that have been synchronized from the remote. + * Finds all documents in the collection that have been synchronized with the remote. 
 *
 * @param filter the query filter
 * @return the find iterable interface
@@ -127,59 +157,130 @@ void configure(@NonNull final ConflictHandler<DocumentT> conflictHandler,
 SyncFindIterable<DocumentT> find(final Bson filter);
 
 /**
- * Finds all documents in the collection that have been synchronized from the remote.
+ * Finds all documents in the collection that have been synchronized with the remote.
 *
 * @param filter the query filter
 * @param resultClass the class to decode each document into
 * @param <ResultT> the target document type of the iterable.
 * @return the find iterable interface
 */
- <ResultT> SyncFindIterable<ResultT> find(final Bson filter, final Class<ResultT> resultClass);
+ <ResultT> SyncFindIterable<ResultT> find(
+     final Bson filter,
+     final Class<ResultT> resultClass);
+
 /**
- * Finds a single document by the given id. It is first searched for in the local synchronized
- * cache and if not found and there is internet connectivity, it is searched for remotely.
+ * Aggregates documents that have been synchronized with the remote
+ * according to the specified aggregation pipeline.
 *
- * @param documentId the _id of the document to search for.
- * @return a task containing the document if found locally or remotely.
+ * @param pipeline the aggregation pipeline
+ * @return an iterable containing the result of the aggregation operation
 */
- Task<DocumentT> findOneById(final BsonValue documentId);
+ SyncAggregateIterable<DocumentT> aggregate(final List<? extends Bson> pipeline);
 
 /**
- * Finds a single document by the given id. It is first searched for in the local synchronized
- * cache and if not found and there is internet connectivity, it is searched for remotely.
+ * Aggregates documents that have been synchronized with the remote
+ * according to the specified aggregation pipeline.
 *
- * @param documentId the _id of the document to search for.
+ * @param pipeline the aggregation pipeline
 * @param resultClass the class to decode each document into
 * @param <ResultT> the target document type of the iterable.
- * @return a task containing the document if found locally or remotely.
+ * @return an iterable containing the result of the aggregation operation
+ */
+ <ResultT> SyncAggregateIterable<ResultT> aggregate(
+     final List<? extends Bson> pipeline,
+     final Class<ResultT> resultClass);
+
+ /**
+ * Inserts the provided document. If the document is missing an identifier, one will be
+ * generated. Begins synchronizing on the document's id.
+ *
+ * @param document the document to insert
+ * @return the result of the insert one operation
+ */
+ Task<SyncInsertOneResult> insertOneAndSync(final DocumentT document);
+
+ /**
+ * Inserts one or more documents. If the documents are missing identifiers, they will be
+ * generated. Begins synchronizing on the documents' ids.
+ *
+ * @param documents the documents to insert
+ * @return the result of the insert many operation
+ */
+ Task<SyncInsertManyResult> insertManyAndSync(final List<DocumentT> documents);
+
+ /**
+ * Removes at most one document that has been synchronized with the remote
+ * from the collection that matches the given filter. If no
+ * documents match, the collection is not modified.
+ *
+ * @param filter the query filter to apply to the delete operation
+ * @return the result of the remove one operation
+ */
+ Task<SyncDeleteResult> deleteOne(final Bson filter);
+
+ /**
+ * Removes all documents from the collection that have been synchronized with the remote
+ * that match the given query filter. If no documents match, the collection is not modified.
+ *
+ * @param filter the query filter to apply to the delete operation
+ * @return the result of the remove many operation
+ */
+ Task<SyncDeleteResult> deleteMany(final Bson filter);
+
+ /**
+ * Update a single document that has been synchronized with the remote
+ * in the collection according to the specified arguments. If the update results in an upsert,
+ * the newly upserted document will automatically become synchronized.
+ *
+ * @param filter a document describing the query filter, which may not be null.
+ * @param update a document describing the update, which may not be null. The update to
+ * apply must include only update operators.
+ * @return the result of the update one operation
 */
- <ResultT> Task<ResultT> findOneById(final BsonValue documentId, final Class<ResultT> resultClass);
+ Task<SyncUpdateResult> updateOne(final Bson filter, final Bson update);
 
 /**
- * Updates a document by the given id. It is first searched for in the local synchronized cache
- * and if not found and there is internet connectivity, it is searched for remotely.
+ * Update a single document that has been synchronized with the remote
+ * in the collection according to the specified arguments. If the update results in an upsert,
+ * the newly upserted document will automatically become synchronized.
 *
- * @param documentId the _id of the document to search for.
- * @param update the update specifier.
- * @return a task containing the result of the local or remote update.
+ * @param filter a document describing the query filter, which may not be null.
+ * @param update a document describing the update, which may not be null. The update to
+ * apply must include only update operators.
+ * @param updateOptions the options to apply to the update operation
+ * @return the result of the update one operation
 */
- Task<RemoteUpdateResult> updateOneById(final BsonValue documentId, final Bson update);
+ Task<SyncUpdateResult> updateOne(
+     final Bson filter,
+     final Bson update,
+     final SyncUpdateOptions updateOptions);
 
 /**
- * Inserts a single document and begins to synchronize it.
+ * Update all documents that have been synchronized with the remote
+ * in the collection according to the specified arguments. If the update results in an upsert,
+ * the newly upserted document will automatically become synchronized.
 *
- * @param document the document to insert and synchronize.
- * @return the result of the insertion.
+ * @param filter a document describing the query filter, which may not be null.
+ * @param update a document describing the update, which may not be null. The update to
+ * apply must include only update operators.
+ * @return the result of the update many operation
 */
- Task<RemoteInsertOneResult> insertOneAndSync(final DocumentT document);
+ Task<SyncUpdateResult> updateMany(final Bson filter, final Bson update);
 
 /**
- * Deletes a single document by the given id. It is first searched for in the local synchronized
- * cache and if not found and there is internet connectivity, it is searched for remotely.
+ * Update all documents that have been synchronized with the remote
+ * in the collection according to the specified arguments. If the update results in an upsert,
+ * the newly upserted document will automatically become synchronized.
 *
- * @param documentId the _id of the document to search for.
- * @return a task containing the result of the local or remote update.
+ * @param filter a document describing the query filter, which may not be null.
+ * @param update a document describing the update, which may not be null. The update to
+ * apply must include only update operators.
+ * @param updateOptions the options to apply to the update operation + * @return the result of the update many operation */ - Task deleteOneById(final BsonValue documentId); + Task updateMany( + final Bson filter, + final Bson update, + final SyncUpdateOptions updateOptions); } diff --git a/android/services/mongodb-remote/src/main/java/com/mongodb/stitch/android/services/mongodb/remote/SyncAggregateIterable.java b/android/services/mongodb-remote/src/main/java/com/mongodb/stitch/android/services/mongodb/remote/SyncAggregateIterable.java new file mode 100644 index 000000000..d9708841a --- /dev/null +++ b/android/services/mongodb-remote/src/main/java/com/mongodb/stitch/android/services/mongodb/remote/SyncAggregateIterable.java @@ -0,0 +1,25 @@ +/* + * Copyright 2018-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.mongodb.stitch.android.services.mongodb.remote; + +/** + * Iterable for aggregate. + * + * @param The type of the result. + */ +public interface SyncAggregateIterable extends RemoteMongoIterable { +} diff --git a/android/services/mongodb-remote/src/main/java/com/mongodb/stitch/android/services/mongodb/remote/internal/SyncAggregateIterableImpl.java b/android/services/mongodb-remote/src/main/java/com/mongodb/stitch/android/services/mongodb/remote/internal/SyncAggregateIterableImpl.java new file mode 100644 index 000000000..8f71e568a --- /dev/null +++ b/android/services/mongodb-remote/src/main/java/com/mongodb/stitch/android/services/mongodb/remote/internal/SyncAggregateIterableImpl.java @@ -0,0 +1,32 @@ +/* + * Copyright 2018-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
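A sketch of what the new Task-based surface looks like from application code, assuming a Sync<Document> handle obtained via collection.sync(); the collection, field names, log tag, and listener bodies are illustrative and not part of this patch:

final Sync<Document> sync = collection.sync();

// Local count of synchronized documents matching a filter.
sync.count(new Document("status", "open"))
    .addOnSuccessListener(n -> Log.d("SyncDemo", "open items: " + n));

// Filter-based update across all matching synchronized documents.
sync.updateMany(
    new Document("status", "open"),
    new Document("$set", new Document("status", "closed")),
    new SyncUpdateOptions().upsert(false))
    .addOnSuccessListener(res -> Log.d("SyncDemo", "modified: " + res.getModifiedCount()));
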
+ */ + +package com.mongodb.stitch.android.services.mongodb.remote.internal; + +import com.mongodb.stitch.android.core.internal.common.TaskDispatcher; +import com.mongodb.stitch.android.services.mongodb.remote.SyncAggregateIterable; +import com.mongodb.stitch.core.services.mongodb.remote.sync.CoreSyncAggregateIterable; + +class SyncAggregateIterableImpl + extends RemoteMongoIterableImpl + implements SyncAggregateIterable { + SyncAggregateIterableImpl( + final CoreSyncAggregateIterable iterable, + final TaskDispatcher dispatcher + ) { + super(iterable, dispatcher); + } +} diff --git a/android/services/mongodb-remote/src/main/java/com/mongodb/stitch/android/services/mongodb/remote/internal/SyncImpl.java b/android/services/mongodb-remote/src/main/java/com/mongodb/stitch/android/services/mongodb/remote/internal/SyncImpl.java index 1ec75b363..5cce6d436 100644 --- a/android/services/mongodb-remote/src/main/java/com/mongodb/stitch/android/services/mongodb/remote/internal/SyncImpl.java +++ b/android/services/mongodb-remote/src/main/java/com/mongodb/stitch/android/services/mongodb/remote/internal/SyncImpl.java @@ -22,18 +22,24 @@ import com.google.android.gms.tasks.Task; import com.mongodb.stitch.android.core.internal.common.TaskDispatcher; import com.mongodb.stitch.android.services.mongodb.remote.Sync; +import com.mongodb.stitch.android.services.mongodb.remote.SyncAggregateIterable; import com.mongodb.stitch.android.services.mongodb.remote.SyncFindIterable; -import com.mongodb.stitch.core.services.mongodb.remote.RemoteDeleteResult; -import com.mongodb.stitch.core.services.mongodb.remote.RemoteInsertOneResult; -import com.mongodb.stitch.core.services.mongodb.remote.RemoteUpdateResult; import com.mongodb.stitch.core.services.mongodb.remote.sync.ChangeEventListener; import com.mongodb.stitch.core.services.mongodb.remote.sync.ConflictHandler; import com.mongodb.stitch.core.services.mongodb.remote.sync.CoreSync; import com.mongodb.stitch.core.services.mongodb.remote.sync.ErrorListener; - +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncCountOptions; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncDeleteResult; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncInsertManyResult; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncInsertOneResult; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateOptions; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateResult; + +import java.util.List; import java.util.Set; import java.util.concurrent.Callable; +import org.bson.BsonDocument; import org.bson.BsonValue; import org.bson.conversions.Bson; @@ -89,6 +95,37 @@ public boolean resumeSyncForDocument(@NonNull final BsonValue documentId) { return this.proxy.resumeSyncForDocument(documentId); } + @Override + public Task count() { + return this.count(new BsonDocument()); + } + + @Override + public Task count(final Bson filter) { + return this.count(filter, new SyncCountOptions()); + } + + @Override + public Task count(final Bson filter, final SyncCountOptions options) { + return this.dispatcher.dispatchTask(new Callable() { + @Override + public Long call() throws Exception { + return proxy.count(filter, options); + } + }); + } + + @Override + public SyncAggregateIterable aggregate(final List pipeline) { + return new SyncAggregateIterableImpl<>(this.proxy.aggregate(pipeline), dispatcher); + } + + @Override + public SyncAggregateIterable aggregate(final List pipeline, + final Class resultClass) { + return new 
SyncAggregateIterableImpl<>(this.proxy.aggregate(pipeline, resultClass), dispatcher); + } + @Override public SyncFindIterable find() { return new SyncFindIterableImpl<>(proxy.find(), dispatcher); @@ -111,52 +148,79 @@ public SyncFindIterable find(final Bson filter, } @Override - public Task findOneById(final BsonValue documentId) { - return this.dispatcher.dispatchTask(new Callable() { + public Task insertOneAndSync(final DocumentT document) { + return this.dispatcher.dispatchTask(new Callable() { + @Override + public SyncInsertOneResult call() throws Exception { + return proxy.insertOneAndSync(document); + } + }); + } + + @Override + public Task insertManyAndSync(final List documents) { + return this.dispatcher.dispatchTask(new Callable() { @Override - public DocumentT call() throws Exception { - return proxy.findOneById(documentId); + public SyncInsertManyResult call() throws Exception { + return proxy.insertManyAndSync(documents); } }); } @Override - public Task findOneById(final BsonValue documentId, - final Class resultClass) { - return this.dispatcher.dispatchTask(new Callable() { + public Task updateOne(final Bson filter, final Bson update) { + return this.updateOne(filter, update, new SyncUpdateOptions()); + } + + @Override + public Task updateOne( + final Bson filter, + final Bson update, + final SyncUpdateOptions updateOptions + ) { + return this.dispatcher.dispatchTask(new Callable() { @Override - public ResultT call() throws Exception { - return proxy.findOneById(documentId, resultClass); + public SyncUpdateResult call() throws Exception { + return proxy.updateOne(filter, update, updateOptions); } }); } @Override - public Task deleteOneById(final BsonValue documentId) { - return this.dispatcher.dispatchTask(new Callable() { + public Task updateMany(final Bson filter, final Bson update) { + return this.updateMany(filter, update, new SyncUpdateOptions()); + } + + @Override + public Task updateMany( + final Bson filter, + final Bson update, + final SyncUpdateOptions updateOptions + ) { + return this.dispatcher.dispatchTask(new Callable() { @Override - public RemoteDeleteResult call() throws Exception { - return proxy.deleteOneById(documentId); + public SyncUpdateResult call() throws Exception { + return proxy.updateMany(filter, update, updateOptions); } }); } @Override - public Task insertOneAndSync(final DocumentT document) { - return this.dispatcher.dispatchTask(new Callable() { + public Task deleteOne(final Bson filter) { + return this.dispatcher.dispatchTask(new Callable() { @Override - public RemoteInsertOneResult call() throws Exception { - return proxy.insertOneAndSync(document); + public SyncDeleteResult call() throws Exception { + return proxy.deleteOne(filter); } }); } @Override - public Task updateOneById(final BsonValue documentId, final Bson update) { - return this.dispatcher.dispatchTask(new Callable() { + public Task deleteMany(final Bson filter) { + return this.dispatcher.dispatchTask(new Callable() { @Override - public RemoteUpdateResult call() throws Exception { - return proxy.updateOneById(documentId, update); + public SyncDeleteResult call() throws Exception { + return proxy.deleteMany(filter); } }); } diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/internal/CoreRemoteAggregateIterableImpl.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/internal/CoreRemoteAggregateIterableImpl.java index 2b7c462e3..1f5de014e 100644 --- 
a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/internal/CoreRemoteAggregateIterableImpl.java +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/internal/CoreRemoteAggregateIterableImpl.java @@ -38,6 +38,6 @@ class CoreRemoteAggregateIterableImpl } Operation> asOperation() { - return getOperations().aggregate(pipeline, geResultClass()); + return getOperations().aggregate(pipeline, getResultClass()); } } diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/internal/CoreRemoteFindIterableImpl.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/internal/CoreRemoteFindIterableImpl.java index 301be9bec..c51169bff 100644 --- a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/internal/CoreRemoteFindIterableImpl.java +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/internal/CoreRemoteFindIterableImpl.java @@ -95,7 +95,7 @@ public CoreRemoteFindIterableImpl sort(@Nullable final Bson @Override public ResultT first() { final Iterator iter = getOperations() - .findFirst(filter, geResultClass(), findOptions) + .findFirst(filter, getResultClass(), findOptions) .execute(getService()) .iterator(); return iter.hasNext() ? iter.next() : null; @@ -103,6 +103,6 @@ public ResultT first() { @Override Operation> asOperation() { - return getOperations().find(filter, geResultClass(), findOptions); + return getOperations().find(filter, getResultClass(), findOptions); } } diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/internal/CoreRemoteMongoIterableImpl.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/internal/CoreRemoteMongoIterableImpl.java index 7c5cd2eed..b36ad0109 100644 --- a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/internal/CoreRemoteMongoIterableImpl.java +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/internal/CoreRemoteMongoIterableImpl.java @@ -48,7 +48,7 @@ CoreStitchServiceClient getService() { return service; } - Class geResultClass() { + Class getResultClass() { return resultClass; } diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/CoreSync.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/CoreSync.java index b45b8710f..662b5cb9a 100644 --- a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/CoreSync.java +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/CoreSync.java @@ -16,10 +16,7 @@ package com.mongodb.stitch.core.services.mongodb.remote.sync; -import com.mongodb.stitch.core.services.mongodb.remote.RemoteDeleteResult; -import com.mongodb.stitch.core.services.mongodb.remote.RemoteInsertOneResult; -import com.mongodb.stitch.core.services.mongodb.remote.RemoteUpdateResult; - +import java.util.List; import java.util.Set; import javax.annotation.Nonnull; @@ -99,14 +96,40 @@ void configure(@Nonnull final ConflictHandler conflictHandler, boolean resumeSyncForDocument(final BsonValue documentId); /** - * Finds all documents in the collection that have been synchronized from the remote. 
+ * Counts the number of documents in the collection that have been synchronized with the remote. + * + * @return the number of documents in the collection + */ + long count(); + + /** + * Counts the number of documents in the collection that have been synchronized with the remote + * according to the given options. + * + * @param filter the query filter + * @return the number of documents in the collection + */ + long count(final Bson filter); + + /** + * Counts the number of documents in the collection that have been synchronized with the remote + * according to the given options. + * + * @param filter the query filter + * @param options the options describing the count + * @return the number of documents in the collection + */ + long count(final Bson filter, final SyncCountOptions options); + + /** + * Finds all documents in the collection that have been synchronized with the remote. * * @return the find iterable interface */ CoreSyncFindIterable find(); /** - * Finds all documents in the collection that have been synchronized from the remote. + * Finds all documents in the collection that have been synchronized with the remote. * * @param resultClass the class to decode each document into * @param the target document type of the iterable. @@ -115,7 +138,7 @@ void configure(@Nonnull final ConflictHandler conflictHandler, CoreSyncFindIterable find(final Class resultClass); /** - * Finds all documents in the collection that have been synchronized from the remote. + * Finds all documents in the collection that have been synchronized with the remote. * * @param filter the query filter * @return the find iterable interface @@ -123,59 +146,129 @@ void configure(@Nonnull final ConflictHandler conflictHandler, CoreSyncFindIterable find(final Bson filter); /** - * Finds all documents in the collection that have been synchronized from the remote. + * Finds all documents in the collection that have been synchronized with the remote. * * @param filter the query filter * @param resultClass the class to decode each document into * @param the target document type of the iterable. * @return the find iterable interface */ - CoreSyncFindIterable find(final Bson filter, final Class resultClass); + CoreSyncFindIterable find( + final Bson filter, + final Class resultClass); + /** - * Finds a single document by the given id. It is first searched for in the local synchronized - * cache and if not found and there is internet connectivity, it is searched for remotely. + * Aggregates documents that have been synchronized with the remote + * according to the specified aggregation pipeline. * - * @param documentId the _id of the document to search for. - * @return a task containing the document if found locally or remotely. + * @param pipeline the aggregation pipeline + * @return an iterable containing the result of the aggregation operation */ - DocumentT findOneById(final BsonValue documentId); + CoreSyncAggregateIterable aggregate(final List pipeline); /** - * Finds a single document by the given id. It is first searched for in the local synchronized - * cache and if not found and there is internet connectivity, it is searched for remotely. + * Aggregates documents that have been synchronized with the remote + * according to the specified aggregation pipeline. * - * @param documentId the _id of the document to search for. + * @param pipeline the aggregation pipeline * @param resultClass the class to decode each document into * @param the target document type of the iterable. 
- * @return a task containing the document if found locally or remotely. + * @return an iterable containing the result of the aggregation operation + */ + CoreSyncAggregateIterable aggregate( + final List pipeline, + final Class resultClass); + + /** + * Inserts the provided document. If the document is missing an identifier, the client should + * generate one. Syncs the newly inserted document against the remote. + * + * @param document the document to insert + * @return the result of the insert one operation + */ + SyncInsertOneResult insertOneAndSync(final DocumentT document); + + /** + * Inserts one or more documents. Syncs the newly inserted documents against the remote. + * + * @param documents the documents to insert + * @return the result of the insert many operation + */ + SyncInsertManyResult insertManyAndSync(final List documents); + + /** + * Removes at most one document from the collection that has been synchronized with the remote + * that matches the given filter. If no documents match, the collection is not + * modified. + * + * @param filter the query filter to apply the the delete operation + * @return the result of the remove one operation + */ + SyncDeleteResult deleteOne(final Bson filter); + + /** + * Removes all documents from the collection that have been synchronized with the remote + * that match the given query filter. If no documents match, the collection is not modified. + * + * @param filter the query filter to apply the the delete operation + * @return the result of the remove many operation + */ + SyncDeleteResult deleteMany(final Bson filter); + + /** + * Update a single document in the collection that have been synchronized with the remote + * according to the specified arguments. If the update results in an upsert, + * the newly upserted document will automatically become synchronized. + * + * @param filter a document describing the query filter, which may not be null. + * @param update a document describing the update, which may not be null. The update to + * apply must include only update operators. + * @return the result of the update one operation */ - ResultT findOneById(final BsonValue documentId, final Class resultClass); + SyncUpdateResult updateOne(final Bson filter, final Bson update); /** - * Updates a document by the given id. It is first searched for in the local synchronized cache - * and if not found and there is internet connectivity, it is searched for remotely. + * Update a single document in the collection that has been synchronized with the remote + * according to the specified arguments. If the update results in an upsert, + * the newly upserted document will automatically become synchronized. * - * @param documentId the _id of the document to search for. - * @param update the update specifier. - * @return a task containing the result of the local or remote update. + * @param filter a document describing the query filter, which may not be null. + * @param update a document describing the update, which may not be null. The update to + * apply must include only update operators. + * @param updateOptions the options to apply to the update operation + * @return the result of the update one operation */ - RemoteUpdateResult updateOneById(final BsonValue documentId, final Bson update); + SyncUpdateResult updateOne( + final Bson filter, + final Bson update, + final SyncUpdateOptions updateOptions); /** - * Inserts a single document and begins to synchronize it. 
+ * Update all documents in the collection that have been synchronized with the remote + * according to the specified arguments. If the update results in an upsert, + * the newly upserted document will automatically become synchronized. * - * @param document the document to insert and synchronize. - * @return the result of the insertion. + * @param filter a document describing the query filter, which may not be null. + * @param update a document describing the update, which may not be null. The update to + * apply must include only update operators. + * @return the result of the update many operation */ - RemoteInsertOneResult insertOneAndSync(final DocumentT document); + SyncUpdateResult updateMany(final Bson filter, final Bson update); /** - * Deletes a single document by the given id. It is first searched for in the local synchronized - * cache and if not found and there is internet connectivity, it is searched for remotely. + * Update all documents in the collection that have been synchronized with the remote + * according to the specified arguments. If the update results in an upsert, + * the newly upserted document will automatically become synchronized. * - * @param documentId the _id of the document to search for. - * @return a task containing the result of the local or remote update. + * @param filter a document describing the query filter, which may not be null. + * @param update a document describing the update, which may not be null. The update to + * apply must include only update operators. + * @param updateOptions the options to apply to the update operation + * @return the result of the update many operation */ - RemoteDeleteResult deleteOneById(final BsonValue documentId); + SyncUpdateResult updateMany( + final Bson filter, + final Bson update, + final SyncUpdateOptions updateOptions); } diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/CoreSyncAggregateIterable.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/CoreSyncAggregateIterable.java new file mode 100644 index 000000000..95a245d88 --- /dev/null +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/CoreSyncAggregateIterable.java @@ -0,0 +1,27 @@ +/* + * Copyright 2018-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.mongodb.stitch.core.services.mongodb.remote.sync; + +import com.mongodb.stitch.core.services.mongodb.remote.internal.CoreRemoteAggregateIterable; + +/** + * Iterable for aggregate. + * + * @param The type of the result. 
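The CoreSync interface above is the synchronous counterpart that the platform SDKs wrap in Tasks and dispatchers. A small sketch under that assumption, with a hypothetical CoreSync<Document> handle named coreSync and illustrative field names:

// Synchronous core calls; the Android wrapper dispatches these off the main thread.
final long open = coreSync.count(new Document("status", "open"));
final SyncUpdateResult result = coreSync.updateMany(
    new Document("status", "open"),
    new Document("$set", new Document("status", "closed")));
final SyncDeleteResult removed = coreSync.deleteMany(new Document("status", "closed"));
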
+ */ +public interface CoreSyncAggregateIterable extends CoreRemoteAggregateIterable { +} diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/SyncCountOptions.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/SyncCountOptions.java new file mode 100644 index 000000000..cb7eed6c8 --- /dev/null +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/SyncCountOptions.java @@ -0,0 +1,29 @@ +/* + * Copyright 2018-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.mongodb.stitch.core.services.mongodb.remote.sync; + +import com.mongodb.stitch.core.services.mongodb.remote.RemoteCountOptions; + +/** + * The options for a count operation. + */ +public class SyncCountOptions extends RemoteCountOptions { + @Override + public SyncCountOptions limit(final int limit) { + return (SyncCountOptions)super.limit(limit); + } +} diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/SyncDeleteResult.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/SyncDeleteResult.java new file mode 100644 index 000000000..96f6c215a --- /dev/null +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/SyncDeleteResult.java @@ -0,0 +1,33 @@ +/* + * Copyright 2018-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.mongodb.stitch.core.services.mongodb.remote.sync; + +import com.mongodb.stitch.core.services.mongodb.remote.RemoteDeleteResult; + +/** + * The result of a delete operation. + */ +public class SyncDeleteResult extends RemoteDeleteResult { + /** + * Constructs a result. + * + * @param deletedCount the number of documents deleted. + */ + public SyncDeleteResult(final long deletedCount) { + super(deletedCount); + } +} diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/SyncInsertManyResult.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/SyncInsertManyResult.java new file mode 100644 index 000000000..9d1d254b9 --- /dev/null +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/SyncInsertManyResult.java @@ -0,0 +1,38 @@ +/* + * Copyright 2018-present MongoDB, Inc. 
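SyncCountOptions only narrows the return type of the inherited limit setter, so calls chain without casts. An illustrative use against the Android Sync handle from the earlier sketch:

sync.count(new Document("archived", false), new SyncCountOptions().limit(100))
    .addOnSuccessListener(n -> Log.d("SyncDemo", "counted (capped at 100): " + n));
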
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.mongodb.stitch.core.services.mongodb.remote.sync; + +import com.mongodb.stitch.core.services.mongodb.remote.RemoteInsertManyResult; + +import java.util.Map; + +import org.bson.BsonValue; + +/** + * The result of an insert many operation. + */ +public class SyncInsertManyResult extends RemoteInsertManyResult { + /** + * Constructs a result. + * + * @param insertedIds the _ids of the inserted documents arranged by the index of the document + * from the operation and its corresponding id. + */ + public SyncInsertManyResult(final Map insertedIds) { + super(insertedIds); + } +} diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/SyncInsertOneResult.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/SyncInsertOneResult.java new file mode 100644 index 000000000..0721a51d7 --- /dev/null +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/SyncInsertOneResult.java @@ -0,0 +1,35 @@ +/* + * Copyright 2018-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.mongodb.stitch.core.services.mongodb.remote.sync; + +import com.mongodb.stitch.core.services.mongodb.remote.RemoteInsertOneResult; + +import org.bson.BsonValue; + +/** + * The result of an insert one operation. + */ +public class SyncInsertOneResult extends RemoteInsertOneResult { + /** + * Constructs a result. + * + * @param insertedId the _id of the inserted document. + */ + public SyncInsertOneResult(final BsonValue insertedId) { + super(insertedId); + } +} diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/SyncUpdateOptions.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/SyncUpdateOptions.java new file mode 100644 index 000000000..575bf4fca --- /dev/null +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/SyncUpdateOptions.java @@ -0,0 +1,29 @@ +/* + * Copyright 2018-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.mongodb.stitch.core.services.mongodb.remote.sync; + +import com.mongodb.stitch.core.services.mongodb.remote.RemoteUpdateOptions; + +/** + * The options to apply when updating documents. + */ +public class SyncUpdateOptions extends RemoteUpdateOptions { + @Override + public SyncUpdateOptions upsert(final boolean upsert) { + return (SyncUpdateOptions)super.upsert(upsert); + } +} diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/SyncUpdateResult.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/SyncUpdateResult.java new file mode 100644 index 000000000..04a0467d4 --- /dev/null +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/SyncUpdateResult.java @@ -0,0 +1,42 @@ +/* + * Copyright 2018-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.mongodb.stitch.core.services.mongodb.remote.sync; + +import com.mongodb.stitch.core.services.mongodb.remote.RemoteUpdateResult; + +import org.bson.BsonValue; + +/** + * The result of an update operation. + */ +public class SyncUpdateResult extends RemoteUpdateResult { + /** + * Constructs a result. + * + * @param matchedCount the number of documents matched by the query. + * @param modifiedCount the number of documents modified. + * @param upsertedId the _id of the inserted document if the replace resulted in an inserted + * document, otherwise null. + */ + public SyncUpdateResult( + final long matchedCount, + final long modifiedCount, + final BsonValue upsertedId + ) { + super(matchedCount, modifiedCount, upsertedId); + } +} diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/AggregateOperation.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/AggregateOperation.java new file mode 100644 index 000000000..11f655ce7 --- /dev/null +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/AggregateOperation.java @@ -0,0 +1,55 @@ +/* + * Copyright 2018-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
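SyncUpdateOptions likewise only covariantly overrides upsert. Per the interface Javadoc earlier in this patch, a document created by such an upsert automatically becomes synchronized; a sketch with an assumed _id scheme and field names:

sync.updateOne(
    new Document("_id", "prefs-" + userId),
    new Document("$setOnInsert", new Document("theme", "dark")),
    new SyncUpdateOptions().upsert(true))
    .addOnSuccessListener(res -> {
      if (res.getUpsertedId() != null) {
        // The upserted document is now tracked by the data synchronizer.
        Log.d("SyncDemo", "created and syncing: " + res.getUpsertedId());
      }
    });
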
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.mongodb.stitch.core.services.mongodb.remote.sync.internal; + +import com.mongodb.MongoNamespace; +import com.mongodb.stitch.core.services.internal.CoreStitchServiceClient; +import com.mongodb.stitch.core.services.mongodb.remote.internal.Operation; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import javax.annotation.Nullable; + +import org.bson.conversions.Bson; + +class AggregateOperation implements Operation> { + private final MongoNamespace namespace; + private final DataSynchronizer dataSynchronizer; + private final List pipeline; + private final Class resultClass; + + AggregateOperation( + final MongoNamespace namespace, + final DataSynchronizer dataSynchronizer, + final List pipeline, + final Class resultClass + ) { + this.namespace = namespace; + this.dataSynchronizer = dataSynchronizer; + this.pipeline = pipeline; + this.resultClass = resultClass; + } + + public Collection execute(@Nullable final CoreStitchServiceClient service) { + return this.dataSynchronizer.aggregate( + namespace, pipeline, resultClass + ).into(new ArrayList<>()); + } +} + diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/ChangeEvent.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/ChangeEvent.java index 1bb8b0208..e89989650 100644 --- a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/ChangeEvent.java +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/ChangeEvent.java @@ -28,6 +28,8 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; + +import javax.annotation.Nonnull; import javax.annotation.Nullable; import org.bson.BsonArray; import org.bson.BsonBoolean; @@ -195,8 +197,8 @@ BsonDocument toUpdateDocument() { * @return a description of the updated fields and removed keys between * the documents */ - private static UpdateDescription diff(final BsonDocument beforeDocument, - final BsonDocument afterDocument, + private static UpdateDescription diff(final @Nonnull BsonDocument beforeDocument, + final @Nonnull BsonDocument afterDocument, final @Nullable String onKey, final BsonDocument updatedFields, final List removedFields) { @@ -245,7 +247,7 @@ private static UpdateDescription diff(final BsonDocument beforeDocument, // it is a new key with a new value. // updatedFields will included keys that must // be newly created. - final String actualKey = onKey == null ? key : String.format("%s.%s", onKey, key);; + final String actualKey = onKey == null ? 
key : String.format("%s.%s", onKey, key); if (!beforeDocument.containsKey(key)) { updatedFields.put(actualKey, newValue); } @@ -266,8 +268,12 @@ private static UpdateDescription diff(final BsonDocument beforeDocument, * @return a description of the updated fields and removed keys between * the documents */ - static UpdateDescription diff(final BsonDocument beforeDocument, - final BsonDocument afterDocument) { + static UpdateDescription diff(@Nullable final BsonDocument beforeDocument, + @Nullable final BsonDocument afterDocument) { + if (beforeDocument == null || afterDocument == null) { + return new UpdateDescription(new BsonDocument(), new ArrayList<>()); + } + return UpdateDescription.diff( beforeDocument, afterDocument, diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/CoreSyncAggregateIterableImpl.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/CoreSyncAggregateIterableImpl.java new file mode 100644 index 000000000..1b9c62c3c --- /dev/null +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/CoreSyncAggregateIterableImpl.java @@ -0,0 +1,48 @@ +/* + * Copyright 2018-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
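To make the nested-diff logic above concrete: actualKey is built as "parent.child" for nested documents, and the new null guard returns an empty description instead of throwing. A rough expectation sketch, not a test from this patch:

final BsonDocument before = BsonDocument.parse("{ a: 1, sub: { x: 1, y: 2 } }");
final BsonDocument after = BsonDocument.parse("{ a: 1, sub: { x: 5 }, b: true }");

// UpdateDescription.diff(before, after) would be expected to report, using dotted keys:
//   updatedFields: { "sub.x": 5, "b": true }
//   removedFields: [ "sub.y" ]
// and diff(null, after) now yields an empty UpdateDescription rather than an NPE.
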
+ */ + +package com.mongodb.stitch.core.services.mongodb.remote.sync.internal; + +import com.mongodb.stitch.core.services.internal.CoreStitchServiceClient; +import com.mongodb.stitch.core.services.mongodb.remote.internal.Operation; +import com.mongodb.stitch.core.services.mongodb.remote.sync.CoreSyncAggregateIterable; + +import java.util.Collection; +import java.util.List; + +import org.bson.conversions.Bson; + +class CoreSyncAggregateIterableImpl + extends CoreSyncMongoIterableImpl, ResultT> + implements CoreSyncAggregateIterable { + + private final List pipeline; + + CoreSyncAggregateIterableImpl( + final List pipeline, + final Class resultClass, + final CoreStitchServiceClient service, + final SyncOperations operations + ) { + super(service, resultClass, operations); + this.pipeline = pipeline; + } + + @Override + Operation> asOperation() { + return getOperations().aggregate(pipeline, getResultClass()); + } +} diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/CoreSyncFindIterableImpl.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/CoreSyncFindIterableImpl.java index d924f3156..b59afaad4 100644 --- a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/CoreSyncFindIterableImpl.java +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/CoreSyncFindIterableImpl.java @@ -98,7 +98,7 @@ public CoreSyncFindIterableImpl sort(@Nullable final Bson so @Override public ResultT first() { final Iterator iter = getOperations() - .findFirst(filter, geResultClass(), findOptions) + .findFirst(filter, getResultClass(), findOptions) .execute(getService()) .iterator(); return iter.hasNext() ? 
iter.next() : null; @@ -106,6 +106,6 @@ public ResultT first() { @Override Operation> asOperation() { - return getOperations().find(filter, geResultClass(), findOptions); + return getOperations().find(filter, getResultClass(), findOptions); } } diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/CoreSyncImpl.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/CoreSyncImpl.java index 8d91c562c..7f62d8293 100644 --- a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/CoreSyncImpl.java +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/CoreSyncImpl.java @@ -18,15 +18,20 @@ import com.mongodb.MongoNamespace; import com.mongodb.stitch.core.services.internal.CoreStitchServiceClient; -import com.mongodb.stitch.core.services.mongodb.remote.RemoteDeleteResult; -import com.mongodb.stitch.core.services.mongodb.remote.RemoteInsertOneResult; -import com.mongodb.stitch.core.services.mongodb.remote.RemoteUpdateResult; import com.mongodb.stitch.core.services.mongodb.remote.sync.ChangeEventListener; import com.mongodb.stitch.core.services.mongodb.remote.sync.ConflictHandler; import com.mongodb.stitch.core.services.mongodb.remote.sync.CoreSync; +import com.mongodb.stitch.core.services.mongodb.remote.sync.CoreSyncAggregateIterable; import com.mongodb.stitch.core.services.mongodb.remote.sync.CoreSyncFindIterable; import com.mongodb.stitch.core.services.mongodb.remote.sync.ErrorListener; - +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncCountOptions; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncDeleteResult; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncInsertManyResult; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncInsertOneResult; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateOptions; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateResult; + +import java.util.List; import java.util.Set; import javax.annotation.Nonnull; @@ -107,64 +112,82 @@ public boolean resumeSyncForDocument(final BsonValue documentId) { return this.dataSynchronizer.resumeSyncForDocument(namespace, documentId); } - /** - * Finds a single document by the given id. It is first searched for in the local synchronized - * cache and if not found and there is internet connectivity, it is searched for remotely. - * - * @param documentId the _id of the document to search for. - * @return the document if found locally or remotely. - */ - @Nullable - public DocumentT findOneById(final BsonValue documentId) { - return findOneById(documentId, this.documentClass); + @Override + public long count() { + return count(new BsonDocument()); } - /** - * Finds a single document by the given id. It is first searched for in the local synchronized - * cache and if not found and there is internet connectivity, it is searched for remotely. - * - * @param documentId the _id of the document to search for. - * @param resultClass the class to decode each document into - * @param the target document type of the iterable. - * @return the document if found locally or remotely. 
- */ - @Nullable - public ResultT findOneById(final BsonValue documentId, - final Class resultClass) { - return syncOperations.findOneById(documentId, resultClass).execute(service); + @Override + public long count(final Bson filter) { + return count(filter, new SyncCountOptions()); } - /** - * Updates a document by the given id. It is first searched for in the local synchronized cache - * and if not found and there is internet connectivity, it is searched for remotely. - * - * @param documentId the _id of the document to search for. - * @param update the update specifier. - * @return the result of the local or remote update. - */ - public RemoteUpdateResult updateOneById(final BsonValue documentId, final Bson update) { - return syncOperations.updateOneById(documentId, update).execute(service); + @Override + public long count(final Bson filter, final SyncCountOptions options) { + return syncOperations.count(filter, options).execute(service); } - /** - * Inserts a single document and begins to synchronize it. - * - * @param document the document to insert and synchronize. - * @return the result of the insertion. - */ - public RemoteInsertOneResult insertOneAndSync(final DocumentT document) { + @Override + public CoreSyncAggregateIterable aggregate(final List pipeline) { + return this.aggregate(pipeline, this.documentClass); + } + + @Override + public CoreSyncAggregateIterable aggregate( + final List pipeline, + final Class resultClass + ) { + return new CoreSyncAggregateIterableImpl<>( + pipeline, resultClass, service, syncOperations + ); + } + + @Override + public SyncUpdateResult updateOne(final Bson filter, final Bson update) { + return this.updateOne(filter, update, new SyncUpdateOptions()); + } + + @Override + public SyncUpdateResult updateOne( + final Bson filter, + final Bson update, + final SyncUpdateOptions updateOptions + ) { + return syncOperations.updateOne(filter, update, updateOptions).execute(service); + } + + @Override + public SyncUpdateResult updateMany(final Bson filter, final Bson update) { + return this.updateMany(filter, update, new SyncUpdateOptions()); + } + + @Override + public SyncUpdateResult updateMany( + final Bson filter, + final Bson update, + final SyncUpdateOptions updateOptions + ) { + return this.syncOperations.updateMany(filter, update, updateOptions).execute(service); + } + + @Override + public SyncInsertOneResult insertOneAndSync(final DocumentT document) { return syncOperations.insertOneAndSync(document).execute(service); } - /** - * Deletes a single document by the given id. It is first searched for in the local synchronized - * cache and if not found and there is internet connectivity, it is searched for remotely. - * - * @param documentId the _id of the document to search for. - * @return the result of the local or remote update. 
- */ - public RemoteDeleteResult deleteOneById(final BsonValue documentId) { - return syncOperations.deleteOneById(documentId).execute(service); + @Override + public SyncInsertManyResult insertManyAndSync(final List documents) { + return syncOperations.insertManyAndSync(documents).execute(service); + } + + @Override + public SyncDeleteResult deleteOne(final Bson filter) { + return syncOperations.deleteOne(filter).execute(service); + } + + @Override + public SyncDeleteResult deleteMany(final Bson filter) { + return syncOperations.deleteMany(filter).execute(service); } @Override diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/CoreSyncMongoIterableImpl.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/CoreSyncMongoIterableImpl.java index 0c75c18ae..34947235c 100644 --- a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/CoreSyncMongoIterableImpl.java +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/CoreSyncMongoIterableImpl.java @@ -53,7 +53,7 @@ CoreStitchServiceClient getService() { return service; } - Class geResultClass() { + Class getResultClass() { return resultClass; } diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/CountOperation.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/CountOperation.java new file mode 100644 index 000000000..58a9ef27d --- /dev/null +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/CountOperation.java @@ -0,0 +1,49 @@ +/* + * Copyright 2018-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.mongodb.stitch.core.services.mongodb.remote.sync.internal; + +import com.mongodb.MongoNamespace; +import com.mongodb.client.model.CountOptions; +import com.mongodb.stitch.core.services.internal.CoreStitchServiceClient; +import com.mongodb.stitch.core.services.mongodb.remote.internal.Operation; + +import javax.annotation.Nullable; + +import org.bson.conversions.Bson; + +class CountOperation implements Operation { + private final MongoNamespace namespace; + private final DataSynchronizer dataSynchronizer; + private final Bson filter; + private final CountOptions countOptions; + + CountOperation( + final MongoNamespace namespace, + final DataSynchronizer dataSynchronizer, + final Bson filter, + final CountOptions countOptions + ) { + this.namespace = namespace; + this.dataSynchronizer = dataSynchronizer; + this.filter = filter; + this.countOptions = countOptions; + } + + public Long execute(@Nullable final CoreStitchServiceClient service) { + return this.dataSynchronizer.count(namespace, filter, countOptions); + } +} diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/DataSynchronizer.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/DataSynchronizer.java index 03401ccbc..3277b1b9a 100644 --- a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/DataSynchronizer.java +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/DataSynchronizer.java @@ -21,16 +21,22 @@ import static com.mongodb.stitch.core.services.mongodb.remote.sync.internal.ChangeEvent.changeEventForLocalReplace; import static com.mongodb.stitch.core.services.mongodb.remote.sync.internal.ChangeEvent.changeEventForLocalUpdate; +import com.mongodb.Block; +import com.mongodb.Function; import com.mongodb.MongoClientSettings; import com.mongodb.MongoNamespace; +import com.mongodb.client.AggregateIterable; import com.mongodb.client.MongoClient; import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.CountOptions; import com.mongodb.client.model.FindOneAndReplaceOptions; import com.mongodb.client.model.FindOneAndUpdateOptions; import com.mongodb.client.model.ReturnDocument; +import com.mongodb.client.model.UpdateOptions; import com.mongodb.client.result.DeleteResult; import com.mongodb.client.result.UpdateResult; +import com.mongodb.lang.NonNull; import com.mongodb.stitch.core.StitchServiceErrorCode; import com.mongodb.stitch.core.StitchServiceException; import com.mongodb.stitch.core.internal.common.AuthMonitor; @@ -49,10 +55,10 @@ import java.lang.ref.WeakReference; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; @@ -71,6 +77,7 @@ import org.bson.codecs.Codec; import org.bson.codecs.configuration.CodecRegistries; import org.bson.codecs.configuration.CodecRegistry; +import org.bson.conversions.Bson; import org.bson.diagnostics.Logger; import org.bson.diagnostics.Loggers; @@ -83,7 +90,6 @@ // TODO: Threading model okay? // TODO: implement unwatch // TODO: filter out and forbid usage of version ids outside of here -// TODO: findOneById with filter // TODO: Test delete/delete insert/insert update/update etc...
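A minimal usage sketch of the new count path wired up above (CountOperation delegating to DataSynchronizer.count and, in turn, to countDocuments on the local synchronized collection). The CoreSync<Document> handle and the "done" field are assumptions made up for illustration; the three overloads shown mirror the ones added to CoreSyncImpl in this change.

import com.mongodb.stitch.core.services.mongodb.remote.sync.CoreSync;
import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncCountOptions;
import org.bson.Document;

final class CountUsageSketch {
  // Count every document currently in the local synchronized collection.
  static long countAll(final CoreSync<Document> tasks) {
    return tasks.count();
  }

  // Count only the local documents matching a filter ("done" is a hypothetical field).
  static long countDone(final CoreSync<Document> tasks) {
    return tasks.count(new Document("done", true));
  }

  // SyncCountOptions.limit caps the result, mapping onto CountOptions.limit locally.
  static long countAtMostOne(final CoreSync<Document> tasks) {
    return tasks.count(new Document(), new SyncCountOptions().limit(1));
  }
}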
// TODO: StitchReachabilityMonitor for when Stitch goes down and we can gracefully fail and give // you local only results. @@ -394,7 +400,7 @@ public boolean doSyncPass() { * state of said documents. Utilizes change streams to get "recent" updates to documents of * interest. Documents that are being synchronized from the first time will be fetched via a * full document lookup. Documents that have gone stale will be updated via change events or - * latest documents from the remote. Any conflicts that occur will be resolved locally and + * latest documents with the remote. Any conflicts that occur will be resolved locally and * later relayed remotely on a subsequent iteration of {@link DataSynchronizer#doSyncPass()}. */ private void syncRemoteToLocal() { @@ -1042,7 +1048,7 @@ private void syncLocalToRemote() { String.format( Locale.US, "t='%d': syncLocalToRemote ns=%s documentId=%s exception " - + "updating: %s", + + "updating: %s", logicalT, nsConfig.getNamespace(), docConfig.getDocumentId(), @@ -1348,8 +1354,7 @@ private void resolveConflict( // Update the document locally which will keep the pending writes but with // a new version next time around. - @SuppressWarnings("unchecked") - final BsonDocument docForStorage = + @SuppressWarnings("unchecked") final BsonDocument docForStorage = BsonUtils.documentToBsonDocument( resolvedDocument, syncConfig.getNamespaceConfig(namespace).getDocumentCodec()); @@ -1455,7 +1460,7 @@ public void addWatcher(final MongoNamespace namespace, } public void removeWatcher(final MongoNamespace namespace, - final Callback, Object> watcher) { + final Callback, Object> watcher) { instanceChangeStreamListener.removeWatcher(namespace, watcher); } @@ -1543,6 +1548,7 @@ public void desyncDocumentFromRemote( final BsonValue documentId ) { syncConfig.removeSynchronizedDocument(namespace, documentId); + getLocalCollection(namespace).deleteOne(getDocumentIdFilter(documentId)); triggerListeningToNamespace(namespace); } @@ -1553,10 +1559,10 @@ public void desyncDocumentFromRemote( * * This method allows you to resume sync for a document. * - * @param namespace namespace for the document + * @param namespace namespace for the document * @param documentId the id of the document to resume syncing * @return true if successfully resumed, false if the document - * could not be found or there was an error resuming + * could not be found or there was an error resuming */ boolean resumeSyncForDocument( final MongoNamespace namespace, @@ -1578,6 +1584,45 @@ boolean resumeSyncForDocument( return !config.isPaused(); } + /** + * Counts the number of documents in the collection. + * + * @return the number of documents in the collection + */ + long count(final MongoNamespace namespace) { + return count(namespace, new BsonDocument()); + } + + /** + * Counts the number of documents in the collection according to the given options. + * + * @param filter the query filter + * @return the number of documents in the collection + */ + long count(final MongoNamespace namespace, final Bson filter) { + return count(namespace, filter, new CountOptions()); + } + + /** + * Counts the number of documents in the collection according to the given options. 
+ * + * @param filter the query filter + * @param options the options describing the count + * @return the number of documents in the collection + */ + long count(final MongoNamespace namespace, final Bson filter, final CountOptions options) { + return getLocalCollection(namespace).countDocuments(filter, options); + } + + Collection find( + final MongoNamespace namespace, + final BsonDocument filter + ) { + return getLocalCollection(namespace) + .find(filter) + .into(new ArrayList<>()); + } + public Collection find( final MongoNamespace namespace, final BsonDocument filter, @@ -1587,14 +1632,8 @@ public Collection find( final Class resultClass, final CodecRegistry codecRegistry ) { - // TODO: lock down ids - final Set syncedIds = getSynchronizedDocumentIds(namespace); - final BsonDocument finalFilter = new BsonDocument("$and", new BsonArray(Arrays.asList( - new BsonDocument("_id", new BsonDocument("$in", new BsonArray(new ArrayList<>(syncedIds)))), - filter - ))); return getLocalCollection(namespace, resultClass, codecRegistry) - .find(finalFilter) + .find(filter) .limit(limit) .projection(projection) .sort(sort) @@ -1602,29 +1641,30 @@ public Collection find( } /** - * Finds a single synchronized document by the given _id. If the document is not being - * synchronized or has not yet been found remotely, null will be returned. + * Aggregates documents according to the specified aggregation pipeline. * - * @param namespace the namespace to search for the document in. - * @param documentId the _id of the document. - * @param resultClass the {@link Class} that represents this document in the collection. - * @param codecRegistry the {@link CodecRegistry} that contains a codec for resultClass. - * @param the type of the document in the collection. - * @return the synchronized document if it exists; null otherwise. + * @param pipeline the aggregation pipeline + * @return an iterable containing the result of the aggregation operation */ - public T findOneById( + AggregateIterable aggregate( final MongoNamespace namespace, - final BsonValue documentId, - final Class resultClass, - final CodecRegistry codecRegistry - ) { - // TODO: lock down id - if (!syncConfig.isDocumentSynchronized(namespace, documentId)) { - return null; - } + final List pipeline) { + return aggregate(namespace, pipeline, BsonDocument.class); + } - final BsonDocument filter = new BsonDocument("_id", documentId); - return getLocalCollection(namespace, resultClass, codecRegistry).find(filter).first(); + /** + * Aggregates documents according to the specified aggregation pipeline. + * + * @param pipeline the aggregation pipeline + * @param resultClass the class to decode each document into + * @param the target document type of the iterable. + * @return an iterable containing the result of the aggregation operation + */ + AggregateIterable aggregate( + final MongoNamespace namespace, + final List pipeline, + final Class resultClass) { + return getLocalCollection(namespace).aggregate(pipeline, resultClass); } /** @@ -1634,71 +1674,229 @@ public T findOneById( * @param namespace the namespace to put the document in. * @param document the document to insert. 
*/ - public void insertOneAndSync( - final MongoNamespace namespace, - final BsonDocument document - ) { + void insertOneAndSync(final MongoNamespace namespace, final BsonDocument document) { getLocalCollection(namespace).insertOne(document); - final ChangeEvent event = - changeEventForLocalInsert(namespace, document, true); + final BsonValue documentId = BsonUtils.getDocumentId(document); + final ChangeEvent event = changeEventForLocalInsert(namespace, document, true); final CoreDocumentSynchronizationConfig config = syncConfig.addSynchronizedDocument( namespace, - BsonUtils.getDocumentId(document) + documentId ); config.setSomePendingWrites(logicalT, event); - final BsonValue documentId = BsonUtils.getDocumentId(document); triggerListeningToNamespace(namespace); emitEvent(documentId, event); } /** - * Updates a single synchronized document by its given id with the given update specifiers. - * No update will occur if the _id is not being synchronized. + * Inserts one or more documents. * - * @param namespace the namespace where the document lives. - * @param documentId the _id of the document. - * @param update the update modifiers. - * @return the result of the update. + * @param documents the documents to insert */ - public UpdateResult updateOneById( - final MongoNamespace namespace, - final BsonValue documentId, - final BsonDocument update - ) { - // TODO: lock down id - final CoreDocumentSynchronizationConfig config = - syncConfig.getSynchronizedDocument(namespace, documentId); - if (config == null) { - return UpdateResult.acknowledged(0, 0L, null); + void insertManyAndSync(final MongoNamespace namespace, + final List documents) { + getLocalCollection(namespace).insertMany(documents); + for (final BsonDocument document : documents) { + final BsonValue documentId = BsonUtils.getDocumentId(document); + final ChangeEvent event = changeEventForLocalInsert(namespace, document, true); + final CoreDocumentSynchronizationConfig config = syncConfig.addSynchronizedDocument( + namespace, + documentId + ); + config.setSomePendingWrites(logicalT, event); + emitEvent(documentId, event); } + triggerListeningToNamespace(namespace); + } - // TODO: STITCH-1958 - final BsonDocument documentBeforeUpdate = - getLocalCollection(namespace).find(getDocumentIdFilter(documentId)).first(); + /** + * Update a single document in the collection according to the specified arguments. + * + * @param filter a document describing the query filter, which may not be null. + * @param update a document describing the update, which may not be null. The update to + * apply must include only update operators. + * @return the result of the update one operation + */ + UpdateResult updateOne(final MongoNamespace namespace, final Bson filter, final Bson update) { + return updateOne(namespace, filter, update, new UpdateOptions()); + } - final BsonDocument result = getLocalCollection(namespace) - .findOneAndUpdate( - getDocumentIdFilter(documentId), - update, - new FindOneAndUpdateOptions().returnDocument(ReturnDocument.AFTER)); + /** + * Update a single document in the collection according to the specified arguments. + * + * @param filter a document describing the query filter, which may not be null. + * @param update a document describing the update, which may not be null. The update to + * apply must include only update operators. 
+ * @param updateOptions the options to apply to the update operation + * @return the result of the update one operation + */ + UpdateResult updateOne( + final MongoNamespace namespace, + final Bson filter, + final Bson update, + final UpdateOptions updateOptions) { + // read the local collection + final MongoCollection localCollection = getLocalCollection(namespace); + + // fetch the document prior to updating + final BsonDocument documentBeforeUpdate = getLocalCollection(namespace).find(filter).first(); + + // if there was no document prior and this is not an upsert, + // do not acknowledge the update + if (!updateOptions.isUpsert() && documentBeforeUpdate == null) { + return UpdateResult.acknowledged(0, 0L, null); + } - if (result == null) { + // find and update the single document, returning the document post-update + final BsonDocument documentAfterUpdate = localCollection.findOneAndUpdate( + filter, + update, + new FindOneAndUpdateOptions() + .collation(updateOptions.getCollation()) + .upsert(updateOptions.isUpsert()) + .bypassDocumentValidation(updateOptions.getBypassDocumentValidation()) + .arrayFilters(updateOptions.getArrayFilters()) + .returnDocument(ReturnDocument.AFTER)); + + // if the document was deleted between our earlier check and now, it will not have + // been updated. do not acknowledge the update + if (documentAfterUpdate == null) { return UpdateResult.acknowledged(0, 0L, null); } - final ChangeEvent event = - changeEventForLocalUpdate( - namespace, - documentId, - ChangeEvent.UpdateDescription.diff(documentBeforeUpdate, result), - result, - true); + final ChangeEvent event; + final CoreDocumentSynchronizationConfig config; + final BsonValue documentId = BsonUtils.getDocumentId(documentAfterUpdate); + + // if there was no document prior and this was an upsert, + // treat this as an insert. + // else this is an update + if (documentBeforeUpdate == null && updateOptions.isUpsert()) { + config = syncConfig.addSynchronizedDocument(namespace, documentId); + triggerListeningToNamespace(namespace); + event = changeEventForLocalInsert(namespace, documentAfterUpdate, true); + } else { + config = syncConfig.getSynchronizedDocument(namespace, documentId); + event = changeEventForLocalUpdate( + namespace, + BsonUtils.getDocumentId(documentAfterUpdate), + ChangeEvent.UpdateDescription.diff(documentBeforeUpdate, documentAfterUpdate), + documentAfterUpdate, + true); + } - config.setSomePendingWrites( - logicalT, - event); + config.setSomePendingWrites(logicalT, event); emitEvent(documentId, event); - return UpdateResult.acknowledged(1, 1L, null); + return UpdateResult.acknowledged(1, 1L, updateOptions.isUpsert() ? documentId : null); + } + + /** + * Update all documents in the collection according to the specified arguments. + * + * @param filter a document describing the query filter, which may not be null. + * @param update a document describing the update, which may not be null. The update to + * apply must include only update operators. + * @return the result of the update many operation + */ + UpdateResult updateMany(final MongoNamespace namespace, + final Bson filter, + final Bson update) { + return updateMany(namespace, filter, update, new UpdateOptions()); + } + + /** + * Update all documents in the collection according to the specified arguments. + * + * @param filter a document describing the query filter, which may not be null. + * @param update a document describing the update, which may not be null. The update to + * apply must include only update operators. 
+ * @param updateOptions the options to apply to the update operation + * @return the result of the update many operation + */ + UpdateResult updateMany( + final MongoNamespace namespace, + final Bson filter, + final Bson update, + final UpdateOptions updateOptions) { + // fetch all of the documents that this filter will match + final Map idToBeforeDocumentMap = new HashMap<>(); + final BsonArray ids = new BsonArray(); + this.getLocalCollection(namespace) + .find(filter) + .forEach(new Block() { + @Override + public void apply(@NonNull final BsonDocument bsonDocument) { + final BsonValue documentId = BsonUtils.getDocumentId(bsonDocument); + ids.add(documentId); + idToBeforeDocumentMap.put(documentId, bsonDocument); + } + }); + + // use the matched ids from prior to create a new filter. + // this will prevent any race conditions if documents were + // inserted between the prior find + Bson updatedFilter = updateOptions.isUpsert() + ? filter : new BsonDocument("_id", new BsonDocument("$in", ids)); + + // do the bulk write + final UpdateResult result = this.getLocalCollection(namespace) + .updateMany(updatedFilter, update, updateOptions); + + // if this was an upsert, create the post-update filter using + // the upserted id. + if (result.getUpsertedId() != null) { + updatedFilter = getDocumentIdFilter(result.getUpsertedId()); + } + + // iterate over the after-update docs using the updated filter + this.getLocalCollection(namespace).find(updatedFilter).forEach(new Block() { + @Override + public void apply(@NonNull final BsonDocument afterDocument) { + // get the id of the after-update document, and fetch the before-update + // document from the map we created from our pre-update `find` + final BsonValue documentId = BsonUtils.getDocumentId(afterDocument); + final BsonDocument beforeDocument = idToBeforeDocumentMap.get(documentId); + + // if there was no before-update document and this was not an upsert, + // a document that meets the filter criteria must have been + // inserted or upserted asynchronously between this find and the update. + if (beforeDocument == null && !updateOptions.isUpsert()) { + return; + } + + // because we are looking up a bulk write, we may have queried documents + // that match the updated state, but were not actually modified. + // if the document before the update is the same as the updated doc, + // assume it was not modified and take no further action + if (afterDocument.equals(beforeDocument)) { + return; + } + + final CoreDocumentSynchronizationConfig config; + final ChangeEvent event; + + // if there was no earlier document and this was an upsert, + // treat the upsert as an insert, as far as sync is concerned + // else treat it as a standard update + if (beforeDocument == null && updateOptions.isUpsert()) { + config = syncConfig.addSynchronizedDocument(namespace, documentId); + triggerListeningToNamespace(namespace); + event = changeEventForLocalInsert(namespace, afterDocument, true); + } else { + config = syncConfig.getSynchronizedDocument(namespace, documentId); + event = changeEventForLocalUpdate( + namespace, + documentId, + ChangeEvent.UpdateDescription.diff(beforeDocument, afterDocument), + afterDocument, + true); + } + + config.setSomePendingWrites(logicalT, event); + emitEvent(documentId, event); + } + }); + + return result; } /** @@ -1779,18 +1977,24 @@ private void replaceOrUpsertOneFromRemote( } /** - * Deletes a single synchronized document by its given id. No deletion will occur if the _id is - * not being synchronized. 
+ * Removes at most one document from the collection that matches the given filter. If no + * documents match, the collection is not + * modified. * - * @param namespace the namespace where the document lives. - * @param documentId the _id of the document. - * @return the result of the deletion. + * @param filter the query filter to apply to the delete operation + * @return the result of the remove one operation */ - public DeleteResult deleteOneById( - final MongoNamespace namespace, - final BsonValue documentId - ) { - // TODO: lock down id + DeleteResult deleteOne(final MongoNamespace namespace, final Bson filter) { + final MongoCollection localCollection = getLocalCollection(namespace); + final BsonDocument docToDelete = localCollection + .find(filter) + .first(); + + if (docToDelete == null) { + return DeleteResult.acknowledged(0); + } + + final BsonValue documentId = BsonUtils.getDocumentId(docToDelete); final CoreDocumentSynchronizationConfig config = syncConfig.getSynchronizedDocument(namespace, documentId); @@ -1798,10 +2002,8 @@ public DeleteResult deleteOneById( return DeleteResult.acknowledged(0); } - final DeleteResult result = getLocalCollection(namespace) - .deleteOne(getDocumentIdFilter(documentId)); - final ChangeEvent event = - changeEventForLocalDelete(namespace, documentId, true); + final DeleteResult result = getLocalCollection(namespace).deleteOne(filter); + final ChangeEvent event = changeEventForLocalDelete(namespace, documentId, true); // this block is to trigger coalescence for a delete after insert if (config.getLastUncommittedChangeEvent() != null @@ -1811,12 +2013,61 @@ public DeleteResult deleteOneById( return result; } - config.setSomePendingWrites( - logicalT, event); + config.setSomePendingWrites(logicalT, event); emitEvent(documentId, event); return result; } + /** + * Removes all documents from the collection that match the given query filter. If no documents + * match, the collection is not modified. + * + * @param filter the query filter to apply to the delete operation + * @return the result of the remove many operation + */ + DeleteResult deleteMany(final MongoNamespace namespace, + final Bson filter) { + final MongoCollection localCollection = getLocalCollection(namespace); + final Set idsToDelete = + localCollection + .find(filter) + .map(new Function() { + @Override + @NonNull + public BsonValue apply(@NonNull final BsonDocument bsonDocument) { + return BsonUtils.getDocumentId(bsonDocument); + } + }).into(new HashSet<>()); + + final DeleteResult result = getLocalCollection(namespace).deleteMany(filter); + + for (final BsonValue documentId : idsToDelete) { + final CoreDocumentSynchronizationConfig config = + syncConfig.getSynchronizedDocument(namespace, documentId); + + if (config == null) { + continue; + } + + final ChangeEvent event = + changeEventForLocalDelete(namespace, documentId, true); + + // this block is to trigger coalescence for a delete after insert + if (config.getLastUncommittedChangeEvent() != null + && config.getLastUncommittedChangeEvent().getOperationType() + == ChangeEvent.OperationType.INSERT) { + desyncDocumentFromRemote(config.getNamespace(), config.getDocumentId()); + return result; + } + + config.setSomePendingWrites( + logicalT, event); + emitEvent(documentId, event); + } + + return result; + } + /** * Deletes a single synchronized document by its given id. No deletion will occur if the _id is * not being synchronized.
@@ -2037,7 +2288,7 @@ private Set getLatestDocumentsForStaleFromRemote( private Set getDocumentIds(final Set documents) { final Set ids = new HashSet<>(); - for (final BsonDocument document: documents) { + for (final BsonDocument document : documents) { ids.add(document.get("_id")); } return ids; @@ -2056,13 +2307,13 @@ private static BsonDocument getDocumentIdFilter(final BsonValue documentId) { /** * Adds and returns a document with a new version to the given document. * - * @param document the document to attach a new version to. + * @param document the document to attach a new version to. * @param newVersion the version to attach to the document * @return a document with a new version to the given document. */ private static BsonDocument withNewVersion( - final BsonDocument document, - final BsonDocument newVersion + final BsonDocument document, + final BsonDocument newVersion ) { final BsonDocument newDocument = BsonUtils.copyOfDocument(document); newDocument.put(DOCUMENT_VERSION_FIELD, newVersion); diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/DeleteOneByIdOperation.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/DeleteManyOperation.java similarity index 64% rename from core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/DeleteOneByIdOperation.java rename to core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/DeleteManyOperation.java index 29d9290f5..3732b1d94 100644 --- a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/DeleteOneByIdOperation.java +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/DeleteManyOperation.java @@ -19,33 +19,30 @@ import com.mongodb.MongoNamespace; import com.mongodb.client.result.DeleteResult; import com.mongodb.stitch.core.services.internal.CoreStitchServiceClient; -import com.mongodb.stitch.core.services.mongodb.remote.RemoteDeleteResult; import com.mongodb.stitch.core.services.mongodb.remote.internal.Operation; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncDeleteResult; + import javax.annotation.Nullable; -import org.bson.BsonValue; -class DeleteOneByIdOperation implements Operation { +import org.bson.conversions.Bson; +class DeleteManyOperation implements Operation { private final MongoNamespace namespace; - private final BsonValue documentId; + private final Bson filter; private final DataSynchronizer dataSynchronizer; - DeleteOneByIdOperation( + DeleteManyOperation( final MongoNamespace namespace, - final BsonValue documentId, + final Bson filter, final DataSynchronizer dataSynchronizer ) { this.namespace = namespace; - this.documentId = documentId; + this.filter = filter; this.dataSynchronizer = dataSynchronizer; } - public RemoteDeleteResult execute(@Nullable final CoreStitchServiceClient service) { - final DeleteResult localResult = - this.dataSynchronizer.deleteOneById(namespace, documentId); - if (localResult.getDeletedCount() == 1) { - return new RemoteDeleteResult(localResult.getDeletedCount()); - } - return new RemoteDeleteResult(0); + public SyncDeleteResult execute(@Nullable final CoreStitchServiceClient service) { + final DeleteResult localResult = this.dataSynchronizer.deleteMany(namespace, filter); + return new SyncDeleteResult(localResult.getDeletedCount()); } } 
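As a usage sketch of the filter-based delete API introduced by DeleteOneOperation and DeleteManyOperation above: an _id filter reproduces the old deleteOneById behavior, while deleteMany removes every matching local document and emits a delete change event per synchronized _id. The CoreSync<Document> handle and the "done" field are illustrative assumptions, and SyncDeleteResult is assumed to expose getDeletedCount(), mirroring RemoteDeleteResult.

import com.mongodb.stitch.core.services.mongodb.remote.sync.CoreSync;
import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncDeleteResult;
import org.bson.Document;
import org.bson.types.ObjectId;

final class DeleteUsageSketch {
  // deleteOne with an _id filter covers the removed deleteOneById use case.
  static long deleteById(final CoreSync<Document> tasks, final ObjectId id) {
    final SyncDeleteResult result = tasks.deleteOne(new Document("_id", id));
    return result.getDeletedCount(); // assumed accessor, by analogy with RemoteDeleteResult
  }

  // deleteMany removes all local documents matching the filter in one call.
  static long clearDone(final CoreSync<Document> tasks) {
    return tasks.deleteMany(new Document("done", true)).getDeletedCount();
  }
}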
diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/FindOneByIdOperation.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/DeleteOneOperation.java similarity index 65% rename from core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/FindOneByIdOperation.java rename to core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/DeleteOneOperation.java index 42c624392..7127995bd 100644 --- a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/FindOneByIdOperation.java +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/DeleteOneOperation.java @@ -17,36 +17,31 @@ package com.mongodb.stitch.core.services.mongodb.remote.sync.internal; import com.mongodb.MongoNamespace; +import com.mongodb.client.result.DeleteResult; import com.mongodb.stitch.core.services.internal.CoreStitchServiceClient; import com.mongodb.stitch.core.services.mongodb.remote.internal.Operation; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncDeleteResult; -import javax.annotation.Nonnull; -import org.bson.BsonValue; - -class FindOneByIdOperation implements Operation { +import javax.annotation.Nullable; +import org.bson.conversions.Bson; +class DeleteOneOperation implements Operation { private final MongoNamespace namespace; - private final BsonValue documentId; - private final Class resultClass; + private final Bson filter; private final DataSynchronizer dataSynchronizer; - FindOneByIdOperation( + DeleteOneOperation( final MongoNamespace namespace, - final BsonValue documentId, - final Class resultClass, + final Bson filter, final DataSynchronizer dataSynchronizer ) { this.namespace = namespace; - this.documentId = documentId; - this.resultClass = resultClass; + this.filter = filter; this.dataSynchronizer = dataSynchronizer; } - public T execute(@Nonnull final CoreStitchServiceClient service) { - return this.dataSynchronizer.findOneById( - namespace, - documentId, - resultClass, - service.getCodecRegistry()); + public SyncDeleteResult execute(@Nullable final CoreStitchServiceClient service) { + final DeleteResult localResult = this.dataSynchronizer.deleteOne(namespace, filter); + return new SyncDeleteResult(localResult.getDeletedCount()); } } diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/UpdateOneByIdOperation.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/InsertManyAndSyncOperation.java similarity index 59% rename from core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/UpdateOneByIdOperation.java rename to core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/InsertManyAndSyncOperation.java index de85459e9..788cf6cff 100644 --- a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/UpdateOneByIdOperation.java +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/InsertManyAndSyncOperation.java @@ -17,43 +17,42 @@ package com.mongodb.stitch.core.services.mongodb.remote.sync.internal; import com.mongodb.MongoNamespace; -import 
com.mongodb.client.result.UpdateResult; +import com.mongodb.stitch.core.internal.common.BsonUtils; import com.mongodb.stitch.core.services.internal.CoreStitchServiceClient; -import com.mongodb.stitch.core.services.mongodb.remote.RemoteUpdateResult; import com.mongodb.stitch.core.services.mongodb.remote.internal.Operation; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncInsertManyResult; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + import javax.annotation.Nullable; + import org.bson.BsonDocument; import org.bson.BsonValue; -class UpdateOneByIdOperation implements Operation { +class InsertManyAndSyncOperation implements Operation { private final MongoNamespace namespace; - private final BsonValue documentId; - private final BsonDocument update; + private final List documents; private final DataSynchronizer dataSynchronizer; - UpdateOneByIdOperation( + InsertManyAndSyncOperation( final MongoNamespace namespace, - final BsonValue documentId, - final BsonDocument update, + final List documents, final DataSynchronizer dataSynchronizer ) { this.namespace = namespace; - this.documentId = documentId; - this.update = update; + this.documents = documents; this.dataSynchronizer = dataSynchronizer; } - public RemoteUpdateResult execute(@Nullable final CoreStitchServiceClient service) { - final UpdateResult localResult = - this.dataSynchronizer.updateOneById(namespace, documentId, update); - if (localResult.getMatchedCount() == 1) { - return new RemoteUpdateResult( - localResult.getMatchedCount(), - localResult.getModifiedCount(), - null); + public SyncInsertManyResult execute(@Nullable final CoreStitchServiceClient service) { + this.dataSynchronizer.insertManyAndSync(namespace, documents); + final Map indexToId = new HashMap<>(); + for (int i = 0; i < this.documents.size(); i++) { + indexToId.put((long)i, BsonUtils.getDocumentId(this.documents.get(i))); } - - return new RemoteUpdateResult(0, 0, null); + return new SyncInsertManyResult(indexToId); } } diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/InsertOneAndSyncOperation.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/InsertOneAndSyncOperation.java index e7e4a51d6..12aed8a96 100644 --- a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/InsertOneAndSyncOperation.java +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/InsertOneAndSyncOperation.java @@ -19,12 +19,13 @@ import com.mongodb.MongoNamespace; import com.mongodb.stitch.core.internal.common.BsonUtils; import com.mongodb.stitch.core.services.internal.CoreStitchServiceClient; -import com.mongodb.stitch.core.services.mongodb.remote.RemoteInsertOneResult; import com.mongodb.stitch.core.services.mongodb.remote.internal.Operation; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncInsertOneResult; + import javax.annotation.Nullable; import org.bson.BsonDocument; -class InsertOneAndSyncOperation implements Operation { +class InsertOneAndSyncOperation implements Operation { private final MongoNamespace namespace; private final BsonDocument document; @@ -40,8 +41,8 @@ class InsertOneAndSyncOperation implements Operation { this.dataSynchronizer = dataSynchronizer; } - public RemoteInsertOneResult execute(@Nullable final CoreStitchServiceClient service) { + public SyncInsertOneResult 
execute(@Nullable final CoreStitchServiceClient service) { this.dataSynchronizer.insertOneAndSync(namespace, document); - return new RemoteInsertOneResult(BsonUtils.getDocumentId(document)); + return new SyncInsertOneResult(BsonUtils.getDocumentId(document)); } } diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/SyncOperations.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/SyncOperations.java index b75538ace..cd98f3aaa 100644 --- a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/SyncOperations.java +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/SyncOperations.java @@ -22,11 +22,16 @@ import static com.mongodb.stitch.core.internal.common.BsonUtils.toBsonDocument; import com.mongodb.MongoNamespace; +import com.mongodb.client.model.CountOptions; import com.mongodb.stitch.core.internal.common.BsonUtils; import com.mongodb.stitch.core.services.mongodb.remote.RemoteFindOptions; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncCountOptions; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateOptions; + +import java.util.ArrayList; +import java.util.List; import org.bson.BsonDocument; -import org.bson.BsonValue; import org.bson.codecs.CollectibleCodec; import org.bson.codecs.configuration.CodecRegistry; import org.bson.conversions.Bson; @@ -66,8 +71,6 @@ SyncFindOperation find( return createSyncFindOperation(namespace, filter, resultClass, options); } - - private SyncFindOperation createSyncFindOperation( final MongoNamespace findNamespace, final Bson filter, @@ -93,32 +96,55 @@ private SyncFindOperation createSyncFindOperation( .sort(sortDoc); } - FindOneByIdOperation findOneById( - final BsonValue documentId, - final Class resultClass - ) { - notNull("documentId", documentId); - return new FindOneByIdOperation<>( + CountOperation count(final Bson filter, final SyncCountOptions countOptions) { + return new CountOperation( namespace, - documentId, - resultClass, - dataSynchronizer); + dataSynchronizer, + filter, + new CountOptions().limit(countOptions.getLimit())); } - UpdateOneByIdOperation updateOneById( - final BsonValue documentId, - final Bson update + /** + * Aggregates documents according to the specified aggregation pipeline. + * + * @param pipeline the aggregation pipeline + * @param resultClass the class to decode each document into + * @param the target document type of the iterable. 
+ * @return an iterable containing the result of the aggregation operation + */ + AggregateOperation aggregate( + final List pipeline, + final Class resultClass) { + return new AggregateOperation<>(namespace, dataSynchronizer, pipeline, resultClass); + } + + UpdateOneOperation updateOne( + final Bson filter, + final Bson update, + final SyncUpdateOptions updateOptions ) { - return new UpdateOneByIdOperation<>( + return new UpdateOneOperation( namespace, - documentId, + filter, toBsonDocument(update, documentClass, codecRegistry), - dataSynchronizer); + dataSynchronizer, + updateOptions); } - public InsertOneAndSyncOperation insertOneAndSync( - final DocumentT document + UpdateManyOperation updateMany( + final Bson filter, + final Bson update, + final SyncUpdateOptions updateOptions ) { + return new UpdateManyOperation( + namespace, + filter, + toBsonDocument(update, documentClass, codecRegistry), + dataSynchronizer, + updateOptions); + } + + public InsertOneAndSyncOperation insertOneAndSync(final DocumentT document) { notNull("document", document); final DocumentT docToInsert; if (getCodec(codecRegistry, documentClass) instanceof CollectibleCodec) { @@ -128,16 +154,46 @@ public InsertOneAndSyncOperation insertOneAndSync( } else { docToInsert = document; } - return new InsertOneAndSyncOperation<>( + + return new InsertOneAndSyncOperation( namespace, documentToBsonDocument(docToInsert, codecRegistry), dataSynchronizer); } - DeleteOneByIdOperation deleteOneById(final BsonValue documentId) { - return new DeleteOneByIdOperation( + InsertManyAndSyncOperation insertManyAndSync(final List documents) { + final List bsonDocuments = new ArrayList<>(); + for (final DocumentT document : documents) { + if (getCodec(codecRegistry, documentClass) instanceof CollectibleCodec) { + bsonDocuments.add( + documentToBsonDocument( + ((CollectibleCodec) getCodec(codecRegistry, documentClass)) + .generateIdIfAbsentFromDocument(document), + codecRegistry + ) + ); + } else { + bsonDocuments.add(documentToBsonDocument(document, codecRegistry)); + } + } + + return new InsertManyAndSyncOperation( + namespace, + bsonDocuments, + dataSynchronizer); + } + + DeleteOneOperation deleteOne(final Bson filter) { + return new DeleteOneOperation( + namespace, + filter, + dataSynchronizer); + } + + DeleteManyOperation deleteMany(final Bson filter) { + return new DeleteManyOperation( namespace, - documentId, + filter, dataSynchronizer); } } diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/UpdateManyOperation.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/UpdateManyOperation.java new file mode 100644 index 000000000..50e4de80b --- /dev/null +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/UpdateManyOperation.java @@ -0,0 +1,67 @@ +/* + * Copyright 2018-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.mongodb.stitch.core.services.mongodb.remote.sync.internal; + +import com.mongodb.MongoNamespace; +import com.mongodb.client.model.UpdateOptions; +import com.mongodb.client.result.UpdateResult; +import com.mongodb.stitch.core.services.internal.CoreStitchServiceClient; +import com.mongodb.stitch.core.services.mongodb.remote.internal.Operation; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateOptions; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateResult; + +import javax.annotation.Nullable; + +import org.bson.BsonDocument; +import org.bson.conversions.Bson; + +class UpdateManyOperation implements Operation { + + private final MongoNamespace namespace; + private final Bson filter; + private final BsonDocument update; + private final DataSynchronizer dataSynchronizer; + private final SyncUpdateOptions syncUpdateOptions; + + UpdateManyOperation( + final MongoNamespace namespace, + final Bson filter, + final BsonDocument update, + final DataSynchronizer dataSynchronizer, + final SyncUpdateOptions syncUpdateOptions + ) { + this.namespace = namespace; + this.filter = filter; + this.update = update; + this.dataSynchronizer = dataSynchronizer; + this.syncUpdateOptions = syncUpdateOptions; + } + + public SyncUpdateResult execute(@Nullable final CoreStitchServiceClient service) { + final UpdateResult localResult = this.dataSynchronizer.updateMany( + namespace, + filter, + update, + new UpdateOptions().upsert(this.syncUpdateOptions.isUpsert())); + + return new SyncUpdateResult( + localResult.getMatchedCount(), + localResult.getModifiedCount(), + localResult.getUpsertedId() + ); + } +} diff --git a/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/UpdateOneOperation.java b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/UpdateOneOperation.java new file mode 100644 index 000000000..95c088e43 --- /dev/null +++ b/core/services/mongodb-remote/src/main/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/UpdateOneOperation.java @@ -0,0 +1,66 @@ +/* + * Copyright 2018-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.mongodb.stitch.core.services.mongodb.remote.sync.internal; + +import com.mongodb.MongoNamespace; +import com.mongodb.client.model.UpdateOptions; +import com.mongodb.client.result.UpdateResult; +import com.mongodb.stitch.core.services.internal.CoreStitchServiceClient; +import com.mongodb.stitch.core.services.mongodb.remote.internal.Operation; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateOptions; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateResult; + +import javax.annotation.Nullable; +import org.bson.BsonDocument; +import org.bson.conversions.Bson; + +class UpdateOneOperation implements Operation { + + private final MongoNamespace namespace; + private final Bson filter; + private final BsonDocument update; + private final DataSynchronizer dataSynchronizer; + private final SyncUpdateOptions syncUpdateOptions; + + UpdateOneOperation( + final MongoNamespace namespace, + final Bson filter, + final BsonDocument update, + final DataSynchronizer dataSynchronizer, + final SyncUpdateOptions syncUpdateOptions + ) { + this.namespace = namespace; + this.filter = filter; + this.update = update; + this.dataSynchronizer = dataSynchronizer; + this.syncUpdateOptions = syncUpdateOptions; + } + + public SyncUpdateResult execute(@Nullable final CoreStitchServiceClient service) { + final UpdateResult localResult = this.dataSynchronizer.updateOne( + namespace, + filter, + update, + new UpdateOptions().upsert(this.syncUpdateOptions.isUpsert())); + + return new SyncUpdateResult( + localResult.getMatchedCount(), + localResult.getModifiedCount(), + localResult.getUpsertedId() + ); + } +} diff --git a/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/ChangeEventUnitTests.kt b/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/ChangeEventUnitTests.kt index 6675e7adc..5624b625b 100644 --- a/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/ChangeEventUnitTests.kt +++ b/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/ChangeEventUnitTests.kt @@ -342,7 +342,7 @@ class ChangeEventUnitTests { assertEquals(removedFields, updateDoc["\$unset"]?.asDocument()?.entries?.map { it.key }) } - fun testDiff( + private fun testDiff( collection: MongoCollection, beforeDocument: BsonDocument, expectedUpdateDocument: BsonDocument, diff --git a/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/CoreSyncUnitTests.kt b/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/CoreSyncUnitTests.kt index 9df196a9e..4605dba4f 100644 --- a/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/CoreSyncUnitTests.kt +++ b/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/CoreSyncUnitTests.kt @@ -1,12 +1,25 @@ package com.mongodb.stitch.core.services.mongodb.remote.sync.internal +import com.mongodb.MongoBulkWriteException import com.mongodb.MongoWriteException import com.mongodb.stitch.core.services.mongodb.remote.RemoteFindOptions +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncCountOptions +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateOptions import 
com.mongodb.stitch.server.services.mongodb.local.internal.ServerEmbeddedMongoClientFactory +import org.bson.BsonArray +import org.bson.BsonBoolean import org.bson.BsonDocument import org.bson.BsonInt32 +import org.bson.BsonReader +import org.bson.BsonString +import org.bson.BsonWriter +import org.bson.Document +import org.bson.codecs.Codec +import org.bson.codecs.DecoderContext +import org.bson.codecs.EncoderContext import org.junit.After import org.junit.Assert.assertEquals +import org.junit.Assert.assertFalse import org.junit.Assert.assertNotNull import org.junit.Assert.assertNull import org.junit.Assert.assertTrue @@ -19,6 +32,44 @@ import org.mockito.Mockito.times import org.mockito.Mockito.verify class CoreSyncUnitTests { + companion object { + data class CustomCodecConsideredHarmful( + val consideredHarmful: Boolean, + val author: String + ) + + class CustomCodecConsideredHarmfulCodec : Codec { + override fun getEncoderClass(): Class { + return CustomCodecConsideredHarmful::class.java + } + + override fun encode( + writer: BsonWriter?, + value: CustomCodecConsideredHarmful?, + encoderContext: EncoderContext? + ) { + if (value != null && writer != null) { + writer.writeStartDocument() + writer.writeName("consideredHarmful") + writer.writeBoolean(value.consideredHarmful) + writer.writeName("author") + writer.writeString(value.author) + writer.writeEndDocument() + } + } + + override fun decode( + reader: BsonReader?, + decoderContext: DecoderContext? + ): CustomCodecConsideredHarmful? { + return if (reader == null) + null + else { + CustomCodecConsideredHarmful(reader.readBoolean(), reader.readString()) + } + } + } + } private val harness = SyncUnitTestHarness() @After @@ -31,7 +82,7 @@ class CoreSyncUnitTests { @Test fun testSyncOne() { val ctx = harness.freshTestContext() - val (coreSync, _) = harness.createCoreSyncWithContext(ctx) + val (coreSync, _) = harness.createCoreSyncWithContext(ctx, BsonDocument::class.java) // assert that calling syncOne on coreSync proxies the appropriate call // to the data synchronizer. 
assert that the appropriate document is being synchronized coreSync.syncOne(ctx.testDocumentId) @@ -47,7 +98,7 @@ class CoreSyncUnitTests { @Test fun testSyncMany() { val ctx = harness.freshTestContext() - val (coreSync, _) = harness.createCoreSyncWithContext(ctx) + val (coreSync, _) = harness.createCoreSyncWithContext(ctx, BsonDocument::class.java) // assert that calling syncMany on coreSync proxies the appropriate call to the data // synchronizer for each document being sync'd @@ -57,10 +108,28 @@ class CoreSyncUnitTests { eq(ctx.testDocumentId)) } + @Test + fun testCount() { + val ctx = harness.freshTestContext() + val (coreSync, _) = harness.createCoreSyncWithContext(ctx, BsonDocument::class.java) + + val doc1 = BsonDocument("a", BsonString("b")) + val doc2 = BsonDocument("c", BsonString("d")) + + coreSync.insertManyAndSync(listOf(doc1, doc2)) + + assertEquals(2, coreSync.count()) + assertEquals(1, coreSync.count(BsonDocument("_id", doc1["_id"]))) + + assertEquals(1, coreSync.count(BsonDocument(), SyncCountOptions().limit(1))) + + verify(ctx.dataSynchronizer, times(3)).count(eq(ctx.namespace), any(), any()) + } + @Test fun testFind() { val ctx = harness.freshTestContext() - val (coreSync, syncOperations) = harness.createCoreSyncWithContext(ctx) + val (coreSync, syncOperations) = harness.createCoreSyncWithContext(ctx, BsonDocument::class.java) var findIterable = coreSync.find() @@ -117,55 +186,109 @@ class CoreSyncUnitTests { } @Test - fun testFindOneById() { + fun testAggregate() { val ctx = harness.freshTestContext() - val (coreSync, syncOperations) = harness.createCoreSyncWithContext(ctx) - - assertNull(coreSync.findOneById(ctx.testDocumentId)) - - ctx.insertTestDocument() - - assertEquals( - ctx.testDocument, - SyncUnitTestHarness.withoutSyncVersion(coreSync.findOneById(ctx.testDocumentId))) - - verify(syncOperations, times(2)).findOneById( - eq(ctx.testDocumentId), eq(BsonDocument::class.java)) - - verify(ctx.dataSynchronizer, times(2)).findOneById( - eq(ctx.namespace), eq(ctx.testDocumentId), eq(BsonDocument::class.java), any() - ) + val (coreSync, syncOperations) = harness.createCoreSyncWithContext(ctx, BsonDocument::class.java) + + val doc1 = BsonDocument("a", BsonString("b")).append("c", BsonString("d")) + val doc2 = BsonDocument("a", BsonString("b")).append("c", BsonString("d")) + val doc3 = BsonDocument("a", BsonString("b")).append("c", BsonString("q")) + val doc4 = BsonDocument("a", BsonString("b")).append("c", BsonString("d")) + val doc5 = BsonDocument("e", BsonString("f")).append("g", BsonString("h")) + + coreSync.insertManyAndSync(listOf(doc1, doc2, doc3, doc4, doc5)) + + val pipeline = listOf( + BsonDocument( + "\$match", BsonDocument("_id", BsonDocument("\$in", BsonArray( + listOf(doc1["_id"], doc2["_id"], doc4["_id"]) + )))), + BsonDocument( + "\$project", + BsonDocument("c", BsonInt32(0)) + )) + + val agg = coreSync.aggregate(pipeline).toList() + + assertEquals(3, agg.size) + agg.forEach { assertFalse(it.containsKey("c")) } + val ids = agg.map { it["_id"] } + assertTrue(ids.contains(doc1["_id"])) + assertTrue(ids.contains(doc2["_id"])) + assertTrue(ids.contains(doc4["_id"])) + + verify(syncOperations, times(1)).aggregate(eq(pipeline), eq(BsonDocument::class.java)) + + verify(ctx.dataSynchronizer, times(1)).aggregate( + eq(ctx.namespace), eq(pipeline), eq(BsonDocument::class.java)) } @Test - fun testUpdateOneById() { + fun testUpdateOne() { val ctx = harness.freshTestContext() - val (coreSync, syncOperations) = harness.createCoreSyncWithContext(ctx) + val (coreSync, 
syncOperations) = harness.createCoreSyncWithContext(ctx, BsonDocument::class.java) - var result = coreSync.updateOneById(ctx.testDocumentId, ctx.updateDocument) + var result = coreSync.updateOne(ctx.testDocumentFilter, + ctx.updateDocument, + SyncUpdateOptions().upsert(false)) assertEquals(0, result.matchedCount) assertEquals(0, result.modifiedCount) assertNull(result.upsertedId) ctx.insertTestDocument() - result = coreSync.updateOneById(ctx.testDocumentId, ctx.updateDocument) + result = coreSync.updateOne(ctx.testDocumentFilter, ctx.updateDocument) assertEquals(1, result.matchedCount) assertEquals(1, result.modifiedCount) assertNull(result.upsertedId) - verify(syncOperations, times(2)).updateOneById( - eq(ctx.testDocumentId), eq(ctx.updateDocument)) + verify(syncOperations, times(2)).updateOne( + eq(ctx.testDocumentFilter), eq(ctx.updateDocument), any()) + + verify(ctx.dataSynchronizer, times(2)).updateOne( + eq(ctx.namespace), eq(ctx.testDocumentFilter), eq(ctx.updateDocument), any()) + } + + @Test + fun testUpdateMany() { + val ctx = harness.freshTestContext() + val (coreSync, syncOperations) = harness.createCoreSyncWithContext(ctx, BsonDocument::class.java) + + val doc1 = BsonDocument("a", BsonString("b")) + val doc2 = BsonDocument("c", BsonString("d")) + val doc3 = BsonDocument("a", BsonString("r")) + + val insertResult = coreSync.insertManyAndSync(listOf(doc1, doc2, doc3)) + + assertEquals(3, insertResult.insertedIds.size) - verify(ctx.dataSynchronizer, times(2)).updateOneById( - eq(ctx.namespace), eq(ctx.testDocumentId), eq(ctx.updateDocument)) + val updateFilter = BsonDocument("a", BsonDocument("\$exists", BsonBoolean(true))) + val updateDoc = BsonDocument("\$set", BsonDocument("a", BsonString("z"))) + val updateResult = coreSync.updateMany(updateFilter, updateDoc) + + assertEquals(2, updateResult.matchedCount) + assertEquals(2, updateResult.modifiedCount) + assertNull(updateResult.upsertedId) + + assertEquals(BsonDocument("a", BsonString("z")).append("_id", doc1["_id"]), + coreSync.find(BsonDocument("_id", doc1["_id"])).first()) + assertEquals(BsonDocument("c", BsonString("d")).append("_id", doc2["_id"]), + coreSync.find(BsonDocument("_id", doc2["_id"])).first()) + assertEquals(BsonDocument("a", BsonString("z")).append("_id", doc3["_id"]), + coreSync.find(BsonDocument("_id", doc3["_id"])).first()) + + verify(syncOperations, times(1)).updateMany( + eq(updateFilter), eq(updateDoc), any()) + + verify(ctx.dataSynchronizer, times(1)).updateMany( + eq(ctx.namespace), eq(updateFilter), eq(updateDoc), any()) } @Test fun testInsertOneAndSync() { val ctx = harness.freshTestContext() - val (coreSync, syncOperations) = harness.createCoreSyncWithContext(ctx) + val (coreSync, syncOperations) = harness.createCoreSyncWithContext(ctx, BsonDocument::class.java) assertEquals( ctx.testDocumentId, @@ -187,24 +310,125 @@ class CoreSyncUnitTests { } @Test - fun testDeleteOneById() { + fun testInsertOneAndSyncCustomCodec() { + val ctx = harness.freshTestContext() + val (coreSync, syncOperations) = harness.createCoreSyncWithContext( + ctx, CustomCodecConsideredHarmful::class.java, CustomCodecConsideredHarmfulCodec()) + + val doc1 = CustomCodecConsideredHarmful(true, "Edsger Dijkstra") + + val result = coreSync.insertOneAndSync(doc1) + + val actualDoc1 = coreSync.find(Document(mapOf("consideredHarmful" to true)), BsonDocument::class.java).first() + + assertEquals(actualDoc1!!["_id"], result.insertedId) + + verify(syncOperations, times(1)).insertOneAndSync( + eq(doc1)) + + verify(ctx.dataSynchronizer, 
times(1)).insertOneAndSync( + eq(ctx.namespace), eq(actualDoc1)) + } + + @Test + fun testInsertManyAndSync() { + val ctx = harness.freshTestContext() + val (coreSync, syncOperations) = harness.createCoreSyncWithContext(ctx, BsonDocument::class.java) + + val doc1 = BsonDocument("a", BsonString("b")) + val doc2 = BsonDocument("c", BsonString("d")) + + val result = coreSync.insertManyAndSync(listOf(doc1, doc2)) + + assertEquals(doc1["_id"], result.insertedIds[0]) + assertEquals(doc2["_id"], result.insertedIds[1]) + + try { + coreSync.insertManyAndSync(listOf(doc1, doc2)) + fail("should have received duplicate key error index") + } catch (e: MongoBulkWriteException) { + assertNotNull(e.writeErrors[0]) + assertTrue(e.writeErrors[0].message.contains("E11000")) + } + + verify(syncOperations, times(2)).insertManyAndSync( + eq(listOf(doc1, doc2))) + + verify(ctx.dataSynchronizer, times(2)).insertManyAndSync( + eq(ctx.namespace), eq(listOf(doc1, doc2))) + } + + @Test + fun testInsertManyAndSyncCustomCodec() { + val ctx = harness.freshTestContext() + val (coreSync, syncOperations) = harness.createCoreSyncWithContext( + ctx, CustomCodecConsideredHarmful::class.java, CustomCodecConsideredHarmfulCodec()) + + val doc1 = CustomCodecConsideredHarmful(true, "Edsger Dijkstra") + val doc2 = CustomCodecConsideredHarmful(false, "Eric A. Meyer") + + val result = coreSync.insertManyAndSync(listOf(doc1, doc2)) + + val actualDoc1 = coreSync.find(Document(mapOf("consideredHarmful" to true)), BsonDocument::class.java).first() + val actualDoc2 = coreSync.find(Document(mapOf("consideredHarmful" to false)), BsonDocument::class.java).first() + + assertEquals(actualDoc1!!["_id"], result.insertedIds[0]) + assertEquals(actualDoc2!!["_id"], result.insertedIds[1]) + + verify(syncOperations, times(1)).insertManyAndSync( + eq(listOf(doc1, doc2))) + + verify(ctx.dataSynchronizer, times(1)).insertManyAndSync( + eq(ctx.namespace), eq(listOf(actualDoc1, actualDoc2))) + } + + @Test + fun testDeleteOne() { val ctx = harness.freshTestContext() - val (coreSync, syncOperations) = harness.createCoreSyncWithContext(ctx) + val (coreSync, syncOperations) = harness.createCoreSyncWithContext(ctx, BsonDocument::class.java) - var deleteResult = coreSync.deleteOneById(ctx.testDocumentId) + var deleteResult = coreSync.deleteOne(ctx.testDocumentFilter) assertEquals(0, deleteResult.deletedCount) ctx.insertTestDocument() - deleteResult = coreSync.deleteOneById(ctx.testDocumentId) + deleteResult = coreSync.deleteOne(ctx.testDocumentFilter) assertEquals(1, deleteResult.deletedCount) - verify(syncOperations, times(2)).deleteOneById( - eq(ctx.testDocumentId)) + verify(syncOperations, times(2)).deleteOne( + eq(ctx.testDocumentFilter)) + + verify(ctx.dataSynchronizer, times(2)).deleteOne( + eq(ctx.namespace), eq(ctx.testDocumentFilter)) + } + + @Test + fun testDeleteMany() { + val ctx = harness.freshTestContext() + val (coreSync, syncOperations) = harness.createCoreSyncWithContext(ctx, BsonDocument::class.java) + + val doc1 = BsonDocument("a", BsonString("b")) + val doc2 = BsonDocument("c", BsonString("d")) + val doc3 = BsonDocument("e", BsonString("f")) + + var deleteResult = coreSync.deleteMany(BsonDocument()) + assertEquals(0, deleteResult.deletedCount) + + val result = coreSync.insertManyAndSync(listOf(doc1, doc2, doc3)) + + assertEquals(3, coreSync.count()) + deleteResult = coreSync.deleteMany(BsonDocument("_id", BsonDocument("\$in", BsonArray(result.insertedIds.map { + it.value + })))) + + assertEquals(3, deleteResult.deletedCount) + + 
verify(syncOperations, times(1)).deleteMany( + eq(BsonDocument())) - verify(ctx.dataSynchronizer, times(2)).deleteOneById( - eq(ctx.namespace), eq(ctx.testDocumentId)) + verify(ctx.dataSynchronizer, times(1)).deleteMany( + eq(ctx.namespace), eq(BsonDocument())) } } diff --git a/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/DataSynchronizerTestContext.kt b/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/DataSynchronizerTestContext.kt index f044ccbab..c3098f25f 100644 --- a/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/DataSynchronizerTestContext.kt +++ b/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/DataSynchronizerTestContext.kt @@ -10,6 +10,7 @@ import com.mongodb.stitch.core.services.mongodb.remote.RemoteUpdateResult import com.mongodb.stitch.core.services.mongodb.remote.internal.CoreRemoteMongoCollectionImpl import org.bson.BsonDocument import org.bson.BsonValue +import org.bson.Document import java.io.Closeable import java.lang.Exception @@ -25,6 +26,7 @@ interface DataSynchronizerTestContext : Closeable { val namespace: MongoNamespace val testDocument: BsonDocument val testDocumentId: BsonValue + val testDocumentFilter: BsonDocument var updateDocument: BsonDocument val collectionMock: CoreRemoteMongoCollectionImpl @@ -62,7 +64,7 @@ interface DataSynchronizerTestContext : Closeable { /** * Wait for an event to be emitted. */ - fun waitForEvent() + fun waitForEvents(amount: Int = 1) /** * Reconfigure dataSynchronizer. Insert the contextual test document. @@ -93,7 +95,7 @@ interface DataSynchronizerTestContext : Closeable { /** * Verify the changeEventListener was called for the test document. */ - fun verifyChangeEventListenerCalledForActiveDoc(times: Int, expectedChangeEvent: ChangeEvent? = null) + fun verifyChangeEventListenerCalledForActiveDoc(times: Int, vararg expectedChangeEvents: ChangeEvent = arrayOf()) /** * Verify the errorListener was called for the test document. @@ -112,7 +114,7 @@ interface DataSynchronizerTestContext : Closeable { /** * Verify the stream function was called. */ - fun verifyWatchFunctionCalled(times: Int, expectedArgs: List) + fun verifyWatchFunctionCalled(times: Int, expectedArgs: Document) /** * Verify dataSynchronizer.start() has been called. @@ -132,7 +134,10 @@ interface DataSynchronizerTestContext : Closeable { /** * Queue a pseudo-remote update event to be consumed during R2L. */ - fun queueConsumableRemoteUpdateEvent() + fun queueConsumableRemoteUpdateEvent( + id: BsonValue = testDocumentId, + document: BsonDocument = testDocument + ) /** * Queue a pseudo-remote delete event to be consumed during R2L. 
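Note for reviewers: taken together, the changes in this patch replace the _id-based sync CRUD methods (findOneById, updateOneById, deleteOneById) with filter-based equivalents and add count, aggregate, insertManyAndSync, updateMany, deleteMany, and SyncUpdateOptions, returning the new Sync* result types. Below is a minimal, hypothetical usage sketch of the new surface in the style of the core unit tests; the helper function, variable names, and sample documents are illustrative assumptions and not part of the patch.

import com.mongodb.stitch.core.services.mongodb.remote.sync.CoreSync
import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateOptions
import org.bson.Document

// A minimal sketch, assuming `sync` is an already-configured CoreSync<Document>
// handle for a synchronized collection; document shapes and names are
// illustrative only.
fun sketchFilterBasedSyncApi(sync: CoreSync<Document>) {
    // bulk insert replaces repeated insertOneAndSync calls
    sync.insertManyAndSync(listOf(
        Document("owner", "philip").append("count", 1),
        Document("owner", "timothy").append("count", 1)))

    // count and aggregate run against the local synchronized collection
    val total = sync.count()                               // 2
    val philips = sync.count(Document("owner", "philip"))  // 1
    val matched = sync.aggregate(listOf(
        Document("\$match", Document("owner", "philip")))).toList()

    // updates, deletes, and finds take a query filter instead of a document _id
    sync.updateOne(
        Document("owner", "philip"),
        Document("\$inc", Document("count", 1)),
        SyncUpdateOptions().upsert(true))
    sync.updateMany(
        Document(),
        Document("\$set", Document("seen", true)))
    val refreshed = sync.find(Document("owner", "philip")).firstOrNull()

    sync.deleteMany(Document())
    println("total=$total philips=$philips matched=${matched.size} refreshed=$refreshed")
}
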
diff --git a/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/DataSynchronizerUnitTests.kt b/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/DataSynchronizerUnitTests.kt index b490c1225..5ddcf0bbf 100644 --- a/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/DataSynchronizerUnitTests.kt +++ b/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/DataSynchronizerUnitTests.kt @@ -1,5 +1,7 @@ package com.mongodb.stitch.core.services.mongodb.remote.sync.internal +import com.mongodb.client.model.CountOptions +import com.mongodb.client.model.UpdateOptions import com.mongodb.stitch.core.StitchServiceErrorCode import com.mongodb.stitch.core.StitchServiceException import com.mongodb.stitch.core.services.mongodb.remote.RemoteDeleteResult @@ -8,10 +10,15 @@ import com.mongodb.stitch.core.services.mongodb.remote.sync.internal.SyncUnitTes import com.mongodb.stitch.server.services.mongodb.local.internal.ServerEmbeddedMongoClientFactory import org.bson.BsonDocument import org.bson.BsonInt32 +import org.bson.BsonString +import org.bson.Document +import org.bson.codecs.BsonDocumentCodec +import org.bson.codecs.configuration.CodecRegistries import org.junit.After import org.junit.Assert.assertEquals import org.junit.Assert.assertFalse +import org.junit.Assert.assertNotNull import org.junit.Assert.assertNull import org.junit.Assert.assertTrue import org.junit.Test @@ -52,15 +59,18 @@ class DataSynchronizerUnitTests { if (shouldWaitForError) { ctx.waitForError() } else { - ctx.waitForEvent() + ctx.waitForEvents() } val expectedChangeEvent = if (shouldConflictBeResolvedByRemote) ChangeEvent.changeEventForLocalDelete(ctx.namespace, ctx.testDocumentId, false) else ChangeEvent.changeEventForLocalInsert(ctx.namespace, expectedDocument, true) + + val expectedChangeEvents = if (shouldWaitForError) emptyArray>() else arrayOf(expectedChangeEvent) + ctx.verifyChangeEventListenerCalledForActiveDoc( - times = if (shouldWaitForError) 0 else 1, - expectedChangeEvent = if (shouldWaitForError) null else expectedChangeEvent) + if (shouldWaitForError) 0 else 1, + *expectedChangeEvents) ctx.verifyConflictHandlerCalledForActiveDoc(times = 1) ctx.verifyErrorListenerCalledForActiveDoc(times = if (shouldWaitForError) 1 else 0, error = if (shouldWaitForError) ctx.exceptionToThrowDuringConflict else null) @@ -119,18 +129,18 @@ class DataSynchronizerUnitTests { // insert the doc, wait, sync, and assert that the expected change events are emitted ctx.insertTestDocument() - ctx.waitForEvent() + ctx.waitForEvents() ctx.verifyChangeEventListenerCalledForActiveDoc( - times = 1, - expectedChangeEvent = ChangeEvent.changeEventForLocalInsert( + 1, + ChangeEvent.changeEventForLocalInsert( ctx.namespace, ctx.testDocument, true)) ctx.doSyncPass() - ctx.waitForEvent() + ctx.waitForEvents() ctx.verifyChangeEventListenerCalledForActiveDoc( - times = 1, - expectedChangeEvent = ChangeEvent.changeEventForLocalInsert( + 1, + ChangeEvent.changeEventForLocalInsert( ctx.namespace, ctx.testDocument, false)) @@ -161,10 +171,10 @@ class DataSynchronizerUnitTests { // sync and assert that the conflict handler was called, // accepting the remote delete, nullifying the document ctx.doSyncPass() - ctx.waitForEvent() + ctx.waitForEvents() ctx.verifyChangeEventListenerCalledForActiveDoc( - times = 1, - expectedChangeEvent = 
ChangeEvent.changeEventForLocalDelete(ctx.namespace, ctx.testDocumentId, false)) + 1, + ChangeEvent.changeEventForLocalDelete(ctx.namespace, ctx.testDocumentId, false)) ctx.verifyConflictHandlerCalledForActiveDoc( times = 1, expectedLocalConflictEvent = ChangeEvent.changeEventForLocalInsert(ctx.namespace, ctx.testDocument, true), @@ -182,10 +192,10 @@ class DataSynchronizerUnitTests { // assert that the local doc has been inserted ctx.shouldConflictBeResolvedByRemote = false ctx.doSyncPass() - ctx.waitForEvent() + ctx.waitForEvents() ctx.verifyChangeEventListenerCalledForActiveDoc( - times = 1, - expectedChangeEvent = ChangeEvent.changeEventForLocalInsert(ctx.namespace, ctx.testDocument, true)) + 1, + ChangeEvent.changeEventForLocalInsert(ctx.namespace, ctx.testDocument, true)) ctx.verifyConflictHandlerCalledForActiveDoc( times = 1, expectedLocalConflictEvent = ChangeEvent.changeEventForLocalInsert(ctx.namespace, ctx.testDocument, true), @@ -238,7 +248,7 @@ class DataSynchronizerUnitTests { // insert the document, prepare for an error ctx.insertTestDocument() - ctx.waitForEvent() + ctx.waitForEvents() // sync, verifying that the expected exceptionToThrow was emitted, pausing the document ctx.doSyncPass() @@ -264,10 +274,10 @@ class DataSynchronizerUnitTests { ctx.mockUpdateResult(RemoteUpdateResult(1, 1, null)) ctx.doSyncPass() - ctx.waitForEvent() + ctx.waitForEvents() ctx.verifyChangeEventListenerCalledForActiveDoc( - times = 1, - expectedChangeEvent = ChangeEvent.changeEventForLocalInsert( + 1, + ChangeEvent.changeEventForLocalInsert( ctx.namespace, expectedDocument, false)) ctx.verifyConflictHandlerCalledForActiveDoc(times = 0) ctx.verifyErrorListenerCalledForActiveDoc(times = 0) @@ -286,12 +296,12 @@ class DataSynchronizerUnitTests { // do a sync pass, addressing the conflict ctx.doSyncPass() - ctx.waitForEvent() + ctx.waitForEvents() // verify that a change event has been emitted. 
the conflict will have been handled // in setupPendingReplace ctx.verifyChangeEventListenerCalledForActiveDoc( - times = 1, - expectedChangeEvent = ChangeEvent.changeEventForLocalInsert( + 1, + ChangeEvent.changeEventForLocalInsert( ctx.namespace, expectedDoc, false )) ctx.verifyConflictHandlerCalledForActiveDoc(times = 0) @@ -358,15 +368,15 @@ class DataSynchronizerUnitTests { // insert, sync the doc, update, and verify that the change event was emitted ctx.insertTestDocument() - ctx.waitForEvent() + ctx.waitForEvents() ctx.doSyncPass() - ctx.waitForEvent() - ctx.verifyChangeEventListenerCalledForActiveDoc(times = 1, - expectedChangeEvent = ChangeEvent.changeEventForLocalInsert(ctx.namespace, ctx.testDocument, false)) + ctx.waitForEvents() + ctx.verifyChangeEventListenerCalledForActiveDoc(1, + ChangeEvent.changeEventForLocalInsert(ctx.namespace, ctx.testDocument, false)) ctx.updateTestDocument() - ctx.waitForEvent() - ctx.verifyChangeEventListenerCalledForActiveDoc(times = 1, - expectedChangeEvent = ChangeEvent.changeEventForLocalUpdate( + ctx.waitForEvents() + ctx.verifyChangeEventListenerCalledForActiveDoc(1, + ChangeEvent.changeEventForLocalUpdate( ctx.namespace, ctx.testDocumentId, ChangeEvent.UpdateDescription(BsonDocument("count", BsonInt32(2)), listOf()), @@ -378,8 +388,8 @@ class DataSynchronizerUnitTests { // was of the correct doc, and that no conflicts or errors occured ctx.mockUpdateResult(RemoteUpdateResult(1, 1, null)) ctx.doSyncPass() - ctx.waitForEvent() - ctx.verifyChangeEventListenerCalledForActiveDoc(times = 1, expectedChangeEvent = ChangeEvent.changeEventForLocalUpdate( + ctx.waitForEvents() + ctx.verifyChangeEventListenerCalledForActiveDoc(1, ChangeEvent.changeEventForLocalUpdate( ctx.namespace, ctx.testDocumentId, ChangeEvent.UpdateDescription(BsonDocument("count", BsonInt32(2)), listOf()), @@ -424,22 +434,21 @@ class DataSynchronizerUnitTests { // 1: Update -> Conflict -> Delete (remote wins) // insert a new document, and sync. ctx.insertTestDocument() - ctx.waitForEvent() + ctx.waitForEvents() ctx.doSyncPass() // update the document and wait for the local update event ctx.updateTestDocument() - ctx.waitForEvent() + ctx.waitForEvents() - ctx.verifyChangeEventListenerCalledForActiveDoc(times = 1, - expectedChangeEvent = expectedLocalEvent) + ctx.verifyChangeEventListenerCalledForActiveDoc(1, expectedLocalEvent) // create conflict here by claiming there is no remote doc to update ctx.mockUpdateResult(RemoteUpdateResult(0, 0, null)) // do a sync pass, addressing the conflict ctx.doSyncPass() - ctx.waitForEvent() + ctx.waitForEvents() // verify that a change event has been emitted, a conflict has been handled, // and no errors were emitted ctx.verifyChangeEventListenerCalledForActiveDoc(times = 1) @@ -463,27 +472,28 @@ class DataSynchronizerUnitTests { ctx.mockUpdateResult(RemoteUpdateResult(0, 0, null)) ctx.insertTestDocument() - ctx.waitForEvent() + ctx.waitForEvents() ctx.doSyncPass() - ctx.waitForEvent() - ctx.verifyChangeEventListenerCalledForActiveDoc(times = 1, expectedChangeEvent = - ChangeEvent.changeEventForLocalInsert(ctx.namespace, ctx.testDocument, false)) + ctx.waitForEvents() + ctx.verifyChangeEventListenerCalledForActiveDoc( + 1, + ChangeEvent.changeEventForLocalInsert(ctx.namespace, ctx.testDocument, false)) // update the document and wait for the local update event ctx.updateTestDocument() - ctx.waitForEvent() + ctx.waitForEvents() // do a sync pass, addressing the conflict. 
let local win ctx.shouldConflictBeResolvedByRemote = false ctx.doSyncPass() - ctx.waitForEvent() + ctx.waitForEvents() // verify that a change event has been emitted, a conflict has been handled, // and no errors were emitted ctx.verifyChangeEventListenerCalledForActiveDoc( - times = 1, - expectedChangeEvent = ChangeEvent.changeEventForLocalInsert(ctx.namespace, docAfterUpdate, true)) + 1, + ChangeEvent.changeEventForLocalInsert(ctx.namespace, docAfterUpdate, true)) ctx.verifyConflictHandlerCalledForActiveDoc(1, expectedLocalEvent, expectedRemoteEvent) ctx.verifyErrorListenerCalledForActiveDoc(0) @@ -508,16 +518,16 @@ class DataSynchronizerUnitTests { ctx.mockUpdateResult(RemoteUpdateResult(0, 0, null)) ctx.insertTestDocument() - ctx.waitForEvent() + ctx.waitForEvents() ctx.doSyncPass() - ctx.waitForEvent() + ctx.waitForEvents() ctx.verifyChangeEventListenerCalledForActiveDoc( 1, ChangeEvent.changeEventForLocalInsert(ctx.namespace, ctx.testDocument, false)) ctx.doSyncPass() // update the reset doc ctx.updateTestDocument() - ctx.waitForEvent() + ctx.waitForEvents() // prepare an exceptionToThrow to be thrown, and sync ctx.exceptionToThrowDuringConflict = Exception("bad") @@ -562,7 +572,7 @@ class DataSynchronizerUnitTests { @Test fun testFailedUpdate() { val ctx = harness.freshTestContext() - // set up expectations and insert + // set up expectations and insert val docAfterUpdate = BsonDocument("count", BsonInt32(2)).append("_id", ctx.testDocumentId) val expectedEvent = ChangeEvent.changeEventForLocalUpdate( ctx.namespace, @@ -573,13 +583,13 @@ class DataSynchronizerUnitTests { ) ctx.insertTestDocument() ctx.doSyncPass() - ctx.waitForEvent() + ctx.waitForEvents() // update the inserted doc, and prepare our exceptionToThrow ctx.updateTestDocument() - ctx.waitForEvent() + ctx.waitForEvents() - ctx.verifyChangeEventListenerCalledForActiveDoc(times = 1, expectedChangeEvent = expectedEvent) + ctx.verifyChangeEventListenerCalledForActiveDoc(1, expectedEvent) val expectedException = StitchServiceException("bad", StitchServiceErrorCode.UNKNOWN) ctx.mockUpdateException(expectedException) @@ -622,15 +632,15 @@ class DataSynchronizerUnitTests { // insert a new document. assert that the correct change events // have been reflected w/ and w/o pending writes ctx.insertTestDocument() - ctx.waitForEvent() + ctx.waitForEvents() ctx.verifyChangeEventListenerCalledForActiveDoc(1, ChangeEvent.changeEventForLocalInsert(ctx.namespace, ctx.testDocument, true)) ctx.doSyncPass() - ctx.waitForEvent() + ctx.waitForEvents() ctx.verifyChangeEventListenerCalledForActiveDoc(1, ChangeEvent.changeEventForLocalInsert(ctx.namespace, ctx.testDocument, false)) // delete the document and wait ctx.deleteTestDocument() - ctx.waitForEvent() + ctx.waitForEvents() // verify a delete event with pending writes is called ctx.verifyChangeEventListenerCalledForActiveDoc(1, ChangeEvent.changeEventForLocalDelete( @@ -642,7 +652,7 @@ class DataSynchronizerUnitTests { // sync. 
verify the correct doc was deleted and that a change event // with no pending writes was emitted ctx.doSyncPass() - ctx.waitForEvent() + ctx.waitForEvents() val docCaptor = ArgumentCaptor.forClass(BsonDocument::class.java) verify(ctx.collectionMock, times(1)).deleteOne(docCaptor.capture()) assertEquals(ctx.testDocument["_id"], docCaptor.value["_id"]) @@ -667,10 +677,10 @@ class DataSynchronizerUnitTests { ctx.insertTestDocument() ctx.doSyncPass() - ctx.waitForEvent() + ctx.waitForEvents() ctx.deleteTestDocument() - ctx.waitForEvent() + ctx.waitForEvents() ctx.verifyChangeEventListenerCalledForActiveDoc(1, expectedLocalEvent) @@ -680,7 +690,7 @@ class DataSynchronizerUnitTests { ctx.queueConsumableRemoteUpdateEvent() ctx.doSyncPass() - ctx.waitForEvent() + ctx.waitForEvents() ctx.verifyChangeEventListenerCalledForActiveDoc(1, ChangeEvent.changeEventForLocalReplace( ctx.namespace, @@ -707,10 +717,10 @@ class DataSynchronizerUnitTests { ctx.insertTestDocument() ctx.doSyncPass() - ctx.waitForEvent() + ctx.waitForEvents() ctx.deleteTestDocument() - ctx.waitForEvent() + ctx.waitForEvents() ctx.verifyChangeEventListenerCalledForActiveDoc(1, expectedLocalEvent) @@ -719,7 +729,7 @@ class DataSynchronizerUnitTests { ctx.queueConsumableRemoteUpdateEvent() ctx.shouldConflictBeResolvedByRemote = false ctx.doSyncPass() - ctx.waitForEvent() + ctx.waitForEvents() ctx.verifyChangeEventListenerCalledForActiveDoc(1, ChangeEvent.changeEventForLocalDelete( ctx.namespace, @@ -747,14 +757,14 @@ class DataSynchronizerUnitTests { ) ctx.insertTestDocument() - ctx.waitForEvent() + ctx.waitForEvents() ctx.doSyncPass() ctx.deleteTestDocument() - ctx.waitForEvent() + ctx.waitForEvents() - ctx.verifyChangeEventListenerCalledForActiveDoc(1, expectedChangeEvent = expectedEvent) + ctx.verifyChangeEventListenerCalledForActiveDoc(1, expectedEvent) val expectedException = StitchServiceException("bad", StitchServiceErrorCode.UNKNOWN) ctx.mockDeleteException(expectedException) @@ -773,6 +783,65 @@ class DataSynchronizerUnitTests { assertNull(ctx.findTestDocumentFromLocalCollection()) } + @Test + fun testCount() { + val ctx = harness.freshTestContext() + + ctx.reconfigure() + + assertEquals(0, ctx.dataSynchronizer.count(ctx.namespace, BsonDocument())) + + val doc1 = BsonDocument("hello", BsonString("world")) + val doc2 = BsonDocument("goodbye", BsonString("computer")) + + ctx.dataSynchronizer.insertManyAndSync(ctx.namespace, listOf(doc1, doc2)) + + assertEquals(2, ctx.dataSynchronizer.count(ctx.namespace, BsonDocument())) + + assertEquals(1, ctx.dataSynchronizer.count(ctx.namespace, BsonDocument(), CountOptions().limit(1))) + + assertEquals(1, ctx.dataSynchronizer.count(ctx.namespace, BsonDocument("_id", doc1["_id"]))) + + ctx.dataSynchronizer.deleteMany(ctx.namespace, BsonDocument()) + + assertEquals(0, ctx.dataSynchronizer.count(ctx.namespace, BsonDocument())) + } + + @Test + fun testAggregate() { + val ctx = harness.freshTestContext() + + ctx.reconfigure() + + assertEquals(0, ctx.dataSynchronizer.count(ctx.namespace, BsonDocument())) + + val doc1 = BsonDocument("hello", BsonString("world")).append("a", BsonString("b")) + val doc2 = BsonDocument("hello", BsonString("computer")).append("a", BsonString("b")) + + ctx.dataSynchronizer.insertManyAndSync(ctx.namespace, listOf(doc1, doc2)) + + val iterable = ctx.dataSynchronizer.aggregate(ctx.namespace, + listOf( + BsonDocument( + "\$project", + BsonDocument("_id", BsonInt32(0)) + .append("a", BsonInt32(0)) + ), + BsonDocument( + "\$match", + BsonDocument("hello", 
BsonString("computer")) + ))) + + assertEquals(2, ctx.dataSynchronizer.count(ctx.namespace, BsonDocument())) + assertEquals(1, iterable.count()) + + val actualDoc = iterable.first()!! + + assertNull(actualDoc["a"]) + assertNull(actualDoc["_id"]) + assertEquals(BsonString("computer"), actualDoc["hello"]) + } + @Test fun testInsertOneAndSync() { val ctx = harness.freshTestContext() @@ -784,7 +853,7 @@ class DataSynchronizerUnitTests { ctx.deleteTestDocument() ctx.insertTestDocument() - ctx.waitForEvent() + ctx.waitForEvents() ctx.verifyChangeEventListenerCalledForActiveDoc(1, expectedEvent) @@ -792,7 +861,49 @@ class DataSynchronizerUnitTests { } @Test - fun testUpdateOneById() { + fun testInsertManyAndSync() { + val ctx = harness.freshTestContext() + + ctx.reconfigure() + + val doc1 = BsonDocument("hello", BsonString("world")) + val doc2 = BsonDocument("goodbye", BsonString("computer")) + + ctx.dataSynchronizer.insertManyAndSync(ctx.namespace, listOf(doc1, doc2)) + + val expectedEvent1 = ChangeEvent.changeEventForLocalInsert(ctx.namespace, doc1, true) + val expectedEvent2 = ChangeEvent.changeEventForLocalInsert(ctx.namespace, doc2, true) + + ctx.waitForEvents(amount = 2) + + ctx.verifyChangeEventListenerCalledForActiveDoc(2, expectedEvent1, expectedEvent2) + + assertEquals( + doc1, + ctx.dataSynchronizer.find( + ctx.namespace, + BsonDocument("_id", doc1["_id"]), + 0, + null, + null, + BsonDocument::class.java, + CodecRegistries.fromCodecs(BsonDocumentCodec()) + ).firstOrNull()) + assertEquals( + doc2, + ctx.dataSynchronizer.find( + ctx.namespace, + BsonDocument("_id", doc2["_id"]), + 0, + null, + null, + BsonDocument::class.java, + CodecRegistries.fromCodecs(BsonDocumentCodec()) + ).firstOrNull()) + } + + @Test + fun testUpdateOne() { val ctx = harness.freshTestContext() val expectedDocumentAfterUpdate = BsonDocument("count", BsonInt32(2)).append("_id", ctx.testDocumentId) // assert this doc does not exist @@ -810,12 +921,12 @@ class DataSynchronizerUnitTests { // insert the initial document ctx.insertTestDocument() - ctx.waitForEvent() + ctx.waitForEvents() ctx.verifyChangeEventListenerCalledForActiveDoc(1) // do the actual update updateResult = ctx.updateTestDocument() - ctx.waitForEvent() + ctx.waitForEvents() // assert the UpdateResult is non-zero assertEquals(1, updateResult.matchedCount) @@ -832,7 +943,215 @@ class DataSynchronizerUnitTests { } @Test - fun testDeleteOneById() { + fun testUpsertOne() { + val ctx = harness.freshTestContext() + + ctx.reconfigure() + + val doc1 = BsonDocument("name", BsonString("philip")).append("count", BsonInt32(1)) + + val result = ctx.dataSynchronizer.updateOne( + ctx.namespace, + BsonDocument("name", BsonString("philip")), + BsonDocument("\$inc", BsonDocument("count", BsonInt32(1))), + UpdateOptions().upsert(true)) + + assertEquals(1, result.matchedCount) + assertEquals(1, result.modifiedCount) + assertNotNull(result.upsertedId) + + val expectedEvent1 = ChangeEvent.changeEventForLocalInsert(ctx.namespace, + doc1.append("_id", result.upsertedId), true) + + ctx.waitForEvents(amount = 1) + + ctx.verifyChangeEventListenerCalledForActiveDoc( + 1, + expectedEvent1) + + assertEquals( + doc1, + ctx.dataSynchronizer.find(ctx.namespace, BsonDocument("_id", doc1["_id"])).first()) + + // assert that the stream was opened + ctx.verifyWatchFunctionCalled(1, expectedArgs = + Document(mapOf( + "database" to ctx.namespace.databaseName, + "collection" to ctx.namespace.collectionName, + "ids" to setOf(result.upsertedId) + ))) + + ctx.dataSynchronizer.updateMany( + 
ctx.namespace, + BsonDocument("name", BsonString("philip")), + BsonDocument("\$inc", BsonDocument("count", BsonInt32(1)))) + + ctx.waitForEvents(amount = 2) + + val expectedDocAfterUpdate1 = BsonDocument("name", BsonString("philip")) + .append("count", BsonInt32(2)).append("_id", doc1["_id"]) + + assertEquals( + expectedDocAfterUpdate1, + ctx.dataSynchronizer.find(ctx.namespace, BsonDocument("_id", doc1["_id"])).first()) + } + + @Test + fun testUpdateMany() { + val ctx = harness.freshTestContext() + + ctx.reconfigure() + + val doc1 = BsonDocument("name", BsonString("philip")).append("count", BsonInt32(1)) + val doc2 = BsonDocument("name", BsonString("philip")).append("count", BsonInt32(1)) + val doc3 = BsonDocument("name", BsonString("timothy")).append("count", BsonInt32(1)) + + ctx.dataSynchronizer.insertManyAndSync(ctx.namespace, listOf(doc1, doc2, doc3)) + + val expectedEvent1 = ChangeEvent.changeEventForLocalInsert(ctx.namespace, doc1, true) + val expectedEvent2 = ChangeEvent.changeEventForLocalInsert(ctx.namespace, doc2, true) + val expectedEvent3 = ChangeEvent.changeEventForLocalInsert(ctx.namespace, doc3, true) + + ctx.waitForEvents(amount = 3) + + ctx.verifyChangeEventListenerCalledForActiveDoc( + 3, + expectedEvent1, + expectedEvent2, + expectedEvent3) + + val result = ctx.dataSynchronizer.updateMany( + ctx.namespace, + BsonDocument("name", BsonString("philip")), + BsonDocument("\$set", BsonDocument("count", BsonInt32(2))), + UpdateOptions().upsert(true)) // ensure there wasn't an unnecessary insert + + ctx.findTestDocumentFromLocalCollection() + assertEquals(2, result.modifiedCount) + assertEquals(2, result.matchedCount) + assertNull(result.upsertedId) + + ctx.waitForEvents(amount = 2) + + val expectedDocAfterUpdate1 = BsonDocument("name", BsonString("philip")).append("count", BsonInt32(2)).append("_id", doc1["_id"]) + val expectedDocAfterUpdate2 = BsonDocument("name", BsonString("philip")).append("count", BsonInt32(2)).append("_id", doc2["_id"]) + + ctx.verifyChangeEventListenerCalledForActiveDoc( + 5, + expectedEvent1, + expectedEvent2, + expectedEvent3, + ChangeEvent.changeEventForLocalUpdate( + ctx.namespace, + doc1["_id"], + ChangeEvent.UpdateDescription( + BsonDocument("count", BsonInt32(2)), + listOf() + ), + expectedDocAfterUpdate1, + true), + ChangeEvent.changeEventForLocalUpdate( + ctx.namespace, + doc2["_id"], + ChangeEvent.UpdateDescription( + BsonDocument("count", BsonInt32(2)), + listOf() + ), + expectedDocAfterUpdate2, + true)) + + assertEquals( + expectedDocAfterUpdate1, + ctx.dataSynchronizer.find(ctx.namespace, BsonDocument("_id", doc1["_id"])).first()) + assertEquals( + expectedDocAfterUpdate2, + ctx.dataSynchronizer.find(ctx.namespace, BsonDocument("_id", doc2["_id"])).first()) + assertEquals( + doc3, + ctx.dataSynchronizer.find(ctx.namespace, BsonDocument("_id", doc3["_id"])).first()) + } + + @Test + fun testUpsertMany() { + val ctx = harness.freshTestContext() + + ctx.reconfigure() + + val doc1 = BsonDocument("name", BsonString("philip")).append("count", BsonInt32(2)) + + var result = ctx.dataSynchronizer.updateMany( + ctx.namespace, + BsonDocument("name", BsonString("philip")), + BsonDocument("\$set", BsonDocument("count", BsonInt32(2))), + UpdateOptions().upsert(true)) + + assertEquals(0, result.matchedCount) + assertEquals(0, result.modifiedCount) + assertNotNull(result.upsertedId) + + val expectedEvent1 = ChangeEvent.changeEventForLocalInsert( + ctx.namespace, + doc1.append("_id", result.upsertedId), true) + + ctx.waitForEvents(amount = 1) + + 
ctx.verifyChangeEventListenerCalledForActiveDoc( + 1, + expectedEvent1) + + assertEquals( + doc1, + ctx.dataSynchronizer.find(ctx.namespace, BsonDocument("_id", result.upsertedId)).first()) + + // assert that the stream was opened + ctx.verifyWatchFunctionCalled(1, expectedArgs = + Document(mapOf( + "database" to ctx.namespace.databaseName, + "collection" to ctx.namespace.collectionName, + "ids" to setOf(result.upsertedId) + ))) + + ctx.doSyncPass() + + ctx.queueConsumableRemoteUpdateEvent( + id = result.upsertedId!!, + document = BsonDocument( + "name", + BsonString("philip") + ).append("count", BsonInt32(3)).append("_id", result.upsertedId)) + ctx.doSyncPass() + assertEquals( + BsonDocument("name", BsonString("philip")).append("count", BsonInt32(3)).append("_id", result.upsertedId), + ctx.dataSynchronizer.find(ctx.namespace, BsonDocument("_id", result.upsertedId)).first()) + + val doc2 = BsonDocument("name", BsonString("philip")).append("count", BsonInt32(1)) + + ctx.dataSynchronizer.insertOneAndSync(ctx.namespace, doc2) + + result = ctx.dataSynchronizer.updateMany( + ctx.namespace, + BsonDocument("name", BsonString("philip")), + BsonDocument("\$set", BsonDocument("count", BsonInt32(3))), + UpdateOptions().upsert(true)) + + assertEquals(2, result.matchedCount) + assertEquals(1, result.modifiedCount) + assertNull(result.upsertedId) + + // there should only be 2 events instead of 3 since only 1 document was modified + ctx.waitForEvents(amount = 2) + + val expectedDocAfterUpdate1 = BsonDocument("name", BsonString("philip")).append("count", BsonInt32(3)).append("_id", doc1["_id"]) + + assertEquals( + expectedDocAfterUpdate1, + ctx.dataSynchronizer.find(ctx.namespace, BsonDocument("_id", doc1["_id"])).first()) + + assertTrue(ctx.dataSynchronizer.areAllStreamsOpen()) + } + + @Test + fun testDeleteOne() { val ctx = harness.freshTestContext() // 0: Pre-checks @@ -850,7 +1169,7 @@ class DataSynchronizerUnitTests { // 1: Insert -> Delete -> Coalescence // insert the initial document ctx.insertTestDocument() - ctx.waitForEvent() + ctx.waitForEvents() ctx.verifyChangeEventListenerCalledForActiveDoc(1) // do the actual delete @@ -870,7 +1189,7 @@ class DataSynchronizerUnitTests { // do the actual delete deleteResult = ctx.deleteTestDocument() - ctx.waitForEvent() + ctx.waitForEvents() // assert the UpdateResult is non-zero assertEquals(1, deleteResult.deletedCount) @@ -883,6 +1202,30 @@ class DataSynchronizerUnitTests { assertNull(ctx.findTestDocumentFromLocalCollection()) } + @Test + fun testDeleteMany() { + val ctx = harness.freshTestContext() + + ctx.reconfigure() + + var result = ctx.dataSynchronizer.deleteMany(ctx.namespace, BsonDocument()) + assertEquals(0, result.deletedCount) + assertEquals(0, ctx.dataSynchronizer.count(ctx.namespace, BsonDocument())) + + val doc1 = BsonDocument("hello", BsonString("world")) + val doc2 = BsonDocument("goodbye", BsonString("computer")) + + ctx.dataSynchronizer.insertManyAndSync(ctx.namespace, listOf(doc1, doc2)) + + assertEquals(2, ctx.dataSynchronizer.count(ctx.namespace, BsonDocument())) + + result = ctx.dataSynchronizer.deleteMany(ctx.namespace, BsonDocument()) + + assertEquals(2, result.deletedCount) + assertEquals(0, ctx.dataSynchronizer.count(ctx.namespace, BsonDocument())) + assertNull(ctx.dataSynchronizer.find(ctx.namespace, BsonDocument()).firstOrNull()) + } + @Test fun testConfigure() { val ctx = harness.freshTestContext(false) @@ -900,7 +1243,7 @@ class DataSynchronizerUnitTests { ctx.deleteTestDocument() ctx.insertTestDocument() - 
ctx.waitForEvent() + ctx.waitForEvents() ctx.verifyChangeEventListenerCalledForActiveDoc(1, ChangeEvent.changeEventForLocalInsert( ctx.namespace, ctx.testDocument, true)) diff --git a/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/NamespaceChangeStreamListenerUnitTests.kt b/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/NamespaceChangeStreamListenerUnitTests.kt index e7d1a8143..72907e3f2 100644 --- a/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/NamespaceChangeStreamListenerUnitTests.kt +++ b/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/NamespaceChangeStreamListenerUnitTests.kt @@ -3,6 +3,7 @@ package com.mongodb.stitch.core.services.mongodb.remote.sync.internal import com.mongodb.stitch.core.internal.net.Event import com.mongodb.stitch.server.services.mongodb.local.internal.ServerEmbeddedMongoClientFactory import org.bson.BsonObjectId +import org.bson.Document import org.junit.After import org.junit.Assert.assertEquals import org.junit.Assert.assertFalse @@ -12,7 +13,6 @@ import org.mockito.ArgumentMatchers.eq import org.mockito.Mockito.`when` import org.mockito.Mockito.times import org.mockito.Mockito.verify -import java.util.Collections class NamespaceChangeStreamListenerUnitTests { private val harness = SyncUnitTestHarness() @@ -47,7 +47,7 @@ class NamespaceChangeStreamListenerUnitTests { // set the nsConfig to stale `when`(nsConfigMock.synchronizedDocumentIds).thenReturn(setOf(BsonObjectId())) assertTrue(namespaceChangeStreamListener.openStream()) - val expectedArgs = Collections.singletonList(mapOf( + val expectedArgs = Document(mapOf( "database" to ctx.namespace.databaseName, "collection" to ctx.namespace.collectionName, "ids" to nsConfigMock.synchronizedDocumentIds diff --git a/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/SyncUnitTestHarness.kt b/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/SyncUnitTestHarness.kt index ac0581a89..fc3458839 100644 --- a/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/SyncUnitTestHarness.kt +++ b/core/services/mongodb-remote/src/test/java/com/mongodb/stitch/core/services/mongodb/remote/sync/internal/SyncUnitTestHarness.kt @@ -29,7 +29,10 @@ import org.bson.BsonInt32 import org.bson.BsonObjectId import org.bson.BsonString import org.bson.BsonValue +import org.bson.Document import org.bson.codecs.BsonDocumentCodec +import org.bson.codecs.Codec +import org.bson.codecs.DocumentCodec import org.bson.codecs.configuration.CodecRegistries import org.bson.types.ObjectId import org.junit.Assert @@ -44,9 +47,11 @@ import org.mockito.Mockito.spy import org.mockito.Mockito.times import java.io.Closeable import java.lang.Exception +import java.util.Collections +import java.util.Random import java.util.concurrent.Semaphore import java.util.concurrent.TimeUnit -import java.util.Random +import java.util.concurrent.locks.ReentrantLock class SyncUnitTestHarness : Closeable { companion object { @@ -130,21 +135,7 @@ class SyncUnitTestHarness : Closeable { } } - private open class TestChangeEventListener( - private val expectedEvent: ChangeEvent?, - private val emitEventSemaphore: Semaphore? 
- ) : ChangeEventListener { - override fun onEvent(documentId: BsonValue?, actualEvent: ChangeEvent?) { - try { - if (expectedEvent != null) { - compareEvents(expectedEvent, actualEvent!!) - Assert.assertEquals(expectedEvent.id, documentId) - } - } finally { - emitEventSemaphore?.release() - } - } - } + val waitLock = ReentrantLock() fun newDoc(key: String = "hello", value: BsonValue = BsonString("world")): BsonDocument { return BsonDocument("_id", BsonObjectId()).append(key, value) @@ -220,13 +211,37 @@ class SyncUnitTestHarness : Closeable { private fun newChangeEventListener( emitEventSemaphore: Semaphore? = null, expectedEvent: ChangeEvent? = null - ): ChangeEventListener { - return Mockito.spy(TestChangeEventListener(expectedEvent, emitEventSemaphore)) + ): DataSynchronizerTestContextImpl.TestChangeEventListener { + return Mockito.spy(DataSynchronizerTestContextImpl.TestChangeEventListener(expectedEvent, emitEventSemaphore)) } } @Suppress("UNCHECKED_CAST") private class DataSynchronizerTestContextImpl(shouldPreconfigure: Boolean = true) : DataSynchronizerTestContext { + open class TestChangeEventListener( + private val expectedEvent: ChangeEvent?, + var emitEventSemaphore: Semaphore? + ) : ChangeEventListener { + val eventAccumulator = mutableListOf>() + var totalEventsToAccumulate = 0 + + override fun onEvent(documentId: BsonValue?, actualEvent: ChangeEvent?) { + waitLock.lock() + try { + eventAccumulator.add(actualEvent!!) + if (expectedEvent != null) { + compareEvents(expectedEvent, actualEvent) + Assert.assertEquals(expectedEvent.id, documentId) + } + } finally { + if (eventAccumulator.size >= totalEventsToAccumulate) { + emitEventSemaphore?.release() + } + waitLock.unlock() + } + } + } + override val collectionMock: CoreRemoteMongoCollectionImpl = Mockito.mock(CoreRemoteMongoCollectionImpl::class.java) as CoreRemoteMongoCollectionImpl @@ -234,6 +249,7 @@ class SyncUnitTestHarness : Closeable { private val streamMock = Stream(TestEventStream(this), ChangeEvent.changeEventCoder) override val testDocument = newDoc("count", BsonInt32(1)) override val testDocumentId: BsonObjectId by lazy { testDocument["_id"] as BsonObjectId } + override val testDocumentFilter by lazy { BsonDocument("_id", testDocumentId) } override var updateDocument: BsonDocument = BsonDocument("\$inc", BsonDocument("count", BsonInt32(1))) private val bsonDocumentCodec = BsonDocumentCodec() @@ -354,8 +370,16 @@ class SyncUnitTestHarness : Closeable { bsonDocumentCodec) } - override fun waitForEvent() { - assertTrue(eventSemaphore?.tryAcquire(10, TimeUnit.SECONDS) ?: true) + override fun waitForEvents(amount: Int) { + waitLock.lock() + changeEventListener.totalEventsToAccumulate = amount + if (changeEventListener.totalEventsToAccumulate > changeEventListener.eventAccumulator.size) { + // means sem has been called and we need to wait for more events + eventSemaphore = Semaphore(0) + changeEventListener.emitEventSemaphore = eventSemaphore + } + waitLock.unlock() + assertTrue(changeEventListener.emitEventSemaphore?.tryAcquire(10, TimeUnit.SECONDS) ?: true) } override fun waitForError() { @@ -378,7 +402,11 @@ class SyncUnitTestHarness : Closeable { configureNewErrorListener() configureNewConflictHandler() - return dataSynchronizer.updateOneById(namespace, testDocumentId, updateDocument) + return dataSynchronizer.updateOne( + namespace, + BsonDocument("_id", testDocumentId), + updateDocument + ) } override fun deleteTestDocument(): DeleteResult { @@ -386,7 +414,7 @@ class SyncUnitTestHarness : Closeable { 
configureNewErrorListener() configureNewConflictHandler() - return dataSynchronizer.deleteOneById(namespace, testDocumentId) + return dataSynchronizer.deleteOne(namespace, BsonDocument("_id", testDocumentId)) } override fun doSyncPass() { @@ -403,10 +431,13 @@ class SyncUnitTestHarness : Closeable { mapOf()) } - override fun queueConsumableRemoteUpdateEvent() { + override fun queueConsumableRemoteUpdateEvent( + id: BsonValue, + document: BsonDocument + ) { `when`(dataSynchronizer.getEventsForNamespace(any())).thenReturn( - mapOf(testDocument to ChangeEvent.changeEventForLocalUpdate( - namespace, testDocumentId, null, testDocument, false)), + mapOf(document to ChangeEvent.changeEventForLocalUpdate( + namespace, id, null, document, false)), mapOf()) } @@ -431,21 +462,31 @@ class SyncUnitTestHarness : Closeable { override fun findTestDocumentFromLocalCollection(): BsonDocument? { // TODO: this may be rendered unnecessary with STITCH-1972 return withoutSyncVersion( - dataSynchronizer.findOneById( + dataSynchronizer.find( namespace, - testDocumentId, + BsonDocument("_id", testDocumentId), + 10, + null, + null, BsonDocument::class.java, - CodecRegistries.fromCodecs(bsonDocumentCodec))) + CodecRegistries.fromCodecs(bsonDocumentCodec)).firstOrNull()) } - override fun verifyChangeEventListenerCalledForActiveDoc(times: Int, expectedChangeEvent: ChangeEvent?) { + override fun verifyChangeEventListenerCalledForActiveDoc( + times: Int, + vararg expectedChangeEvents: ChangeEvent + ) { val changeEventArgumentCaptor = ArgumentCaptor.forClass(ChangeEvent::class.java) Mockito.verify(changeEventListener, times(times)).onEvent( - eq(testDocumentId), + any(), changeEventArgumentCaptor.capture() as ChangeEvent?) - if (expectedChangeEvent != null) { - compareEvents(expectedChangeEvent, changeEventArgumentCaptor.value as ChangeEvent) + if (expectedChangeEvents.isNotEmpty()) { + changeEventArgumentCaptor.allValues.forEachIndexed { i, actualChangeEvent -> + compareEvents( + expectedChangeEvents[i], + actualChangeEvent as ChangeEvent) + } } } @@ -475,8 +516,9 @@ class SyncUnitTestHarness : Closeable { } } - override fun verifyWatchFunctionCalled(times: Int, expectedArgs: List) { - Mockito.verify(service, times(times)).streamFunction(eq("watch"), eq(expectedArgs), eq(ChangeEvent.changeEventCoder)) + override fun verifyWatchFunctionCalled(times: Int, expectedArgs: Document) { + Mockito.verify(service, times(times)).streamFunction( + eq("watch"), eq(Collections.singletonList(expectedArgs)), eq(ChangeEvent.changeEventCoder)) } override fun verifyStartCalled(times: Int) { @@ -561,15 +603,20 @@ class SyncUnitTestHarness : Closeable { return namespaceChangeStreamListener to nsConfigMock } - internal fun createCoreSyncWithContext(context: DataSynchronizerTestContext): Pair, SyncOperations> { + internal fun createCoreSyncWithContext( + context: DataSynchronizerTestContext, + resultClass: Class, + codec: Codec? 
= null + ): + Pair, SyncOperations> { val syncOperations = Mockito.spy(SyncOperations( context.namespace, - BsonDocument::class.java, + resultClass, context.dataSynchronizer, - CodecRegistries.fromCodecs(BsonDocumentCodec()))) + CodecRegistries.fromCodecs(codec ?: BsonDocumentCodec(), DocumentCodec()))) val coreSync = CoreSyncImpl( context.namespace, - BsonDocument::class.java, + resultClass, context.dataSynchronizer, (context as DataSynchronizerTestContextImpl).service, syncOperations) diff --git a/core/testutils/src/main/java/com/mongodb/stitch/core/testutils/sync/ProxySyncMethods.kt b/core/testutils/src/main/java/com/mongodb/stitch/core/testutils/sync/ProxySyncMethods.kt index cf13c4df2..124bd0ca2 100644 --- a/core/testutils/src/main/java/com/mongodb/stitch/core/testutils/sync/ProxySyncMethods.kt +++ b/core/testutils/src/main/java/com/mongodb/stitch/core/testutils/sync/ProxySyncMethods.kt @@ -1,11 +1,14 @@ package com.mongodb.stitch.core.testutils.sync -import com.mongodb.stitch.core.services.mongodb.remote.RemoteDeleteResult -import com.mongodb.stitch.core.services.mongodb.remote.RemoteInsertOneResult -import com.mongodb.stitch.core.services.mongodb.remote.RemoteUpdateResult import com.mongodb.stitch.core.services.mongodb.remote.sync.ChangeEventListener import com.mongodb.stitch.core.services.mongodb.remote.sync.ConflictHandler import com.mongodb.stitch.core.services.mongodb.remote.sync.ErrorListener +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncDeleteResult +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncInsertManyResult +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncInsertOneResult +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateOptions +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateResult +import org.bson.BsonDocument import org.bson.BsonValue import org.bson.Document import org.bson.conversions.Bson @@ -15,98 +18,45 @@ import org.bson.conversions.Bson * [com.mongodb.stitch.core.services.mongodb.remote.sync.CoreSync]. */ interface ProxySyncMethods { - /** - * Set the conflict handler and and change event listener on this collection. - * @param conflictResolver the conflict resolver to invoke when a conflict happens between local - * and remote events. - * @param changeEventListener the event listener to invoke when a change event happens for the - * document. - * @param errorListener the error listener to invoke when an irrecoverable error occurs - */ fun configure( conflictResolver: ConflictHandler, changeEventListener: ChangeEventListener?, errorListener: ErrorListener? ) - /** - * Requests that the given document _id be synchronized. - * @param id the document _id to synchronize. - */ fun syncOne(id: BsonValue) - /** - * Stops synchronizing the given document _id. Any uncommitted writes will be lost. - * - * @param id the _id of the document to desynchronize. - */ fun desyncOne(id: BsonValue) - /** - * Returns the set of synchronized document ids in a namespace. - * - * @return the set of synchronized document ids in a namespace. - */ fun getSyncedIds(): Set - /** - * Finds all documents in the collection. - * - * @param filter the query filter - * @return the find iterable interface - */ - fun find(filter: Bson): Iterable - - /** - * Finds a single document by the given id. It is first searched for in the local synchronized - * cache and if not found and there is internet connectivity, it is searched for remotely. - * - * @param id the _id of the document to search for. 
- * @return the document if found locally or remotely. - */ - fun findOneById(id: BsonValue): Document? - - /** - * Updates a document by the given id. It is first searched for in the local synchronized cache - * and if not found and there is internet connectivity, it is searched for remotely. - * - * @param documentId the _id of the document to search for. - * @param update the update specifier. - * @return the result of the local or remote update. - */ - fun updateOneById(documentId: BsonValue, update: Bson): RemoteUpdateResult - - /** - * Inserts a single document and begins to synchronize it. - * - * @param document the document to insert and synchronize. - * @return the result of the insertion. - */ - fun insertOneAndSync(document: Document): RemoteInsertOneResult - - /** - * Deletes a single document by the given id. It is first searched for in the local synchronized - * cache and if not found and there is internet connectivity, it is searched for remotely. - * - * @param documentId the _id of the document to search for. - * @return the result of the local or remote update. - */ - fun deleteOneById(documentId: BsonValue): RemoteDeleteResult - - /** - * Return the set of synchronized document _ids in a namespace - * that have been paused due to an irrecoverable error. - * - * @return the set of paused document _ids in a namespace - */ + fun find(filter: Bson = BsonDocument()): Iterable + + fun aggregate(pipeline: List): Iterable + + fun count(filter: Bson = BsonDocument()): Long + + fun updateOne( + filter: Bson, + update: Bson, + updateOptions: SyncUpdateOptions = SyncUpdateOptions() + ): SyncUpdateResult + + fun updateMany( + filter: Bson, + update: Bson, + updateOptions: SyncUpdateOptions = SyncUpdateOptions() + ): SyncUpdateResult + + fun insertOneAndSync(document: Document): SyncInsertOneResult + + fun insertManyAndSync(documents: List): SyncInsertManyResult + + fun deleteOne(filter: Bson): SyncDeleteResult + + fun deleteMany(filter: Bson): SyncDeleteResult + fun getPausedDocumentIds(): Set - /** - * A document that is paused no longer has remote updates applied to it. - * Any local updates to this document cause it to be resumed. An example of pausing a document - * is when a conflict is being resolved for that document and the handler throws an exception. 
- * - * @param documentId the id of the document to resume syncing - */ fun resumeSyncForDocument(documentId: BsonValue): Boolean } diff --git a/core/testutils/src/main/java/com/mongodb/stitch/core/testutils/sync/SyncIntTestProxy.kt b/core/testutils/src/main/java/com/mongodb/stitch/core/testutils/sync/SyncIntTestProxy.kt index df5150fa0..eecb459f9 100644 --- a/core/testutils/src/main/java/com/mongodb/stitch/core/testutils/sync/SyncIntTestProxy.kt +++ b/core/testutils/src/main/java/com/mongodb/stitch/core/testutils/sync/SyncIntTestProxy.kt @@ -11,6 +11,7 @@ import com.mongodb.stitch.core.services.mongodb.remote.sync.ChangeEventListener import com.mongodb.stitch.core.services.mongodb.remote.sync.ConflictHandler import com.mongodb.stitch.core.services.mongodb.remote.sync.DefaultSyncConflictResolvers import com.mongodb.stitch.core.services.mongodb.remote.sync.ErrorListener +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateOptions import com.mongodb.stitch.core.services.mongodb.remote.sync.internal.ChangeEvent import org.bson.BsonBoolean import org.bson.BsonDocument @@ -21,6 +22,8 @@ import org.bson.Document import org.junit.Assert import org.junit.Assert.assertEquals import org.junit.Assert.assertFalse +import org.junit.Assert.assertNotNull +import org.junit.Assert.assertNull import org.junit.Assert.assertTrue import org.junit.Ignore import org.junit.Test @@ -49,7 +52,7 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { fun testSync() { testSyncInBothDirections { val remoteMethods = syncTestRunner.remoteMethods() - val remoteOperations = syncTestRunner.syncMethods() + val syncOperations = syncTestRunner.syncMethods() val doc1 = Document("hello", "world") val doc2 = Document("hello", "friend") @@ -62,7 +65,7 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { val doc1Filter = Document("_id", doc1Id) // start watching it and always set the value to hello world in a conflict - remoteOperations.configure(ConflictHandler { id: BsonValue, localEvent: ChangeEvent, remoteEvent: ChangeEvent -> + syncOperations.configure(ConflictHandler { id: BsonValue, localEvent: ChangeEvent, remoteEvent: ChangeEvent -> if (id == doc1Id) { val merged = localEvent.fullDocument.getInteger("foo") + remoteEvent.fullDocument.getInteger("foo") @@ -75,7 +78,7 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { }, null, null) // sync on the remote document - remoteOperations.syncOne(doc1Id) + syncOperations.syncOne(doc1Id) streamAndSync() // 1. updating a document remotely should not be reflected until coming back online. @@ -87,20 +90,22 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { assertEquals(1, result.matchedCount) streamAndSync() // because we are offline, the remote doc should not have updated - Assert.assertEquals(doc, remoteOperations.findOneById(doc1Id)) + Assert.assertEquals(doc, syncOperations.find(documentIdFilter(doc1Id)).firstOrNull()) // go back online, and sync // the remote document should now equal our expected update goOnline() streamAndSync() val expectedDocument = Document(doc) expectedDocument["foo"] = 1 - assertEquals(expectedDocument, remoteOperations.findOneById(doc1Id)) + assertEquals(expectedDocument, syncOperations.find(documentIdFilter(doc1Id)).firstOrNull()) // 2. insertOneAndSync should work offline and then sync the document when online. 
goOffline() val doc3 = Document("so", "syncy") - val insResult = remoteOperations.insertOneAndSync(doc3) - Assert.assertEquals(doc3, withoutSyncVersion(remoteOperations.findOneById(insResult.insertedId)!!)) + val insResult = syncOperations.insertOneAndSync(doc3) + Assert.assertEquals( + doc3, + withoutSyncVersion(syncOperations.find(documentIdFilter(insResult.insertedId)).firstOrNull()!!)) streamAndSync() Assert.assertNull(remoteMethods.find(Document("_id", doc3["_id"])).firstOrNull()) goOnline() @@ -117,21 +122,23 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { Assert.assertEquals(1, result2.matchedCount) expectedDocument["foo"] = 2 Assert.assertEquals(expectedDocument, withoutSyncVersion(remoteMethods.find(doc1Filter).first()!!)) - val result3 = remoteOperations.updateOneById( - doc1Id, + val result3 = syncOperations.updateOne( + documentIdFilter(doc1Id), doc1Update) Assert.assertEquals(1, result3.matchedCount) expectedDocument["foo"] = 2 - Assert.assertEquals(expectedDocument, withoutSyncVersion(remoteOperations.findOneById(doc1Id)!!)) + Assert.assertEquals( + expectedDocument, + withoutSyncVersion(syncOperations.find(documentIdFilter(doc1Id)).firstOrNull()!!)) // first pass will invoke the conflict handler and update locally but not remotely yet streamAndSync() Assert.assertEquals(expectedDocument, withoutSyncVersion(remoteMethods.find(doc1Filter).first()!!)) expectedDocument["foo"] = 4 expectedDocument.remove("fooOps") - Assert.assertEquals(expectedDocument, withoutSyncVersion(remoteOperations.findOneById(doc1Id)!!)) + Assert.assertEquals(expectedDocument, withoutSyncVersion(syncOperations.find(doc1Filter).first()!!)) // second pass will update with the ack'd version id streamAndSync() - Assert.assertEquals(expectedDocument, withoutSyncVersion(remoteOperations.findOneById(doc1Id)!!)) + Assert.assertEquals(expectedDocument, withoutSyncVersion(syncOperations.find(doc1Filter).first()!!)) Assert.assertEquals(expectedDocument, withoutSyncVersion(remoteMethods.find(doc1Filter).first()!!)) } } @@ -174,21 +181,21 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { // Update local val localUpdate = Document("\$set", Document("local", "updateWow")) - result = coll.updateOneById(doc1Id, localUpdate) + result = coll.updateOne(doc1Filter, localUpdate) assertEquals(1, result.matchedCount) val expectedLocalDocument = Document(doc) expectedLocalDocument["local"] = "updateWow" - assertEquals(expectedLocalDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedLocalDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) // first pass will invoke the conflict handler and update locally but not remotely yet streamAndSync() assertEquals(expectedRemoteDocument, withoutSyncVersion(remoteColl.find(doc1Filter).first()!!)) expectedLocalDocument["remote"] = "update" - assertEquals(expectedLocalDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedLocalDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) // second pass will update with the ack'd version id streamAndSync() - assertEquals(expectedLocalDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedLocalDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) assertEquals(expectedLocalDocument, withoutSyncVersion(remoteColl.find(doc1Filter).first()!!)) } } @@ -229,16 +236,16 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { // update the local collection. 
// the count field locally should be 2 // the count field remotely should be 3 - result = coll.updateOneById(doc1Id, Document("\$inc", Document("foo", 1))) + result = coll.updateOne(doc1Filter, Document("\$inc", Document("foo", 1))) assertEquals(1, result.matchedCount) expectedDocument["foo"] = 2 - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) // sync the collection. the remote document should be accepted // and this resolution should be reflected locally and remotely streamAndSync() expectedDocument["foo"] = 3 - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) streamAndSync() assertEquals(expectedDocument, withoutSyncVersion(remoteColl.find(doc1Filter).first()!!)) } @@ -280,16 +287,16 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { // update the local collection. // the count field locally should be 2 // the count field remotely should be 3 - result = coll.updateOneById(doc1Id, Document("\$inc", Document("foo", 1))) + result = coll.updateOne(doc1Filter, Document("\$inc", Document("foo", 1))) assertEquals(1, result.matchedCount) expectedDocument["foo"] = 2 - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) // sync the collection. the local document should be accepted // and this resolution should be reflected locally and remotely streamAndSync() expectedDocument["foo"] = 2 - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) streamAndSync() assertEquals(expectedDocument, withoutSyncVersion(remoteColl.find(doc1Filter).first()!!)) } @@ -319,13 +326,13 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { // go offline to avoid processing events. // delete the document locally goOffline() - val result = coll.deleteOneById(doc1Id) + val result = coll.deleteOne(doc1Filter) assertEquals(1, result.deletedCount) // assert that, while the remote document remains val expectedDocument = withoutSyncVersion(Document(doc)) assertEquals(expectedDocument, withoutSyncVersion(remoteColl.find(doc1Filter).first()!!)) - Assert.assertNull(coll.findOneById(doc1Id)) + Assert.assertNull(coll.find(doc1Filter).firstOrNull()) // go online to begin the syncing process. // when syncing, our local delete will be synced to the remote. 
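The hunks above swap every removed *OneById call for an equivalent call that takes an _id filter. A minimal Kotlin sketch of that migration pattern, assuming a ProxySyncMethods handle and a document id as used in the surrounding tests (the wrapper function name is illustrative, not part of this patch):

```kotlin
import com.mongodb.stitch.core.testutils.sync.ProxySyncMethods
import org.bson.BsonObjectId
import org.bson.Document

// Illustrative wrapper showing the id-to-filter migration exercised by the
// tests above: each former *OneById call becomes a filter-based call.
fun migrationSketch(coll: ProxySyncMethods, docId: BsonObjectId) {
    val idFilter = Document("_id", docId)

    // formerly coll.findOneById(docId)
    val localCopy = coll.find(idFilter).firstOrNull()
    println("local copy: $localCopy")

    // formerly coll.updateOneById(docId, update)
    coll.updateOne(idFilter, Document("\$inc", Document("foo", 1)))

    // formerly coll.deleteOneById(docId)
    coll.deleteOne(idFilter)
}
```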
@@ -333,7 +340,7 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { goOnline() streamAndSync() Assert.assertNull(remoteColl.find(doc1Filter).firstOrNull()) - Assert.assertNull(coll.findOneById(doc1Id)) + Assert.assertNull(coll.find(doc1Filter).firstOrNull()) } } @@ -368,7 +375,7 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { // go offline, and delete the document locally goOffline() - val result = coll.deleteOneById(doc1Id) + val result = coll.deleteOne(doc1Filter) assertEquals(1, result.deletedCount) // assert that the remote document has not been deleted, @@ -376,7 +383,7 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { val expectedDocument = Document(doc) expectedDocument["foo"] = 1 assertEquals(expectedDocument, withoutSyncVersion(remoteColl.find(doc1Filter).first()!!)) - Assert.assertNull(coll.findOneById(doc1Id)) + Assert.assertNull(coll.find(doc1Filter).firstOrNull()) // go back online and sync. assert that the remote document has been updated // while the local document reflects the resolution of the conflict @@ -386,7 +393,7 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { expectedDocument.remove("hello") expectedDocument.remove("foo") expectedDocument["well"] = "shoot" - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) } } @@ -403,19 +410,19 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { val insertResult = coll.insertOneAndSync(docToInsert) // find the local document we just inserted - val doc = coll.findOneById(insertResult.insertedId)!! + val doc = coll.find(documentIdFilter(insertResult.insertedId)).first()!! val doc1Id = BsonObjectId(doc.getObjectId("_id")) val doc1Filter = Document("_id", doc1Id) // update the document locally val doc1Update = Document("\$inc", Document("foo", 1)) - assertEquals(1, coll.updateOneById(doc1Id, doc1Update).matchedCount) + assertEquals(1, coll.updateOne(doc1Filter, doc1Update).matchedCount) // assert that nothing has been inserting remotely val expectedDocument = withoutSyncVersion(Document(doc)) expectedDocument["foo"] = 1 Assert.assertNull(remoteColl.find(doc1Filter).firstOrNull()) - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) // go online (in case we weren't already). sync. goOnline() @@ -423,7 +430,7 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { // assert that the local insertion reflects remotely assertEquals(expectedDocument, withoutSyncVersion(remoteColl.find(doc1Filter).first()!!)) - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) } } @@ -440,7 +447,7 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { val insertResult = coll.insertOneAndSync(docToInsert) // find the document we just inserted - val doc = coll.findOneById(insertResult.insertedId)!! + val doc = coll.find(documentIdFilter(insertResult.insertedId)).first()!! 
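The line above reads the freshly inserted document back through a filter built from the returned insertedId, mirroring the documentIdFilter helper introduced later in this patch. A small sketch of that insert-then-read pattern, assuming the same test-proxy handle (the wrapper function is hypothetical):

```kotlin
import com.mongodb.stitch.core.testutils.sync.ProxySyncMethods
import org.bson.BsonDocument
import org.bson.Document

// Illustrative helper: insertOneAndSync returns a SyncInsertOneResult whose
// insertedId (a BsonValue) is wrapped in an _id filter to read the local
// synchronized copy back.
fun insertAndReadBack(coll: ProxySyncMethods): Document? {
    val result = coll.insertOneAndSync(Document("hello", "world"))
    val idFilter = BsonDocument("_id", result.insertedId)
    return coll.find(idFilter).firstOrNull()
}
```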
val doc1Id = BsonObjectId(doc.getObjectId("_id")) val doc1Filter = Document("_id", doc1Id) @@ -450,21 +457,21 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { streamAndSync() val expectedDocument = withoutSyncVersion(Document(doc)) assertEquals(expectedDocument, withoutSyncVersion(remoteColl.find(doc1Filter).first()!!)) - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) // update the document locally val doc1Update = Document("\$inc", Document("foo", 1)) - assertEquals(1, coll.updateOneById(doc1Id, doc1Update).matchedCount) + assertEquals(1, coll.updateOne(doc1Filter, doc1Update).matchedCount) // assert that this update has not been reflected remotely, but has locally assertEquals(expectedDocument, withoutSyncVersion(remoteColl.find(doc1Filter).first()!!)) expectedDocument["foo"] = 1 - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) // sync. assert that our update is reflected locally and remotely streamAndSync() assertEquals(expectedDocument, withoutSyncVersion(remoteColl.find(doc1Filter).first()!!)) - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) } } @@ -482,34 +489,34 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { streamAndSync() // assert the sync'd document is found locally and remotely - val doc = coll.findOneById(insertResult.insertedId)!! + val doc = coll.find(documentIdFilter(insertResult.insertedId)).first()!! val doc1Id = BsonObjectId(doc.getObjectId("_id")) val doc1Filter = Document("_id", doc1Id) val expectedDocument = withoutSyncVersion(Document(doc)) assertEquals(expectedDocument, withoutSyncVersion(remoteColl.find(doc1Filter).first()!!)) - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) // delete the doc locally, then re-insert it. // assert the document is still the same locally and remotely - assertEquals(1, coll.deleteOneById(doc1Id).deletedCount) + assertEquals(1, coll.deleteOne(doc1Filter).deletedCount) coll.insertOneAndSync(doc) assertEquals(expectedDocument, withoutSyncVersion(remoteColl.find(doc1Filter).first()!!)) - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) // update the document locally val doc1Update = Document("\$inc", Document("foo", 1)) - assertEquals(1, coll.updateOneById(doc1Id, doc1Update).matchedCount) + assertEquals(1, coll.updateOne(doc1Filter, doc1Update).matchedCount) // assert that the document has not been updated remotely yet, // but has locally assertEquals(expectedDocument, withoutSyncVersion(remoteColl.find(doc1Filter).first()!!)) expectedDocument["foo"] = 1 - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) // sync. 
assert that the update has been reflected remotely and locally streamAndSync() assertEquals(expectedDocument, withoutSyncVersion(remoteColl.find(doc1Filter).first()!!)) - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) } } @@ -543,7 +550,7 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { // assert that the remote deletion is reflected locally Assert.assertNull(remoteColl.find(doc1Filter).firstOrNull()) - Assert.assertNull(coll.findOneById(doc1Id)) + Assert.assertNull(coll.find(doc1Filter).firstOrNull()) // sync. this should not re-sync the document streamAndSync() @@ -554,7 +561,7 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { // assert that the remote insertion is NOT reflected locally assertEquals(doc, remoteColl.find(doc1Filter).first()) - Assert.assertNull(coll.findOneById(doc1Id)) + Assert.assertNull(coll.find(doc1Filter).firstOrNull()) } } @@ -581,27 +588,27 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { }, null, null) coll.syncOne(doc1Id) streamAndSync() - assertEquals(doc, coll.findOneById(doc1Id)) - Assert.assertNotNull(coll.findOneById(doc1Id)) + assertEquals(doc, coll.find(doc1Filter).firstOrNull()) + Assert.assertNotNull(coll.find(doc1Filter)) // go offline. // delete the document remotely. // update the document locally. goOffline() remoteColl.deleteOne(doc1Filter) - assertEquals(1, coll.updateOneById(doc1Id, Document("\$inc", Document("foo", 1))).matchedCount) + assertEquals(1, coll.updateOne(doc1Filter, Document("\$inc", Document("foo", 1))).matchedCount) // go back online and sync. assert that the document remains deleted remotely, // but has not been reflected locally yet goOnline() streamAndSync() Assert.assertNull(remoteColl.find(doc1Filter).firstOrNull()) - Assert.assertNotNull(coll.findOneById(doc1Id)) + Assert.assertNotNull(coll.find(doc1Filter).firstOrNull()) // sync again. assert that the resolution is reflected locally and remotely streamAndSync() Assert.assertNotNull(remoteColl.find(doc1Filter).firstOrNull()) - Assert.assertNotNull(coll.findOneById(doc1Id)) + Assert.assertNotNull(coll.find(doc1Filter).firstOrNull()) } } @@ -628,8 +635,8 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { }, null, null) coll.syncOne(doc1Id) streamAndSync() - assertEquals(doc, coll.findOneById(doc1Id)) - Assert.assertNotNull(coll.findOneById(doc1Id)) + assertEquals(doc, coll.find(doc1Filter).firstOrNull()) + Assert.assertNotNull(coll.find(doc1Filter).firstOrNull()) // delete the document remotely, then reinsert it. // wait for the events to stream @@ -639,7 +646,7 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { wait.acquire() // update the local document concurrently. sync. - assertEquals(1, coll.updateOneById(doc1Id, Document("\$inc", Document("foo", 1))).matchedCount) + assertEquals(1, coll.updateOne(doc1Filter, Document("\$inc", Document("foo", 1))).matchedCount) streamAndSync() // assert that the remote doc has not reflected the update. 
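The conflict tests above always register a conflict handler before the first sync pass and then mark the document for synchronization. A rough Kotlin sketch of that configure-then-sync flow, assuming a ProxySyncMethods handle and a handler body chosen only for illustration (the tests shown here frequently pass null for the change-event and error listeners):

```kotlin
import com.mongodb.stitch.core.services.mongodb.remote.sync.ConflictHandler
import com.mongodb.stitch.core.services.mongodb.remote.sync.internal.ChangeEvent
import com.mongodb.stitch.core.testutils.sync.ProxySyncMethods
import org.bson.BsonValue
import org.bson.Document

// Illustrative configure-then-sync flow used throughout the conflict tests:
// a conflict handler must be in place before the first sync pass; the
// change-event and error listeners may be left null.
fun configureAndSync(coll: ProxySyncMethods, docId: BsonValue) {
    coll.configure(
        ConflictHandler { _: BsonValue, _: ChangeEvent<Document>, remoteEvent: ChangeEvent<Document> ->
            // resolve by keeping the remote copy; DefaultSyncConflictResolvers
            // provides built-in resolvers (e.g. localWins) for common policies
            remoteEvent.fullDocument
        },
        null,
        null)

    coll.syncOne(docId)
}
```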
@@ -648,12 +655,12 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { assertEquals(doc, withoutSyncVersion(remoteColl.find(doc1Filter).first()!!)) val expectedDocument = Document("_id", doc1Id.value) expectedDocument["hello"] = "again" - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) // do another sync pass. assert that the local and remote docs are in sync streamAndSync() assertEquals(expectedDocument, withoutSyncVersion(remoteColl.find(doc1Filter).first()!!)) - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) } } @@ -678,17 +685,17 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { coll.configure(failingConflictHandler, null, null) coll.syncOne(doc1Id) streamAndSync() - assertEquals(doc, coll.findOneById(doc1Id)) + assertEquals(doc, coll.find(doc1Filter).firstOrNull()) // update the document locally. sync. - assertEquals(1, coll.updateOneById(doc1Id, Document("\$inc", Document("foo", 1))).matchedCount) + assertEquals(1, coll.updateOne(doc1Filter, Document("\$inc", Document("foo", 1))).matchedCount) streamAndSync() // assert that the local update has been reflected remotely. val expectedDocument = Document(withoutSyncVersion(doc)) expectedDocument["foo"] = 1 assertEquals(expectedDocument, withoutSyncVersion(remoteColl.find(doc1Filter).first()!!)) - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) } } @@ -716,8 +723,8 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { }, null, null) coll.syncOne(doc1Id) streamAndSync() - assertEquals(doc, coll.findOneById(doc1Id)) - Assert.assertNotNull(coll.findOneById(doc1Id)) + assertEquals(doc, coll.find(doc1Filter).firstOrNull()) + Assert.assertNotNull(coll.find(doc1Filter).firstOrNull()) // update the document remotely. wait for the update event to store. val sem = watchForEvents(syncTestRunner.namespace) @@ -725,7 +732,7 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { sem.acquire() // update the document locally. - assertEquals(1, coll.updateOneById(doc1Id, Document("\$inc", Document("foo", 1))).matchedCount) + assertEquals(1, coll.updateOne(doc1Filter, Document("\$inc", Document("foo", 1))).matchedCount) // sync. assert that the remote document has received that update, // but locally the document has resolved to deletion @@ -734,13 +741,13 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { expectedDocument["foo"] = 1 assertEquals(expectedDocument, withoutSyncVersion(remoteColl.find(doc1Filter).first()!!)) goOffline() - Assert.assertNull(coll.findOneById(doc1Id)) + Assert.assertNull(coll.find(doc1Filter).firstOrNull()) // go online and sync. the deletion should be reflected remotely and locally now goOnline() streamAndSync() Assert.assertNull(remoteColl.find(doc1Filter).firstOrNull()) - Assert.assertNull(coll.findOneById(doc1Id)) + Assert.assertNull(coll.find(doc1Filter).firstOrNull()) } } @@ -772,12 +779,14 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { // reconfigure sync and the same way. do a sync pass. 
powerCycleDevice() coll.configure(DefaultSyncConflictResolvers.localWins(), null, null) + val sem = watchForEvents(syncTestRunner.namespace) streamAndSync() // update the document remotely. assert the update is reflected remotely. // reload our configuration again. reconfigure Sync again. val expectedDocument = Document(doc) var result = remoteColl.updateOne(doc1Filter, withNewSyncVersionSet(Document("\$inc", Document("foo", 2)))) + assertTrue(sem.tryAcquire(10, TimeUnit.SECONDS)) assertEquals(1, result.matchedCount) expectedDocument["foo"] = 3 assertEquals(expectedDocument, withoutSyncVersion(remoteColl.find(doc1Filter).first()!!)) @@ -785,10 +794,10 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { coll.configure(DefaultSyncConflictResolvers.localWins(), null, null) // update the document locally. assert its success, after reconfiguration. - result = coll.updateOneById(doc1Id, Document("\$inc", Document("foo", 1))) + result = coll.updateOne(doc1Filter, Document("\$inc", Document("foo", 1))) assertEquals(1, result.matchedCount) expectedDocument["foo"] = 2 - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) // reconfigure again. powerCycleDevice() @@ -805,7 +814,7 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { // assert the update was reflected locally. reconfigure again. expectedDocument["foo"] = 2 - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) powerCycleDevice() coll.configure(DefaultSyncConflictResolvers.localWins(), null, null) @@ -827,12 +836,12 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { val doc1Id = coll.insertOneAndSync(docToInsert).insertedId // assert the document exists locally. desync it. - assertEquals(docToInsert, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(docToInsert, withoutSyncVersion(coll.find(documentIdFilter(doc1Id)).first()!!)) coll.desyncOne(doc1Id) // sync. assert that the desync'd document no longer exists locally streamAndSync() - Assert.assertNull(coll.findOneById(doc1Id)) + Assert.assertNull(coll.find(documentIdFilter(doc1Id)).firstOrNull()) } } @@ -859,113 +868,23 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { streamAndSync() val expectedDocument = Document(docToInsert) expectedDocument["friend"] = "welcome" - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) assertEquals(docToInsert, withoutSyncVersion(remoteColl.find(doc1Filter).first()!!)) // sync again. assert that the resolution is reflected // locally and remotely. 
streamAndSync() - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) assertEquals(expectedDocument, withoutSyncVersion(remoteColl.find(doc1Filter).first()!!)) } } - @Test - fun testPausedDocumentConfig() { - testSyncInBothDirections { - val testSync = syncTestRunner.syncMethods() - val remoteColl = syncTestRunner.remoteMethods() - var errorEmitted = false - - var conflictCounter = 0 - - testSync.configure( - ConflictHandler { _: BsonValue, _: ChangeEvent, remoteEvent: ChangeEvent -> - if (conflictCounter == 0) { - conflictCounter++ - errorEmitted = true - throw Exception("ouch") - } - remoteEvent.fullDocument - }, - ChangeEventListener { _: BsonValue, _: ChangeEvent -> - }, - ErrorListener { _, _ -> - }) - - // insert an initial doc - val testDoc = Document("hello", "world") - val result = testSync.insertOneAndSync(testDoc) - - // do a sync pass, synchronizing the doc - streamAndSync() - - Assert.assertNotNull(remoteColl.find(Document("_id", testDoc.get("_id"))).first()) - - // update the doc - val expectedDoc = Document("hello", "computer") - testSync.updateOneById(result.insertedId, Document("\$set", expectedDoc)) - - // create a conflict - var sem = watchForEvents(syncTestRunner.namespace) - remoteColl.updateOne(Document("_id", result.insertedId), withNewSyncVersionSet(Document("\$inc", Document("foo", 2)))) - sem.acquire() - - // do a sync pass, and throw an error during the conflict resolver - // freezing the document - streamAndSync() - Assert.assertTrue(errorEmitted) - - // update the doc remotely - val nextDoc = Document("hello", "friend") - - sem = watchForEvents(syncTestRunner.namespace) - remoteColl.updateOne(Document("_id", result.insertedId), nextDoc) - sem.acquire() - streamAndSync() - - // it should not have updated the local doc, as the local doc should be paused - assertEquals( - withoutId(expectedDoc), - withoutSyncVersion(withoutId(testSync.find(Document("_id", result.insertedId)).first()!!))) - - // update the local doc. 
this should unfreeze the config - testSync.updateOneById(result.insertedId, Document("\$set", Document("no", "op"))) - - streamAndSync() - - // this should still be the remote doc since remote wins - assertEquals( - withoutId(nextDoc), - withoutSyncVersion(withoutId(testSync.find(Document("_id", result.insertedId)).first()!!))) - - // update the doc remotely - val lastDoc = Document("good night", "computer") - - sem = watchForEvents(syncTestRunner.namespace) - remoteColl.updateOne( - Document("_id", result.insertedId), - withNewSyncVersion(lastDoc) - ) - sem.acquire() - - // now that we're sync'd and resumed, it should be reflected locally - // TODO: STITCH-1958 Possible race condition here for update listening - streamAndSync() - - assertEquals( - withoutId(lastDoc), - withoutSyncVersion( - withoutId(testSync.find(Document("_id", result.insertedId)).first()!!))) - } - } - @Test fun testConfigure() { val coll = syncTestRunner.syncMethods() val remoteColl = syncTestRunner.remoteMethods() - // insert a documnet locally + // insert a document locally val docToInsert = Document("hello", "world") val insertedId = coll.insertOneAndSync(docToInsert).insertedId @@ -1014,7 +933,7 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { coll.configure(failingConflictHandler, null, null) val insertResult = coll.insertOneAndSync(docToInsert) - val doc = coll.findOneById(insertResult.insertedId) + val doc = coll.find(documentIdFilter(insertResult.insertedId)).first() val doc1Id = BsonObjectId(doc?.getObjectId("_id")) val doc1Filter = Document("_id", doc1Id) @@ -1029,19 +948,19 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { assertEquals(0, versionCounterOf(firstRemoteDoc)) - assertEquals(expectedDocument, coll.findOneById(doc1Id)) + assertEquals(expectedDocument, coll.find(doc1Filter).firstOrNull()) // the remote document after a local update, but before a sync pass, should have the // same version as the original document, and be equivalent to the unupdated document val doc1Update = Document("\$inc", Document("foo", 1)) - assertEquals(1, coll.updateOneById(doc1Id, doc1Update).matchedCount) + assertEquals(1, coll.updateOne(doc1Filter, doc1Update).matchedCount) val secondRemoteDocBeforeSyncPass = remoteColl.find(doc1Filter).first()!! assertEquals(expectedDocument, withoutSyncVersion(secondRemoteDocBeforeSyncPass)) assertEquals(versionOf(firstRemoteDoc), versionOf(secondRemoteDocBeforeSyncPass)) expectedDocument["foo"] = 1 - assertEquals(expectedDocument, coll.findOneById(doc1Id)) + assertEquals(expectedDocument, coll.find(doc1Filter).firstOrNull()) // the remote document after a local update, and after a sync pass, should have a new // version with the same instance ID as the original document, a version counter @@ -1052,18 +971,18 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { assertEquals(instanceIdOf(firstRemoteDoc), instanceIdOf(secondRemoteDoc)) assertEquals(1, versionCounterOf(secondRemoteDoc)) - assertEquals(expectedDocument, coll.findOneById(doc1Id)) + assertEquals(expectedDocument, coll.find(doc1Filter).firstOrNull()) // the remote document after a local delete and local insert, but before a sync pass, // should have the same version as the previous document - assertEquals(1, coll.deleteOneById(doc1Id).deletedCount) + assertEquals(1, coll.deleteOne(doc1Filter).deletedCount) coll.insertOneAndSync(doc!!) val thirdRemoteDocBeforeSyncPass = remoteColl.find(doc1Filter).first()!! 
assertEquals(expectedDocument, withoutSyncVersion(thirdRemoteDocBeforeSyncPass)) expectedDocument.remove("foo") - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) // the remote document after a local delete and local insert, and after a sync pass, // should have the same instance ID as before and a version count, since the change @@ -1072,7 +991,7 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { val thirdRemoteDoc = remoteColl.find(doc1Filter).first()!! assertEquals(expectedDocument, withoutSyncVersion(thirdRemoteDoc)) - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) assertEquals(instanceIdOf(secondRemoteDoc), instanceIdOf(thirdRemoteDoc)) assertEquals(2, versionCounterOf(thirdRemoteDoc)) @@ -1080,14 +999,14 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { // the remote document after a local delete, a sync pass, a local insert, and after // another sync pass should have a new instance ID, with a version counter of zero, // since the change events are not coalesced - assertEquals(1, coll.deleteOneById(doc1Id).deletedCount) + assertEquals(1, coll.deleteOne(doc1Filter).deletedCount) streamAndSync() coll.insertOneAndSync(doc) streamAndSync() val fourthRemoteDoc = remoteColl.find(doc1Filter).first()!! assertEquals(expectedDocument, withoutSyncVersion(thirdRemoteDoc)) - assertEquals(expectedDocument, withoutSyncVersion(coll.findOneById(doc1Id)!!)) + assertEquals(expectedDocument, withoutSyncVersion(coll.find(doc1Filter).first()!!)) Assert.assertNotEquals(instanceIdOf(secondRemoteDoc), instanceIdOf(fourthRemoteDoc)) assertEquals(0, versionCounterOf(fourthRemoteDoc)) @@ -1146,18 +1065,18 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { // sync. assert the document has been synced. streamAndSync() - Assert.assertNotNull(coll.findOneById(doc1Id)) + Assert.assertNotNull(coll.find(documentIdFilter(doc1Id)).firstOrNull()) // update the document locally. - coll.updateOneById(doc1Id, Document("\$inc", Document("i", 1))) + coll.updateOne(documentIdFilter(doc1Id), Document("\$inc", Document("i", 1))) // sync. assert the document still exists streamAndSync() - Assert.assertNotNull(coll.findOneById(doc1Id)) + Assert.assertNotNull(coll.find(documentIdFilter(doc1Id)).firstOrNull()) // sync. 
assert the document still exists streamAndSync() - Assert.assertNotNull(coll.findOneById(doc1Id)) + Assert.assertNotNull(coll.find(documentIdFilter(doc1Id))) } } @@ -1182,11 +1101,11 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { coll.syncOne(doc1Id) streamAndSync() - Assert.assertNotNull(coll.findOneById(doc1Id)) + Assert.assertNotNull(coll.find(doc1Filter).firstOrNull()) - coll.updateOneById(doc1Id, Document("\$inc", Document("i", 1))) + coll.updateOne(doc1Filter, Document("\$inc", Document("i", 1))) streamAndSync() - Assert.assertNotNull(coll.findOneById(doc1Id)) + Assert.assertNotNull(coll.find(doc1Filter).firstOrNull()) assertEquals(1, remoteColl.deleteOne(doc1Filter).deletedCount) powerCycleDevice() @@ -1195,7 +1114,7 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { }, null, null) streamAndSync() - Assert.assertNull(coll.findOneById(doc1Id)) + Assert.assertNull(coll.find(doc1Filter).firstOrNull()) } } @@ -1221,16 +1140,16 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { coll.syncOne(doc1Id!!) streamAndSync() - Assert.assertNotNull(coll.findOneById(doc1Id)) + Assert.assertNotNull(coll.find(documentIdFilter(doc1Id)).firstOrNull()) - coll.updateOneById(doc1Id, Document("\$inc", Document("i", 1))) + coll.updateOne(documentIdFilter(doc1Id), Document("\$inc", Document("i", 1))) streamAndSync() - Assert.assertNotNull(coll.findOneById(doc1Id)) + Assert.assertNotNull(coll.find(documentIdFilter(doc1Id))) coll.syncOne(doc2Id!!) streamAndSync() - Assert.assertNotNull(coll.findOneById(doc1Id)) - Assert.assertNotNull(coll.findOneById(doc2Id)) + Assert.assertNotNull(coll.find(documentIdFilter(doc1Id)).firstOrNull()) + Assert.assertNotNull(coll.find(documentIdFilter(doc2Id)).firstOrNull()) } } @@ -1313,9 +1232,9 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { // only an actual update document (with $set and $unset) // can work for the rest of this test syncTestRunner.mdbService.rules.rule(syncTestRunner.mdbRule._id).remove() - val result = coll.updateOneById(doc1Id, updateDoc) + val result = coll.updateOne(doc1Filter, updateDoc) assertEquals(1, result.matchedCount) - assertEquals(docAfterUpdate, withoutId(withoutSyncVersion(coll.findOneById(doc1Id)!!))) + assertEquals(docAfterUpdate, withoutId(withoutSyncVersion(coll.find(doc1Filter).first()!!))) // set they_are to unwriteable. 
the update should only update i_am // setting i_am to false and they_are to true would fail this test @@ -1339,7 +1258,7 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { ) streamAndSync() - assertEquals(docAfterUpdate, withoutId(withoutSyncVersion(coll.findOneById(doc1Id)!!))) + assertEquals(docAfterUpdate, withoutId(withoutSyncVersion(coll.find(doc1Filter).first()!!))) assertEquals(docAfterUpdate, withoutId(withoutSyncVersion(remoteColl.find(doc1Filter).first()!!))) assertTrue(eventSemaphore.tryAcquire(10, TimeUnit.SECONDS)) } @@ -1379,7 +1298,7 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { // update the doc val expectedDoc = Document("hello", "computer") - testSync.updateOneById(result.insertedId, Document("\$set", expectedDoc)) + testSync.updateOne(documentIdFilter(result.insertedId), Document("\$set", expectedDoc)) // create a conflict var sem = watchForEvents(syncTestRunner.namespace) @@ -1407,15 +1326,13 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { // resume syncing here assertTrue(testSync.resumeSyncForDocument(result.insertedId)) + streamAndSync() // update the doc remotely val lastDoc = Document("good night", "computer") sem = watchForEvents(syncTestRunner.namespace) - remoteColl.updateOne( - Document("_id", result.insertedId), - withNewSyncVersion(lastDoc) - ) + remoteColl.updateOne(Document("_id", result.insertedId), withNewSyncVersion(lastDoc)) sem.acquire() // now that we're sync'd and resumed, it should be reflected locally @@ -1429,6 +1346,188 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { } } + @Test + fun testReadsBeforeAndAfterSync() { + val coll = syncTestRunner.syncMethods() + val remoteColl = syncTestRunner.remoteMethods() + + coll.configure(failingConflictHandler, null, null) + + val doc1 = Document("hello", "world") + val doc2 = Document("hello", "friend") + val doc3 = Document("hello", "goodbye") + + val insertResult = remoteColl.insertMany(listOf(doc1, doc2, doc3)) + assertEquals(3, insertResult.insertedIds.size) + + assertEquals(0, coll.count()) + assertEquals(0, coll.find().toList().size) + assertEquals(0, coll.aggregate(listOf(Document(mapOf( + "\$match" to mapOf("_id" to mapOf("\$in" to insertResult.insertedIds.map { + it.value })) + )))).toList().size) + + insertResult.insertedIds.forEach { coll.syncOne(it.value) } + streamAndSync() + + assertEquals(3, coll.count()) + assertEquals(3, coll.find().toList().size) + assertEquals(3, coll.aggregate(listOf(Document(mapOf( + "\$match" to mapOf("_id" to mapOf("\$in" to insertResult.insertedIds.map { + it.value })) + )))).toList().size) + + insertResult.insertedIds.forEach { coll.desyncOne(it.value) } + streamAndSync() + + assertEquals(0, coll.count()) + assertEquals(0, coll.find().toList().size) + assertEquals(0, coll.aggregate(listOf(Document(mapOf( + "\$match" to mapOf("_id" to mapOf("\$in" to insertResult.insertedIds.map { + it.value })) + )))).toList().size) + } + + @Test + fun testInsertManyNoConflicts() { + val coll = syncTestRunner.syncMethods() + val remoteColl = syncTestRunner.remoteMethods() + + coll.configure(failingConflictHandler, null, null) + + val doc1 = Document("hello", "world") + val doc2 = Document("hello", "friend") + val doc3 = Document("hello", "goodbye") + + val insertResult = coll.insertManyAndSync(listOf(doc1, doc2, doc3)) + assertEquals(3, insertResult.insertedIds.size) + + assertEquals(3, coll.count()) + assertEquals(3, coll.find().toList().size) + assertEquals(3, 
coll.aggregate(listOf(Document(mapOf( + "\$match" to mapOf("_id" to mapOf("\$in" to insertResult.insertedIds.map { + it.value })) + )))).toList().size) + + assertEquals(0, remoteColl.find(Document()).toList().size) + streamAndSync() + + assertEquals(3, remoteColl.find(Document()).toList().size) + assertEquals(doc1, withoutSyncVersion(remoteColl.find(Document("_id", doc1["_id"])).first()!!)) + assertEquals(doc2, withoutSyncVersion(remoteColl.find(Document("_id", doc2["_id"])).first()!!)) + assertEquals(doc3, withoutSyncVersion(remoteColl.find(Document("_id", doc3["_id"])).first()!!)) + } + + @Test + fun testUpdateManyNoConflicts() { + val coll = syncTestRunner.syncMethods() + val remoteColl = syncTestRunner.remoteMethods() + + coll.configure(failingConflictHandler, null, null) + + var updateResult = coll.updateMany( + Document(mapOf( + "fish" to listOf("one", "two", "red", "blue") + )), + Document("\$set", mapOf( + "fish" to listOf("black", "blue", "old", "new") + ))) + + assertEquals(0, updateResult.modifiedCount) + assertEquals(0, updateResult.matchedCount) + assertNull(updateResult.upsertedId) + + updateResult = coll.updateMany( + Document(mapOf( + "fish" to listOf("one", "two", "red", "blue") + )), + Document("\$set", mapOf( + "fish" to listOf("black", "blue", "old", "new") + )), + SyncUpdateOptions().upsert(true)) + + assertEquals(0, updateResult.modifiedCount) + assertEquals(0, updateResult.matchedCount) + assertNotNull(updateResult.upsertedId) + + val doc1 = Document(mapOf( + "hello" to "world", + "fish" to listOf("one", "two", "red", "blue") + )) + val doc2 = Document("hello", "friend") + val doc3 = Document("hello", "goodbye") + + val insertResult = coll.insertManyAndSync(listOf(doc1, doc2, doc3)) + assertEquals(3, insertResult.insertedIds.size) + + streamAndSync() + + assertEquals(4, remoteColl.find(Document()).toList().size) + + updateResult = coll.updateMany( + Document("fish", Document("\$exists", true)), + Document("\$set", Document("fish", listOf("trout", "mackerel", "cod", "hake"))) + ) + + assertEquals(2, updateResult.modifiedCount) + assertEquals(2, updateResult.matchedCount) + assertNull(updateResult.upsertedId) + + assertEquals(4, coll.count()) + + var localFound = coll.find(Document("fish", Document("\$exists", true))) + assertEquals(2, localFound.toList().size) + localFound.forEach { assertEquals(listOf("trout", "mackerel", "cod", "hake"), it!!["fish"]) } + + streamAndSync() + + val remoteFound = remoteColl.find(Document("fish", Document("\$exists", true))) + localFound = coll.find(Document("fish", Document("\$exists", true))) + + assertEquals(2, localFound.toList().size) + assertEquals(2, remoteFound.toList().size) + localFound.forEach { assertEquals(listOf("trout", "mackerel", "cod", "hake"), it!!["fish"]) } + remoteFound.forEach { assertEquals(listOf("trout", "mackerel", "cod", "hake"), it!!["fish"]) } + } + + @Test + fun testDeleteManyNoConflicts() { + val coll = syncTestRunner.syncMethods() + val remoteColl = syncTestRunner.remoteMethods() + + coll.configure(failingConflictHandler, null, null) + + val doc1 = Document("hello", "world") + val doc2 = Document("hello", "friend") + val doc3 = Document("hello", "goodbye") + + val insertResult = coll.insertManyAndSync(listOf(doc1, doc2, doc3)) + assertEquals(3, insertResult.insertedIds.size) + + assertEquals(3, coll.count()) + assertEquals(3, coll.find().toList().size) + assertEquals(3, coll.aggregate(listOf(Document(mapOf( + "\$match" to mapOf("_id" to mapOf("\$in" to insertResult.insertedIds.map { + it.value })) + 
)))).toList().size) + + assertEquals(0, remoteColl.find(Document()).toList().size) + streamAndSync() + + assertEquals(3, remoteColl.find(Document()).toList().size) + coll.deleteMany(Document(mapOf( + "_id" to mapOf("\$in" to insertResult.insertedIds.map { + it.value })))) + + assertEquals(3, remoteColl.find(Document()).toList().size) + assertEquals(0, coll.find(Document()).toList().size) + + streamAndSync() + + assertEquals(0, remoteColl.find(Document()).toList().size) + assertEquals(0, coll.find(Document()).toList().size) + } + private fun watchForEvents(namespace: MongoNamespace, n: Int = 1): Semaphore { println("watching for $n change event(s) ns=$namespace") val waitFor = AtomicInteger(n) @@ -1547,6 +1646,9 @@ class SyncIntTestProxy(private val syncTestRunner: SyncIntTestRunner) { return newDocument } + private fun documentIdFilter(documentId: BsonValue) = + BsonDocument("_id", documentId) + private val failingConflictHandler = ConflictHandler { _: BsonValue, _: ChangeEvent, _: ChangeEvent -> Assert.fail("did not expect a conflict") throw IllegalStateException("unreachable") diff --git a/core/testutils/src/main/java/com/mongodb/stitch/core/testutils/sync/SyncIntTestRunner.kt b/core/testutils/src/main/java/com/mongodb/stitch/core/testutils/sync/SyncIntTestRunner.kt index 3be678fb6..a24e628e7 100644 --- a/core/testutils/src/main/java/com/mongodb/stitch/core/testutils/sync/SyncIntTestRunner.kt +++ b/core/testutils/src/main/java/com/mongodb/stitch/core/testutils/sync/SyncIntTestRunner.kt @@ -112,9 +112,6 @@ interface SyncIntTestRunner { @Test fun testInsertInsertConflict() - @Test - fun testPausedDocumentConfig() - @Test fun testConfigure() @@ -138,4 +135,16 @@ interface SyncIntTestRunner { @Test fun testResumeSyncForDocumentResumesSync() + + @Test + fun testReadsBeforeAndAfterSync() + + @Test + fun testInsertManyNoConflicts() + + @Test + fun testUpdateManyNoConflicts() + + @Test + fun testDeleteManyNoConflicts() } diff --git a/server/services/mongodb-remote/src/main/java/com/mongodb/stitch/server/services/mongodb/remote/Sync.java b/server/services/mongodb-remote/src/main/java/com/mongodb/stitch/server/services/mongodb/remote/Sync.java index 1d7682441..e9b30c990 100644 --- a/server/services/mongodb-remote/src/main/java/com/mongodb/stitch/server/services/mongodb/remote/Sync.java +++ b/server/services/mongodb-remote/src/main/java/com/mongodb/stitch/server/services/mongodb/remote/Sync.java @@ -16,13 +16,17 @@ package com.mongodb.stitch.server.services.mongodb.remote; -import com.mongodb.stitch.core.services.mongodb.remote.RemoteDeleteResult; -import com.mongodb.stitch.core.services.mongodb.remote.RemoteInsertOneResult; -import com.mongodb.stitch.core.services.mongodb.remote.RemoteUpdateResult; import com.mongodb.stitch.core.services.mongodb.remote.sync.ChangeEventListener; import com.mongodb.stitch.core.services.mongodb.remote.sync.ConflictHandler; import com.mongodb.stitch.core.services.mongodb.remote.sync.ErrorListener; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncCountOptions; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncDeleteResult; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncInsertManyResult; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncInsertOneResult; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateOptions; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateResult; +import java.util.List; import java.util.Set; import javax.annotation.Nonnull; @@ -103,14 +107,40 
@@ void configure(@Nonnull final ConflictHandler conflictResolver, boolean resumeSyncForDocument(@Nonnull final BsonValue documentId); /** - * Finds all documents in the collection. + * Counts the number of documents in the collection that have been synchronized with the remote. + * + * @return the number of documents in the collection + */ + long count(); + + /** + * Counts the number of documents in the collection that have been synchronized with the remote + * according to the given options. + * + * @param filter the query filter + * @return the number of documents in the collection + */ + long count(final Bson filter); + + /** + * Counts the number of documents in the collection that have been synchronized with the remote + * according to the given options. + * + * @param filter the query filter + * @param options the options describing the count + * @return the number of documents in the collection + */ + long count(final Bson filter, final SyncCountOptions options); + + /** + * Finds all documents in the collection that have been synchronized with the remote. * * @return the find iterable interface */ SyncFindIterable find(); /** - * Finds all documents in the collection. + * Finds all documents in the collection that have been synchronized with the remote. * * @param resultClass the class to decode each document into * @param the target document type of the iterable. @@ -119,7 +149,7 @@ void configure(@Nonnull final ConflictHandler conflictResolver, SyncFindIterable find(final Class resultClass); /** - * Finds all documents in the collection. + * Finds all documents in the collection that have been synchronized with the remote. * * @param filter the query filter * @return the find iterable interface @@ -127,59 +157,131 @@ void configure(@Nonnull final ConflictHandler conflictResolver, SyncFindIterable find(final Bson filter); /** - * Finds all documents in the collection. + * Finds all documents in the collection that have been synchronized with the remote. * * @param filter the query filter * @param resultClass the class to decode each document into * @param the target document type of the iterable. * @return the find iterable interface */ - SyncFindIterable find(final Bson filter, final Class resultClass); + SyncFindIterable find( + final Bson filter, + final Class resultClass); + /** - * Finds a single document by the given id. It is first searched for in the local synchronized - * cache and if not found and there is internet connectivity, it is searched for remotely. + * Aggregates documents that have been synchronized with the remote + * according to the specified aggregation pipeline. * - * @param documentId the _id of the document to search for. - * @return a task containing the document if found locally or remotely. + * @param pipeline the aggregation pipeline + * @return an iterable containing the result of the aggregation operation */ - DocumentT findOneById(final BsonValue documentId); + SyncAggregateIterable aggregate(final List pipeline); /** - * Finds a single document by the given id. It is first searched for in the local synchronized - * cache and if not found and there is internet connectivity, it is searched for remotely. + * Aggregates documents that have been synchronized with the remote + * according to the specified aggregation pipeline. * - * @param documentId the _id of the document to search for. + * @param pipeline the aggregation pipeline * @param resultClass the class to decode each document into * @param the target document type of the iterable. 
- * @return a task containing the document if found locally or remotely. + * @return an iterable containing the result of the aggregation operation + */ + SyncAggregateIterable aggregate( + final List pipeline, + final Class resultClass); + + /** + * Inserts the provided document. If the document is missing an identifier, one will be + * generated. Begin syncing the document against the remote. + * + * @param document the document to insert + * @return the result of the insert one operation + */ + SyncInsertOneResult insertOneAndSync(final DocumentT document); + + /** + * Inserts one or more documents. If the documents are missing an identifier, they will be + * generated. Begin syncing the documents against the remote. + * + * @param documents the documents to insert + * @return the result of the insert many operation + */ + SyncInsertManyResult insertManyAndSync(final List documents); + + /** + * Removes at most one document from the collection that has been synchronized with the remote + * and matches the given filter. If no documents match, the collection is not + * modified. + * + * @param filter the query filter to apply the the delete operation + * @return the result of the remove one operation + */ + SyncDeleteResult deleteOne(final Bson filter); + + /** + * Removes all documents from the collection that have been synchronized with the remote + * and match the given query filter. If no documents + * match, the collection is not modified. + * + * @param filter the query filter to apply the the delete operation + * @return the result of the remove many operation + */ + SyncDeleteResult deleteMany(final Bson filter); + + /** + * Update a single document in the collection that has been synchronized with the remote + * according to the specified arguments. If the update results in an upsert, the newly upserted + * document will automatically become synchronized. + * + * @param filter a document describing the query filter, which may not be null. + * @param update a document describing the update, which may not be null. The update to + * apply must include only update operators. + * @return the result of the update one operation */ - ResultT findOneById(final BsonValue documentId, final Class resultClass); + SyncUpdateResult updateOne(final Bson filter, final Bson update); /** - * Updates a document by the given id. It is first searched for in the local synchronized cache - * and if not found and there is internet connectivity, it is searched for remotely. + * Update a single document that has been synchronized with the remote + * in the collection according to the specified arguments. If the update results in an upsert, + * the newly upserted document will automatically become synchronized. * - * @param documentId the _id of the document to search for. - * @param update the update specifier. - * @return a task containing the result of the local or remote update. + * @param filter a document describing the query filter, which may not be null. + * @param update a document describing the update, which may not be null. The update to + * apply must include only update operators. + * @param updateOptions the options to apply to the update operation + * @return the result of the update one operation */ - RemoteUpdateResult updateOneById(final BsonValue documentId, final Bson update); + SyncUpdateResult updateOne( + final Bson filter, + final Bson update, + final SyncUpdateOptions updateOptions); /** - * Inserts a single document and begins to synchronize it. 
+ * Update all documents in the collection that have been synchronized with the remote + * according to the specified arguments. If the update results in an upsert, + * the newly upserted document will automatically become synchronized. * - * @param document the document to insert and synchronize. - * @return the result of the insertion. + * @param filter a document describing the query filter, which may not be null. + * @param update a document describing the update, which may not be null. The update to + * apply must include only update operators. + * @return the result of the update many operation */ - RemoteInsertOneResult insertOneAndSync(final DocumentT document); + SyncUpdateResult updateMany(final Bson filter, final Bson update); /** - * Deletes a single document by the given id. It is first searched for in the local synchronized - * cache and if not found and there is internet connectivity, it is searched for remotely. + * Update all documents in the collection that have been synchronized with the remote + * according to the specified arguments. If the update results in an upsert, + * the newly upserted document will automatically become synchronized. * - * @param documentId the _id of the document to search for. - * @return a task containing the result of the local or remote update. + * @param filter a document describing the query filter, which may not be null. + * @param update a document describing the update, which may not be null. The update to + * apply must include only update operators. + * @param updateOptions the options to apply to the update operation + * @return the result of the update many operation */ - RemoteDeleteResult deleteOneById(final BsonValue documentId); + SyncUpdateResult updateMany( + final Bson filter, + final Bson update, + final SyncUpdateOptions updateOptions); } diff --git a/server/services/mongodb-remote/src/main/java/com/mongodb/stitch/server/services/mongodb/remote/SyncAggregateIterable.java b/server/services/mongodb-remote/src/main/java/com/mongodb/stitch/server/services/mongodb/remote/SyncAggregateIterable.java new file mode 100644 index 000000000..0e6c2ea6a --- /dev/null +++ b/server/services/mongodb-remote/src/main/java/com/mongodb/stitch/server/services/mongodb/remote/SyncAggregateIterable.java @@ -0,0 +1,25 @@ +/* + * Copyright 2018-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.mongodb.stitch.server.services.mongodb.remote; + +/** + * Iterable for aggregate. + * + * @param The type of the result. 
+ */ +public interface SyncAggregateIterable extends RemoteMongoIterable { +} diff --git a/server/services/mongodb-remote/src/main/java/com/mongodb/stitch/server/services/mongodb/remote/internal/SyncAggregateIterableImpl.java b/server/services/mongodb-remote/src/main/java/com/mongodb/stitch/server/services/mongodb/remote/internal/SyncAggregateIterableImpl.java new file mode 100644 index 000000000..8c7fdffea --- /dev/null +++ b/server/services/mongodb-remote/src/main/java/com/mongodb/stitch/server/services/mongodb/remote/internal/SyncAggregateIterableImpl.java @@ -0,0 +1,28 @@ +/* + * Copyright 2018-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.mongodb.stitch.server.services.mongodb.remote.internal; + +import com.mongodb.stitch.core.services.mongodb.remote.sync.CoreSyncAggregateIterable; +import com.mongodb.stitch.server.services.mongodb.remote.SyncAggregateIterable; + +public class SyncAggregateIterableImpl + extends RemoteMongoIterableImpl + implements SyncAggregateIterable { + SyncAggregateIterableImpl(final CoreSyncAggregateIterable iterable) { + super(iterable); + } +} diff --git a/server/services/mongodb-remote/src/main/java/com/mongodb/stitch/server/services/mongodb/remote/internal/SyncImpl.java b/server/services/mongodb-remote/src/main/java/com/mongodb/stitch/server/services/mongodb/remote/internal/SyncImpl.java index c943fd6e7..791a6f793 100644 --- a/server/services/mongodb-remote/src/main/java/com/mongodb/stitch/server/services/mongodb/remote/internal/SyncImpl.java +++ b/server/services/mongodb-remote/src/main/java/com/mongodb/stitch/server/services/mongodb/remote/internal/SyncImpl.java @@ -16,21 +16,27 @@ package com.mongodb.stitch.server.services.mongodb.remote.internal; -import com.mongodb.stitch.core.services.mongodb.remote.RemoteDeleteResult; -import com.mongodb.stitch.core.services.mongodb.remote.RemoteInsertOneResult; -import com.mongodb.stitch.core.services.mongodb.remote.RemoteUpdateResult; import com.mongodb.stitch.core.services.mongodb.remote.sync.ChangeEventListener; import com.mongodb.stitch.core.services.mongodb.remote.sync.ConflictHandler; import com.mongodb.stitch.core.services.mongodb.remote.sync.CoreSync; import com.mongodb.stitch.core.services.mongodb.remote.sync.ErrorListener; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncCountOptions; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncDeleteResult; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncInsertManyResult; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncInsertOneResult; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateOptions; +import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateResult; import com.mongodb.stitch.server.services.mongodb.remote.Sync; +import com.mongodb.stitch.server.services.mongodb.remote.SyncAggregateIterable; import com.mongodb.stitch.server.services.mongodb.remote.SyncFindIterable; +import java.util.List; import java.util.Set; 
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
+import org.bson.BsonDocument;
 import org.bson.BsonValue;
 import org.bson.conversions.Bson;
@@ -108,28 +114,72 @@ public SyncFindIterable find(final Bson filter,
   }
 
   @Override
-  public DocumentT findOneById(final BsonValue documentId) {
-    return proxy.findOneById(documentId);
+  public long count() {
+    return this.count(new BsonDocument());
   }
 
   @Override
-  public <ResultT> ResultT findOneById(final BsonValue documentId,
-                                       final Class<ResultT> resultClass) {
-    return proxy.findOneById(documentId, resultClass);
+  public long count(final Bson filter) {
+    return this.count(filter, new SyncCountOptions());
   }
 
   @Override
-  public RemoteDeleteResult deleteOneById(final BsonValue documentId) {
-    return proxy.deleteOneById(documentId);
+  public long count(final Bson filter, final SyncCountOptions options) {
+    return this.proxy.count(filter, options);
   }
 
   @Override
-  public RemoteInsertOneResult insertOneAndSync(final DocumentT document) {
-    return this.proxy.insertOneAndSync(document);
+  public SyncAggregateIterable<DocumentT> aggregate(final List<? extends Bson> pipeline) {
+    return new SyncAggregateIterableImpl<>(this.proxy.aggregate(pipeline));
   }
 
   @Override
-  public RemoteUpdateResult updateOneById(final BsonValue documentId, final Bson update) {
-    return proxy.updateOneById(documentId, update);
+  public <ResultT> SyncAggregateIterable<ResultT> aggregate(final List<? extends Bson> pipeline,
+                                                            final Class<ResultT> resultClass) {
+    return new SyncAggregateIterableImpl<>(this.proxy.aggregate(pipeline, resultClass));
+  }
+
+  @Override
+  public SyncInsertOneResult insertOneAndSync(final DocumentT document) {
+    return proxy.insertOneAndSync(document);
+  }
+
+  @Override
+  public SyncInsertManyResult insertManyAndSync(final List<DocumentT> documents) {
+    return proxy.insertManyAndSync(documents);
+  }
+
+  @Override
+  public SyncUpdateResult updateOne(final Bson filter, final Bson update) {
+    return proxy.updateOne(filter, update);
+  }
+
+  @Override
+  public SyncUpdateResult updateOne(final Bson filter,
+                                    final Bson update,
+                                    final SyncUpdateOptions updateOptions) {
+    return proxy.updateOne(filter, update, updateOptions);
+  }
+
+  @Override
+  public SyncUpdateResult updateMany(final Bson filter, final Bson update) {
+    return proxy.updateMany(filter, update);
+  }
+
+  @Override
+  public SyncUpdateResult updateMany(final Bson filter,
+                                     final Bson update,
+                                     final SyncUpdateOptions updateOptions) {
+    return proxy.updateMany(filter, update, updateOptions);
+  }
+
+  @Override
+  public SyncDeleteResult deleteOne(final Bson filter) {
+    return proxy.deleteOne(filter);
+  }
+
+  @Override
+  public SyncDeleteResult deleteMany(final Bson filter) {
+    return proxy.deleteMany(filter);
   }
 }
diff --git a/server/services/mongodb-remote/src/test/java/com/mongodb/stitch/server/services/mongodb/remote/internal/SyncMongoClientIntTests.kt b/server/services/mongodb-remote/src/test/java/com/mongodb/stitch/server/services/mongodb/remote/internal/SyncMongoClientIntTests.kt
index 6baae6cb6..24c509177 100644
--- a/server/services/mongodb-remote/src/test/java/com/mongodb/stitch/server/services/mongodb/remote/internal/SyncMongoClientIntTests.kt
+++ b/server/services/mongodb-remote/src/test/java/com/mongodb/stitch/server/services/mongodb/remote/internal/SyncMongoClientIntTests.kt
@@ -15,6 +15,11 @@ import com.mongodb.stitch.core.services.mongodb.remote.RemoteUpdateResult
 import com.mongodb.stitch.core.services.mongodb.remote.sync.ChangeEventListener
 import com.mongodb.stitch.core.services.mongodb.remote.sync.ConflictHandler
 import com.mongodb.stitch.core.services.mongodb.remote.sync.ErrorListener
+import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncDeleteResult
+import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncInsertManyResult
+import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncInsertOneResult
+import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateOptions
+import com.mongodb.stitch.core.services.mongodb.remote.sync.SyncUpdateResult
 import com.mongodb.stitch.core.services.mongodb.remote.sync.internal.DataSynchronizer
 import com.mongodb.stitch.core.testutils.sync.ProxyRemoteMethods
 import com.mongodb.stitch.core.testutils.sync.ProxySyncMethods
@@ -69,20 +74,28 @@ class SyncMongoClientIntTests : BaseStitchServerIntTest(), SyncIntTestRunner {
             sync.syncOne(id)
         }
 
-        override fun insertOneAndSync(document: Document): RemoteInsertOneResult {
+        override fun insertOneAndSync(document: Document): SyncInsertOneResult {
             return sync.insertOneAndSync(document)
         }
 
-        override fun findOneById(id: BsonValue): Document? {
-            return sync.findOneById(id)
+        override fun insertManyAndSync(documents: List<Document>): SyncInsertManyResult {
+            return sync.insertManyAndSync(documents)
         }
 
-        override fun updateOneById(documentId: BsonValue, update: Bson): RemoteUpdateResult {
-            return sync.updateOneById(documentId, update)
+        override fun updateOne(filter: Bson, update: Bson, updateOptions: SyncUpdateOptions): SyncUpdateResult {
+            return sync.updateOne(filter, update, updateOptions)
         }
 
-        override fun deleteOneById(documentId: BsonValue): RemoteDeleteResult {
-            return sync.deleteOneById(documentId)
+        override fun updateMany(filter: Bson, update: Bson, updateOptions: SyncUpdateOptions): SyncUpdateResult {
+            return sync.updateMany(filter, update, updateOptions)
+        }
+
+        override fun deleteOne(filter: Bson): SyncDeleteResult {
+            return sync.deleteOne(filter)
+        }
+
+        override fun deleteMany(filter: Bson): SyncDeleteResult {
+            return sync.deleteMany(filter)
         }
 
         override fun getSyncedIds(): Set<BsonValue> {
@@ -93,7 +106,7 @@ class SyncMongoClientIntTests : BaseStitchServerIntTest(), SyncIntTestRunner {
             sync.desyncOne(id)
         }
 
-        override fun find(filter: Bson): Iterable {
+        override fun find(filter: Bson): Iterable {
            return sync.find(filter)
        }
 
@@ -104,6 +117,14 @@ class SyncMongoClientIntTests : BaseStitchServerIntTest(), SyncIntTestRunner {
        override fun getPausedDocumentIds(): Set<BsonValue> {
            return sync.pausedDocumentIds
        }
+
+        override fun count(filter: Bson): Long {
+            return sync.count(filter)
+        }
+
+        override fun aggregate(pipeline: List<Bson>): Iterable {
+            return sync.aggregate(pipeline)
+        }
     }
 
     private val mongodbUriProp = "test.stitch.mongodbURI"
@@ -276,11 +297,6 @@ class SyncMongoClientIntTests : BaseStitchServerIntTest(), SyncIntTestRunner {
        testProxy.testInsertInsertConflict()
    }
 
-    @Test
-    override fun testPausedDocumentConfig() {
-        testProxy.testPausedDocumentConfig()
-    }
-
     @Test
     override fun testConfigure() {
         testProxy.testConfigure()
@@ -321,6 +337,26 @@ class SyncMongoClientIntTests : BaseStitchServerIntTest(), SyncIntTestRunner {
        testProxy.testResumeSyncForDocumentResumesSync()
    }
 
+    @Test
+    override fun testReadsBeforeAndAfterSync() {
+        testProxy.testReadsBeforeAndAfterSync()
+    }
+
+    @Test
+    override fun testInsertManyNoConflicts() {
+        testProxy.testInsertManyNoConflicts()
+    }
+
+    @Test
+    override fun testUpdateManyNoConflicts() {
+        testProxy.testUpdateManyNoConflicts()
+    }
+
+    @Test
+    override fun testDeleteManyNoConflicts() {
+        testProxy.testDeleteManyNoConflicts()
+    }
+
     /**
      * Get the uri for where mongodb is running locally.
      */
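
A note for callers migrating off the removed *ById operations: they map directly onto the new filter-based calls. The sketch below is illustrative only and is not part of the patch; it assumes a server-SDK Sync<Document> handle (here called sync) for a collection that is already being synced, and the "done" field is a placeholder.

import com.mongodb.stitch.server.services.mongodb.remote.Sync;
import org.bson.Document;
import org.bson.types.ObjectId;

public class SyncCrudMigrationSketch {
  // Before this change the same operations were written as
  // sync.updateOneById(new BsonObjectId(id), update) and sync.deleteOneById(new BsonObjectId(id)).
  public static void updateThenDelete(final Sync<Document> sync, final ObjectId id) {
    // Address the document through an "_id" filter instead of a raw document id.
    sync.updateOne(
        new Document("_id", id),
        new Document("$set", new Document("done", true)));

    // Remove the same document from the local synced collection, again by filter.
    sync.deleteOne(new Document("_id", id));
  }
}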
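The new local read operations can be exercised the same way. This sketch makes the same assumptions (a Sync<Document> handle named sync, a placeholder "done" field) and uses only the count and aggregate signatures added in SyncImpl.java above; SyncAggregateIterable extends RemoteMongoIterable and, as the test shim above relies on, can be consumed as a plain Iterable.

import com.mongodb.stitch.server.services.mongodb.remote.Sync;
import com.mongodb.stitch.server.services.mongodb.remote.SyncAggregateIterable;
import java.util.Collections;
import java.util.List;
import org.bson.Document;
import org.bson.conversions.Bson;

public class SyncReadSketch {
  public static void countAndAggregate(final Sync<Document> sync) {
    // Count the locally synced documents that match a filter.
    final long doneCount = sync.count(new Document("done", true));
    System.out.println("done documents: " + doneCount);

    // Run an aggregation pipeline over the synced documents and iterate the results.
    final List<Bson> pipeline =
        Collections.singletonList(new Document("$match", new Document("done", false)));
    final SyncAggregateIterable<Document> pending = sync.aggregate(pipeline);
    for (final Document doc : pending) {
      System.out.println(doc.toJson());
    }
  }
}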