author     Arun Banala <arun.banala@mongodb.com>  2020-08-06 10:58:20 +0100
committer  Evergreen Agent <no-reply@evergreen.mongodb.com>  2020-08-13 12:58:34 +0000
commit     a41abb64d17f09f7c6ad48bc39730d3fc25fc2ed (patch)
tree       2ae8f3eceb92d8b63307d9b0eba7e5536d070ce5
parent     39794a19f5e0d24260eea783db1312bd69077324 (diff)
download   mongo-a41abb64d17f09f7c6ad48bc39730d3fc25fc2ed.tar.gz
SERVER-48920 Use document deltas to determine whether update affects indexes
-rw-r--r--  jstests/replsets/v2_delta_oplog_entries.js  48
-rw-r--r--  src/mongo/db/repl/SConscript  1
-rw-r--r--  src/mongo/db/update/SConscript  12
-rw-r--r--  src/mongo/db/update/delta_executor.cpp  52
-rw-r--r--  src/mongo/db/update/delta_executor.h  9
-rw-r--r--  src/mongo/db/update/delta_executor_test.cpp  298
-rw-r--r--  src/mongo/db/update/document_diff_applier.cpp  305
-rw-r--r--  src/mongo/db/update/document_diff_applier.h  12
-rw-r--r--  src/mongo/db/update/document_diff_applier_test.cpp  44
-rw-r--r--  src/mongo/db/update/document_diff_serialization_test.cpp  85
-rw-r--r--  src/mongo/db/update/document_diff_test.cpp  7
-rw-r--r--  src/mongo/db/update/document_diff_test_helpers.cpp  126
-rw-r--r--  src/mongo/db/update/document_diff_test_helpers.h  91
-rw-r--r--  src/mongo/db/update/update_driver.cpp  4
-rw-r--r--  src/mongo/db/update/update_driver_test.cpp  118
15 files changed, 868 insertions, 344 deletions
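
For orientation while reading the hunks below: the delta diffs exercised by this patch describe changes with 'd' (deleted fields), 'u' (updated fields), 'i' (inserted fields) and 's<field>' (sub-diff) sections, while array sub-diffs carry 'a: true', an optional 'l' (new length) and per-index 'u<N>'/'s<N>' entries. The following sketch is illustrative only (the document, the diff and the index path are invented, not part of the patch); it shows the new applyDiff() signature introduced here, which now also reports whether an indexed path might be affected:

    #include "mongo/bson/json.h"
    #include "mongo/db/field_ref.h"
    #include "mongo/db/update/document_diff_applier.h"
    #include "mongo/db/update_index_data.h"

    namespace mongo {
    void applyDeltaExample() {
        // Pre-image and a delta that a {$set: {'subObj.c': 'foo'}} style update could
        // produce: a sub-diff into 'subObj' ('ssubObj') inserting the new field 'c'.
        BSONObj pre = fromjson("{padding: 'x', subObj: {a: 1, b: 2}}");
        BSONObj diff = fromjson("{ssubObj: {i: {c: 'foo'}}}");

        UpdateIndexData indexData;
        indexData.addPath(FieldRef("subObj.a"));  // hypothetical index on 'subObj.a'

        auto out = doc_diff::applyDiff(pre, diff, &indexData);
        // out.postImage:       {padding: 'x', subObj: {a: 1, b: 2, c: 'foo'}}
        // out.indexesAffected: false, because the diff only touches 'subObj.c', which
        // diverges from the indexed path 'subObj.a' after their common parent.
    }
    }  // namespace mongo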
diff --git a/jstests/replsets/v2_delta_oplog_entries.js b/jstests/replsets/v2_delta_oplog_entries.js
index 3ce0c4db39b..bc26fd4e1cb 100644
--- a/jstests/replsets/v2_delta_oplog_entries.js
+++ b/jstests/replsets/v2_delta_oplog_entries.js
@@ -25,17 +25,13 @@ const secondary = rst.getSecondary();
const secondaryColl = secondary.getDB("test").coll;
// Used for padding documents, in order to make full replacements expensive.
-function makeGiantStr() {
- let s = "";
- for (let i = 0; i < 1024; i++) {
- s += "_";
- }
- return s;
-}
-
-const kGiantStr = makeGiantStr();
+const kGiantStr = "_".repeat(100);
const kMediumLengthStr = "zzzzzzzzzzzzzzzzzzzzzzzzzz";
+// Create indexes on all the fields used throughout the tests, so we can verify that the index
+// data is correctly updated on all the nodes.
+primaryColl.createIndex({padding: 1, 'x.1': 1, 'a.1': -1, 'subObj.a': 1});
+
let idGenGlob = 0;
function generateId() {
return idGenGlob++;
@@ -110,9 +106,9 @@ let id;
// Removing fields.
id = generateId();
testUpdateReplicates({
- preImage: {_id: id, x: 3, y: 3, giantStr: kGiantStr},
+ preImage: {_id: id, x: 3, y: 3, padding: kGiantStr},
pipeline: [{$unset: ["x", "y"]}],
- postImage: {_id: id, giantStr: kGiantStr},
+ postImage: {_id: id, padding: kGiantStr},
expectedOplogEntry: kExpectDeltaEntry
});
@@ -166,9 +162,9 @@ testUpdateReplicates({
// Adding a field to a sub object while removing a top level field.
id = generateId();
testUpdateReplicates({
- preImage: {_id: id, subObj: {a: 1, b: 2}, toRemove: "foo", giantStr: kGiantStr},
- pipeline: [{$project: {subObj: 1, giantStr: 1}}, {$set: {"subObj.c": "foo"}}],
- postImage: {_id: id, subObj: {a: 1, b: 2, c: "foo"}, giantStr: kGiantStr},
+ preImage: {_id: id, subObj: {a: 1, b: 2}, toRemove: "foo", padding: kGiantStr},
+ pipeline: [{$project: {subObj: 1, padding: 1}}, {$set: {"subObj.c": "foo"}}],
+ postImage: {_id: id, subObj: {a: 1, b: 2, c: "foo"}, padding: kGiantStr},
expectedOplogEntry: kExpectDeltaEntry
});
@@ -184,9 +180,9 @@ testUpdateReplicates({
// Inclusion projection dropping a subfield (subObj.toRemove).
id = generateId();
testUpdateReplicates({
- preImage: {_id: id, x: "foo", subObj: {a: 1, toRemove: 2}, giantStr: kGiantStr},
- pipeline: [{$project: {subObj: {a: 1}, giantStr: 1}}],
- postImage: {_id: id, subObj: {a: 1}, giantStr: kGiantStr},
+ preImage: {_id: id, x: "foo", subObj: {a: 1, toRemove: 2}, padding: kGiantStr},
+ pipeline: [{$project: {subObj: {a: 1}, padding: 1}}],
+ postImage: {_id: id, subObj: {a: 1}, padding: kGiantStr},
expectedOplogEntry: kExpectDeltaEntry
});
@@ -203,9 +199,9 @@ testUpdateReplicates({
// delta oplog entries instead of doing a full replacement.
id = generateId();
testUpdateReplicates({
- preImage: {_id: id, x: "foo", subObj: {a: 1, b: 2}, giantStr: kGiantStr},
- pipeline: [{$replaceRoot: {newRoot: {x: "bar", subObj: {a: 1, b: 2}, giantStr: kGiantStr}}}],
- postImage: {_id: id, x: "bar", subObj: {a: 1, b: 2}, giantStr: kGiantStr},
+ preImage: {_id: id, x: "foo", subObj: {a: 1, b: 2}, padding: kGiantStr},
+ pipeline: [{$replaceRoot: {newRoot: {x: "bar", subObj: {a: 1, b: 2}, padding: kGiantStr}}}],
+ postImage: {_id: id, x: "bar", subObj: {a: 1, b: 2}, padding: kGiantStr},
expectedOplogEntry: kExpectDeltaEntry
});
@@ -234,9 +230,9 @@ testUpdateReplicates({
// Setting a sub object inside an array.
id = generateId();
testUpdateReplicates({
- preImage: {_id: id, x: kGiantStr, arrField: [{x: 1}, {x: 2}]},
+ preImage: {_id: id, padding: kGiantStr, arrField: [{x: 1}, {x: 2}]},
pipeline: [{$set: {"arrField.x": 5}}],
- postImage: {_id: id, x: kGiantStr, arrField: [{x: 5}, {x: 5}]},
+ postImage: {_id: id, padding: kGiantStr, arrField: [{x: 5}, {x: 5}]},
expectedOplogEntry: kExpectDeltaEntry
});
@@ -261,9 +257,9 @@ testUpdateReplicates({
// Similar case of reordering fields.
id = generateId();
testUpdateReplicates({
- preImage: {_id: id, p: kGiantStr, a: 1, b: 1, c: 1, d: 1},
- pipeline: [{$replaceRoot: {newRoot: {_id: id, p: kGiantStr, a: 1, c: 1, b: 1, d: 1}}}],
- postImage: {_id: id, p: kGiantStr, a: 1, c: 1, b: 1, d: 1},
+ preImage: {_id: id, padding: kGiantStr, a: 1, b: 1, c: 1, d: 1},
+ pipeline: [{$replaceRoot: {newRoot: {_id: id, padding: kGiantStr, a: 1, c: 1, b: 1, d: 1}}}],
+ postImage: {_id: id, padding: kGiantStr, a: 1, c: 1, b: 1, d: 1},
expectedOplogEntry: kExpectDeltaEntry
});
@@ -338,7 +334,7 @@ function generateDeepObj(depth, maxDepth, value) {
};
}
-// Verify that diffing the deepest objects allowed by the js client, can produce a delta op-log
+// Verify that diffing the deepest objects allowed by the JS client can produce delta op-log
// entries.
id = generateId();
let path = "subObj.".repeat(146) + "subObj";
diff --git a/src/mongo/db/repl/SConscript b/src/mongo/db/repl/SConscript
index f6b8e2054f2..e5efba6af97 100644
--- a/src/mongo/db/repl/SConscript
+++ b/src/mongo/db/repl/SConscript
@@ -1407,6 +1407,7 @@ env.CppUnitTest(
'$BUILD_DIR/mongo/db/stats/counters',
'$BUILD_DIR/mongo/db/storage/ephemeral_for_test/storage_ephemeral_for_test_core',
'$BUILD_DIR/mongo/db/transaction',
+ '$BUILD_DIR/mongo/db/update/update_test_helpers',
'$BUILD_DIR/mongo/dbtests/mocklib',
'$BUILD_DIR/mongo/executor/network_interface_factory',
'$BUILD_DIR/mongo/executor/network_interface_mock',
diff --git a/src/mongo/db/update/SConscript b/src/mongo/db/update/SConscript
index 9a93690dd21..d8de4629741 100644
--- a/src/mongo/db/update/SConscript
+++ b/src/mongo/db/update/SConscript
@@ -54,6 +54,7 @@ env.Library(
env.Library(
target='update',
source=[
+ 'delta_executor.cpp',
'document_diff_applier.cpp',
'document_diff_calculator.cpp',
'document_diff_serialization.cpp',
@@ -81,6 +82,15 @@ env.Library(
],
)
+env.Library(
+ target='update_test_helpers',
+ source=[
+ 'document_diff_test_helpers.cpp',
+ ],
+ LIBDEPS=[
+ 'update',
+ ],
+)
env.CppUnitTest(
target='db_update_test',
source=[
@@ -89,6 +99,7 @@ env.CppUnitTest(
'bit_node_test.cpp',
'compare_node_test.cpp',
'current_date_node_test.cpp',
+ 'delta_executor_test.cpp',
'document_diff_calculator_test.cpp',
'document_diff_serialization_test.cpp',
'document_diff_applier_test.cpp',
@@ -125,5 +136,6 @@ env.CppUnitTest(
'update',
'update_common',
'update_driver',
+ 'update_test_helpers',
],
)
diff --git a/src/mongo/db/update/delta_executor.cpp b/src/mongo/db/update/delta_executor.cpp
new file mode 100644
index 00000000000..466e66800f8
--- /dev/null
+++ b/src/mongo/db/update/delta_executor.cpp
@@ -0,0 +1,52 @@
+/**
+ * Copyright (C) 2020-present MongoDB, Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the Server Side Public License, version 1,
+ * as published by MongoDB, Inc.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * Server Side Public License for more details.
+ *
+ * You should have received a copy of the Server Side Public License
+ * along with this program. If not, see
+ * <http://www.mongodb.com/licensing/server-side-public-license>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the Server Side Public License in all respects for
+ * all of the code used other than as permitted herein. If you modify file(s)
+ * with this exception, you may extend this exception to your version of the
+ * file(s), but you are not obligated to do so. If you do not wish to do so,
+ * delete this exception statement from your version. If you delete this
+ * exception statement from all source files in the program, then also delete
+ * it in the license file.
+ */
+
+#include "mongo/platform/basic.h"
+
+#include "mongo/db/update/delta_executor.h"
+
+#include "mongo/bson/mutable/document.h"
+#include "mongo/db/update/object_replace_executor.h"
+
+namespace mongo {
+
+DeltaExecutor::ApplyResult DeltaExecutor::applyUpdate(
+ UpdateExecutor::ApplyParams applyParams) const {
+ const auto originalDoc = applyParams.element.getDocument().getObject();
+
+ auto applyDiffOutput = doc_diff::applyDiff(originalDoc, _diff, applyParams.indexData);
+ const auto& postImage = applyDiffOutput.postImage;
+ auto postImageHasId = postImage.hasField("_id");
+
+ auto result = ObjectReplaceExecutor::applyReplacementUpdate(
+ std::move(applyParams), postImage, postImageHasId);
+ result.indexesAffected = applyDiffOutput.indexesAffected;
+ return result;
+}
+} // namespace mongo
\ No newline at end of file
diff --git a/src/mongo/db/update/delta_executor.h b/src/mongo/db/update/delta_executor.h
index 18d97f299e5..f5bc8c162ca 100644
--- a/src/mongo/db/update/delta_executor.h
+++ b/src/mongo/db/update/delta_executor.h
@@ -47,14 +47,7 @@ public:
*/
explicit DeltaExecutor(doc_diff::Diff diff) : _diff(std::move(diff)) {}
- ApplyResult applyUpdate(ApplyParams applyParams) const final {
- const auto originalDoc = applyParams.element.getDocument().getObject();
- auto postImage = doc_diff::applyDiff(originalDoc, _diff);
- auto postImageHasId = postImage.hasField("_id");
-
- return ObjectReplaceExecutor::applyReplacementUpdate(
- applyParams, postImage, postImageHasId);
- }
+ ApplyResult applyUpdate(ApplyParams applyParams) const final;
Value serialize() const final {
// Delta updates are only applied internally on secondaries. They are never passed between
diff --git a/src/mongo/db/update/delta_executor_test.cpp b/src/mongo/db/update/delta_executor_test.cpp
new file mode 100644
index 00000000000..32dd505adca
--- /dev/null
+++ b/src/mongo/db/update/delta_executor_test.cpp
@@ -0,0 +1,298 @@
+/**
+ * Copyright (C) 2020-present MongoDB, Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the Server Side Public License, version 1,
+ * as published by MongoDB, Inc.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * Server Side Public License for more details.
+ *
+ * You should have received a copy of the Server Side Public License
+ * along with this program. If not, see
+ * <http://www.mongodb.com/licensing/server-side-public-license>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the Server Side Public License in all respects for
+ * all of the code used other than as permitted herein. If you modify file(s)
+ * with this exception, you may extend this exception to your version of the
+ * file(s), but you are not obligated to do so. If you do not wish to do so,
+ * delete this exception statement from your version. If you delete this
+ * exception statement from all source files in the program, then also delete
+ * it in the license file.
+ */
+
+#define MONGO_LOGV2_DEFAULT_COMPONENT ::mongo::logv2::LogComponent::kTest
+
+#include "mongo/platform/basic.h"
+
+#include "mongo/bson/json.h"
+#include "mongo/bson/mutable/document.h"
+#include "mongo/db/update/delta_executor.h"
+#include "mongo/logv2/log.h"
+#include "mongo/unittest/unittest.h"
+
+namespace mongo {
+namespace {
+
+TEST(DeltaExecutorTest, Delete) {
+ BSONObj preImage(fromjson("{f1: {a: {b: {c: 1}, c: 1}}}"));
+ UpdateIndexData indexData;
+ indexData.addPath(FieldRef("p.a.b"));
+ indexData.addPath(FieldRef("f1.a.b"));
+ FieldRefSet fieldRefSet;
+ {
+ // When a path in the diff is a prefix of index path.
+ auto doc = mutablebson::Document(preImage);
+ UpdateExecutor::ApplyParams params(doc.root(), fieldRefSet);
+ params.indexData = &indexData;
+ DeltaExecutor test(fromjson("{d: {f1: false, f2: false, f3: false}}"));
+ auto result = test.applyUpdate(params);
+ ASSERT_BSONOBJ_BINARY_EQ(params.element.getDocument().getObject(), BSONObj());
+ ASSERT(result.indexesAffected);
+ }
+
+ {
+ // When a path in the diff is same as index path.
+ auto doc = mutablebson::Document(preImage);
+ UpdateExecutor::ApplyParams params(doc.root(), fieldRefSet);
+ params.indexData = &indexData;
+ DeltaExecutor test(fromjson("{sf1: {sa: {d: {p: false, c: false, b: false}}}}"));
+ auto result = test.applyUpdate(params);
+ ASSERT_BSONOBJ_BINARY_EQ(params.element.getDocument().getObject(),
+ fromjson("{f1: {a: {}}}"));
+ ASSERT(result.indexesAffected);
+ }
+ {
+ // When the index path is a prefix of a path in the diff.
+ auto doc = mutablebson::Document(preImage);
+ UpdateExecutor::ApplyParams params(doc.root(), fieldRefSet);
+ params.indexData = &indexData;
+ auto test = DeltaExecutor(fromjson("{sf1: {sa: {sb: {d: {c: false}}}}}"));
+ auto result = test.applyUpdate(params);
+ ASSERT_BSONOBJ_BINARY_EQ(params.element.getDocument().getObject(),
+ fromjson("{f1: {a: {b: {}, c: 1}}}"));
+ ASSERT(result.indexesAffected);
+ }
+ {
+ // With common parent, but path diverges.
+ auto doc = mutablebson::Document(preImage);
+ UpdateExecutor::ApplyParams params(doc.root(), fieldRefSet);
+ params.indexData = &indexData;
+ auto test = DeltaExecutor(fromjson("{sf1: {sa: {d: {c: false}}}}"));
+ auto result = test.applyUpdate(params);
+ ASSERT_BSONOBJ_BINARY_EQ(params.element.getDocument().getObject(),
+ fromjson("{f1: {a: {b: {c: 1}}}}"));
+ ASSERT(!result.indexesAffected);
+ }
+}
+
+TEST(DeltaExecutorTest, Update) {
+ BSONObj preImage(fromjson("{f1: {a: {b: {c: 1}, c: 1}}}"));
+ UpdateIndexData indexData;
+ indexData.addPath(FieldRef("p.a.b"));
+ indexData.addPath(FieldRef("f1.a.b"));
+ FieldRefSet fieldRefSet;
+ {
+ // When a path in the diff is a prefix of index path.
+ auto doc = mutablebson::Document(preImage);
+ UpdateExecutor::ApplyParams params(doc.root(), fieldRefSet);
+ params.indexData = &indexData;
+ DeltaExecutor test(fromjson("{u: {f1: false, f2: false, f3: false}}"));
+ auto result = test.applyUpdate(params);
+ ASSERT_BSONOBJ_BINARY_EQ(params.element.getDocument().getObject(),
+ fromjson("{f1: false, f2: false, f3: false}"));
+ ASSERT(result.indexesAffected);
+ }
+ {
+ // When a path in the diff is same as index path.
+ auto doc = mutablebson::Document(preImage);
+ UpdateExecutor::ApplyParams params(doc.root(), fieldRefSet);
+ params.indexData = &indexData;
+ auto test = DeltaExecutor(fromjson("{sf1: {sa: {u: {p: false, c: false, b: false}}}}"));
+ auto result = test.applyUpdate(params);
+ ASSERT_BSONOBJ_BINARY_EQ(params.element.getDocument().getObject(),
+ fromjson("{f1: {a: {b: false, c: false, p: false}}}"));
+ ASSERT(result.indexesAffected);
+ }
+ {
+ // When the index path is a prefix of a path in the diff.
+ auto doc = mutablebson::Document(preImage);
+ UpdateExecutor::ApplyParams params(doc.root(), fieldRefSet);
+ params.indexData = &indexData;
+ auto test = DeltaExecutor(fromjson("{sf1: {sa: {sb: {u: {c: false}}}}}"));
+ auto result = test.applyUpdate(params);
+ ASSERT_BSONOBJ_BINARY_EQ(params.element.getDocument().getObject(),
+ fromjson("{f1: {a: {b: {c: false}, c: 1}}}"));
+ ASSERT(result.indexesAffected);
+ }
+ {
+ // With common parent, but path diverges.
+ auto doc = mutablebson::Document(preImage);
+ UpdateExecutor::ApplyParams params(doc.root(), fieldRefSet);
+ params.indexData = &indexData;
+ auto test = DeltaExecutor(fromjson("{sf1: {sa: {u: {c: false}}}}"));
+ auto result = test.applyUpdate(params);
+ ASSERT_BSONOBJ_BINARY_EQ(params.element.getDocument().getObject(),
+ fromjson("{f1: {a: {b: {c: 1}, c: false}}}"));
+ ASSERT(!result.indexesAffected);
+ }
+}
+
+TEST(DeltaExecutorTest, Insert) {
+ UpdateIndexData indexData;
+ indexData.addPath(FieldRef("p.a.b"));
+ // 'UpdateIndexData' will canonicalize the path and remove all numeric components. So the '2'
+ // and '33' components should not matter.
+ indexData.addPath(FieldRef("f1.2.a.33.b"));
+ FieldRefSet fieldRefSet;
+ {
+ // When a path in the diff is a prefix of index path.
+ auto doc = mutablebson::Document(BSONObj());
+ UpdateExecutor::ApplyParams params(doc.root(), fieldRefSet);
+ params.indexData = &indexData;
+ DeltaExecutor test(fromjson("{i: {f1: false, f2: false, f3: false}}"));
+ auto result = test.applyUpdate(params);
+ ASSERT_BSONOBJ_BINARY_EQ(params.element.getDocument().getObject(),
+ fromjson("{f1: false, f2: false, f3: false}"));
+ ASSERT(result.indexesAffected);
+ }
+ {
+ // When a path in the diff is same as index path.
+ auto doc = mutablebson::Document(fromjson("{f1: {a: {c: true}}}}"));
+ UpdateExecutor::ApplyParams params(doc.root(), fieldRefSet);
+ params.indexData = &indexData;
+ auto test = DeltaExecutor(fromjson("{sf1: {sa: {i: {p: false, c: false, b: false}}}}"));
+ auto result = test.applyUpdate(params);
+ ASSERT_BSONOBJ_BINARY_EQ(params.element.getDocument().getObject(),
+ fromjson("{f1: {a: {p: false, c: false, b: false}}}"));
+ ASSERT(result.indexesAffected);
+ }
+ {
+ // When the index path is a prefix of a path in the diff.
+ auto doc = mutablebson::Document(fromjson("{f1: {a: {b: {c: {e: 1}}}}}"));
+ UpdateExecutor::ApplyParams params(doc.root(), fieldRefSet);
+ params.indexData = &indexData;
+ auto test = DeltaExecutor(fromjson("{sf1: {sa: {sb: {sc: {i : {d: 2} }}}}}"));
+ auto result = test.applyUpdate(params);
+ ASSERT_BSONOBJ_BINARY_EQ(params.element.getDocument().getObject(),
+ fromjson("{f1: {a: {b: {c: {e: 1, d: 2}}}}}"));
+ ASSERT(result.indexesAffected);
+ }
+ {
+ // With common parent, but path diverges.
+ auto doc = mutablebson::Document(fromjson("{f1: {a: {b: {c: 1}}}}"));
+ UpdateExecutor::ApplyParams params(doc.root(), fieldRefSet);
+ params.indexData = &indexData;
+ auto test = DeltaExecutor(fromjson("{sf1: {sa: {i: {c: 2}}}}"));
+ auto result = test.applyUpdate(params);
+ ASSERT_BSONOBJ_BINARY_EQ(params.element.getDocument().getObject(),
+ fromjson("{f1: {a: {b: {c: 1}, c: 2}}}"));
+ ASSERT(!result.indexesAffected);
+ }
+}
+
+TEST(DeltaExecutorTest, ArraysInIndexPath) {
+ BSONObj preImage(fromjson("{f1: [{a: {b: {c: 1}, c: 1}}, 1]}"));
+ UpdateIndexData indexData;
+ indexData.addPath(FieldRef("p.a.b"));
+ // Numeric components will be ignored, so they should not matter.
+ indexData.addPath(FieldRef("f1.9.a.10.b"));
+ FieldRefSet fieldRefSet;
+ {
+ // Test resize.
+ auto doc = mutablebson::Document(preImage);
+ UpdateExecutor::ApplyParams params(doc.root(), fieldRefSet);
+ params.indexData = &indexData;
+ DeltaExecutor test(fromjson("{sf1: {a: true, l: 1}}"));
+ auto result = test.applyUpdate(params);
+ ASSERT_BSONOBJ_BINARY_EQ(params.element.getDocument().getObject(),
+ fromjson("{f1: [{a: {b: {c: 1}, c: 1}}]}"));
+ ASSERT(result.indexesAffected);
+ }
+ {
+ // When the index path is a prefix of a path in the diff and also involves numeric
+ // components along the way. The numeric components should always be ignored.
+ auto doc = mutablebson::Document(preImage);
+ UpdateExecutor::ApplyParams params(doc.root(), fieldRefSet);
+ params.indexData = &indexData;
+ auto test = DeltaExecutor(fromjson("{sf1: {a: true, s0: {sa: {sb: {i: {d: 1} }}}}}"));
+ auto result = test.applyUpdate(params);
+ ASSERT_BSONOBJ_BINARY_EQ(params.element.getDocument().getObject(),
+ fromjson("{f1: [{a: {b: {c: 1, d: 1}, c: 1}}, 1]}"));
+ ASSERT(result.indexesAffected);
+ }
+ {
+ // When inserting a sub-object into array, and the sub-object diverges from the index path.
+ auto doc = mutablebson::Document(preImage);
+ UpdateExecutor::ApplyParams params(doc.root(), fieldRefSet);
+ params.indexData = &indexData;
+ auto test = DeltaExecutor(fromjson("{sf1: {a: true, u2: {b: 1}}}"));
+ auto result = test.applyUpdate(params);
+ ASSERT_BSONOBJ_BINARY_EQ(params.element.getDocument().getObject(),
+ fromjson("{f1: [{a: {b: {c: 1}, c: 1}}, 1, {b:1}]}"));
+ ASSERT(result.indexesAffected);
+ }
+ {
+ // When a common array path element is updated, but the paths diverge at the last element.
+ auto doc = mutablebson::Document(preImage);
+ UpdateExecutor::ApplyParams params(doc.root(), fieldRefSet);
+ params.indexData = &indexData;
+ auto test = DeltaExecutor(fromjson("{sf1: {a: true, s0: {sa: {d: {c: false} }}}}"));
+ auto result = test.applyUpdate(params);
+ ASSERT_BSONOBJ_BINARY_EQ(params.element.getDocument().getObject(),
+ fromjson("{f1: [{a: {b: {c: 1}}}, 1]}"));
+ ASSERT(!result.indexesAffected);
+ }
+}
+
+TEST(DeltaExecutorTest, ArraysAfterIndexPath) {
+ BSONObj preImage(fromjson("{f1: {a: {b: [{c: 1}, 2]}}}"));
+ UpdateIndexData indexData;
+ indexData.addPath(FieldRef("p.a.b"));
+ // 'UpdateIndexData' will canonicalize the path and remove all numeric components. So the '9'
+ // and '10' components should not matter.
+ indexData.addPath(FieldRef("f1.9.a.10"));
+ FieldRefSet fieldRefSet;
+ {
+ // Test resize.
+ auto doc = mutablebson::Document(preImage);
+ UpdateExecutor::ApplyParams params(doc.root(), fieldRefSet);
+ params.indexData = &indexData;
+ DeltaExecutor test(fromjson("{sf1: {sa: {sb: {a: true, l: 1}}}}"));
+ auto result = test.applyUpdate(params);
+ ASSERT_BSONOBJ_BINARY_EQ(params.element.getDocument().getObject(),
+ fromjson("{f1: {a: {b: [{c: 1}]}}}"));
+ ASSERT(result.indexesAffected);
+ }
+ {
+ // Updating a sub-array element.
+ auto doc = mutablebson::Document(preImage);
+ UpdateExecutor::ApplyParams params(doc.root(), fieldRefSet);
+ params.indexData = &indexData;
+ auto test = DeltaExecutor(fromjson("{sf1: {sa: {sb: {a: true, s0: {u: {c: 2}} }}}}"));
+ auto result = test.applyUpdate(params);
+ ASSERT_BSONOBJ_BINARY_EQ(params.element.getDocument().getObject(),
+ fromjson("{f1: {a: {b: [{c: 2}, 2]}}}"));
+ ASSERT(result.indexesAffected);
+ }
+ {
+ // Updating an array element.
+ auto doc = mutablebson::Document(preImage);
+ UpdateExecutor::ApplyParams params(doc.root(), fieldRefSet);
+ params.indexData = &indexData;
+ auto test = DeltaExecutor(fromjson("{sf1: {sa: {sb: {a: true, u0: 1 }}}}"));
+ auto result = test.applyUpdate(params);
+ ASSERT_BSONOBJ_BINARY_EQ(params.element.getDocument().getObject(),
+ fromjson("{f1: {a: {b: [1, 2]}}}"));
+ ASSERT(result.indexesAffected);
+ }
+}
+
+} // namespace
+} // namespace mongo
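
The comments in these tests lean on 'UpdateIndexData' canonicalizing paths by dropping numeric components, so array indices never change which index paths a diff can match. A minimal sketch of that matching behaviour, under the same assumptions as the tests above (the concrete paths are illustrative):

    #include "mongo/db/field_ref.h"
    #include "mongo/db/update_index_data.h"
    #include "mongo/util/assert_util.h"

    namespace mongo {
    void indexPathMatchingExample() {
        UpdateIndexData indexData;
        indexData.addPath(FieldRef("f1.9.a.10.b"));  // stored canonically as 'f1.a.b'

        // Numeric components in the queried path are ignored as well, so a diff that
        // touches an array element along the indexed path still counts as affecting it.
        invariant(indexData.mightBeIndexed(FieldRef("f1.0.a.3.b")));  // numeric parts ignored
        invariant(indexData.mightBeIndexed(FieldRef("f1.a")));        // prefix of the index path
        invariant(!indexData.mightBeIndexed(FieldRef("f1.a.c")));     // diverges after 'f1.a'
    }
    }  // namespace mongo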
diff --git a/src/mongo/db/update/document_diff_applier.cpp b/src/mongo/db/update/document_diff_applier.cpp
index d726d5d0a87..e38c0ad8050 100644
--- a/src/mongo/db/update/document_diff_applier.cpp
+++ b/src/mongo/db/update/document_diff_applier.cpp
@@ -29,7 +29,9 @@
#include "mongo/platform/basic.h"
+#include "mongo/db/field_ref.h"
#include "mongo/db/update/document_diff_applier.h"
+#include "mongo/db/update_index_data.h"
#include "mongo/stdx/variant.h"
#include "mongo/util/string_map.h"
@@ -100,160 +102,203 @@ DocumentDiffTables buildObjDiffTables(DocumentDiffReader* reader) {
return out;
}
-// Mutually recursive with applyDiffToObject().
-void applyDiffToArray(const BSONObj& preImage, ArrayDiffReader* reader, BSONArrayBuilder* builder);
-
-void applyDiffToObject(const BSONObj& preImage,
- DocumentDiffReader* reader,
- BSONObjBuilder* builder) {
- // First build some tables so we can quickly apply the diff. We shouldn't need to examine the
- // diff again once this is done.
- const DocumentDiffTables tables = buildObjDiffTables(reader);
-
- // Keep track of what fields we already appended, so that we can insert the rest at the end.
- StringDataSet fieldsToSkipInserting;
-
- for (auto&& elt : preImage) {
- auto it = tables.fieldMap.find(elt.fieldNameStringData());
- if (it == tables.fieldMap.end()) {
- // Field is not modified, so we append it as is.
- builder->append(elt);
- continue;
+class DiffApplier {
+public:
+ DiffApplier(const UpdateIndexData* indexData) : _indexData(indexData) {}
+
+ void applyDiffToObject(const BSONObj& preImage,
+ FieldRef* path,
+ DocumentDiffReader* reader,
+ BSONObjBuilder* builder) {
+ // First build some tables so we can quickly apply the diff. We shouldn't need to examine
+ // the diff again once this is done.
+ const DocumentDiffTables tables = buildObjDiffTables(reader);
+
+ // Keep track of what fields we already appended, so that we can insert the rest at the end.
+ StringDataSet fieldsToSkipInserting;
+
+ for (auto&& elt : preImage) {
+ auto it = tables.fieldMap.find(elt.fieldNameStringData());
+ if (it == tables.fieldMap.end()) {
+ // Field is not modified, so we append it as is.
+ builder->append(elt);
+ continue;
+ }
+ FieldRef::FieldRefTempAppend tempAppend(*path, elt.fieldNameStringData());
+
+ stdx::visit(
+ visit_helper::Overloaded{
+ [this, &path](Delete) {
+ // Do not append anything.
+ updateIndexesAffected(path);
+ },
+
+ [this, &path, &builder, &elt, &fieldsToSkipInserting](const Update& update) {
+ builder->append(update.newElt);
+ updateIndexesAffected(path);
+ fieldsToSkipInserting.insert(elt.fieldNameStringData());
+ },
+
+ [](const Insert&) {
+ // Skip the pre-image version of the field. We'll add it at the end.
+ },
+
+ [this, &builder, &elt, &path](const SubDiff& subDiff) {
+ const auto type = subDiff.type();
+ if (elt.type() == BSONType::Object && type == DiffType::kDocument) {
+ BSONObjBuilder subBob(builder->subobjStart(elt.fieldNameStringData()));
+ auto reader = stdx::get<DocumentDiffReader>(subDiff.reader);
+ applyDiffToObject(elt.embeddedObject(), path, &reader, &subBob);
+ } else if (elt.type() == BSONType::Array && type == DiffType::kArray) {
+ BSONArrayBuilder subBob(
+ builder->subarrayStart(elt.fieldNameStringData()));
+ auto reader = stdx::get<ArrayDiffReader>(subDiff.reader);
+ applyDiffToArray(elt.embeddedObject(), path, &reader, &subBob);
+ } else {
+ // There's a type mismatch. The diff was expecting one type but the pre
+ // image contains a value of a different type. This means we are
+ // re-applying a diff.
+
+ // There must be some future operation which changed the type of this
+ // field from object/array to something else. So we set this field to
+ // null and expect the future value to overwrite the value here.
+
+ builder->appendNull(elt.fieldNameStringData());
+ updateIndexesAffected(path);
+ }
+
+ // Note: There's no need to update 'fieldsToSkipInserting' here, because a
+ // field cannot appear in both the sub-diff and insert section.
+ },
+ },
+ it->second);
}
- stdx::visit(
- visit_helper::Overloaded{
- [](Delete) {
- // Do nothing.
- },
+ // Insert remaining fields to the end.
+ for (auto&& elt : tables.fieldsToInsert) {
+ if (!fieldsToSkipInserting.count(elt.fieldNameStringData())) {
+ builder->append(elt);
+ FieldRef::FieldRefTempAppend tempAppend(*path, elt.fieldNameStringData());
+ updateIndexesAffected(path);
+ }
+ }
+ }
- [&builder, &elt, &fieldsToSkipInserting](const Update& update) {
- builder->append(update.newElt);
- fieldsToSkipInserting.insert(elt.fieldNameStringData());
- },
+ bool indexesAffected() const {
+ return _indexesAffected;
+ }
- [](const Insert&) {
- // Skip the pre-image version of the field. We'll add it at the end.
+private:
+ /**
+ * Given an (optional) member of the pre image array and a modification, apply the modification
+ * and add it to the post image array in 'builder'.
+ */
+ void appendNewValueForArrayIndex(boost::optional<BSONElement> preImageValue,
+ FieldRef* path,
+ const ArrayDiffReader::ArrayModification& modification,
+ BSONArrayBuilder* builder) {
+ stdx::visit(
+ visit_helper::Overloaded{
+ [this, &path, builder](const BSONElement& update) {
+ builder->append(update);
+ updateIndexesAffected(path);
},
-
- [&builder, &elt](const SubDiff& subDiff) {
- const auto type = subDiff.type();
- if (elt.type() == BSONType::Object && type == DiffType::kDocument) {
- BSONObjBuilder subBob(builder->subobjStart(elt.fieldNameStringData()));
- auto reader = stdx::get<DocumentDiffReader>(subDiff.reader);
- applyDiffToObject(elt.embeddedObject(), &reader, &subBob);
- } else if (elt.type() == BSONType::Array && type == DiffType::kArray) {
- BSONArrayBuilder subBob(builder->subarrayStart(elt.fieldNameStringData()));
- auto reader = stdx::get<ArrayDiffReader>(subDiff.reader);
- applyDiffToArray(elt.embeddedObject(), &reader, &subBob);
- } else {
- // There's a type mismatch. The diff was expecting one type but the pre
- // image contains a value of a different type. This means we are
- // re-applying a diff.
-
- // There must be some future operation which changed the type of this field
- // from object/array to something else. So we set this field to null field
- // and expect the future value to overwrite the value here.
-
- builder->appendNull(elt.fieldNameStringData());
+ [this, builder, &preImageValue, &path](auto reader) {
+ if (!preImageValue) {
+ // The pre-image's array was shorter than we expected. This means some
+ // future oplog entry will either re-write the value of this array index
+ // (or some parent) so we append a null and move on.
+ builder->appendNull();
+ updateIndexesAffected(path);
+ return;
+ }
+ if constexpr (std::is_same_v<decltype(reader), ArrayDiffReader>) {
+ if (preImageValue->type() == BSONType::Array) {
+ BSONArrayBuilder sub(builder->subarrayStart());
+ applyDiffToArray(preImageValue->embeddedObject(), path, &reader, &sub);
+ return;
+ }
+ } else if constexpr (std::is_same_v<decltype(reader), DocumentDiffReader>) {
+ if (preImageValue->type() == BSONType::Object) {
+ BSONObjBuilder sub(builder->subobjStart());
+ applyDiffToObject(preImageValue->embeddedObject(), path, &reader, &sub);
+ return;
+ }
}
- // Note: There's no need to update 'fieldsToSkipInserting' here, because a
- // field cannot appear in both the sub-diff and insert section.
+ // The type does not match what we expected. This means some future oplog
+ // entry will either re-write the value of this array index (or some
+ // parent) so we append a null and move on.
+ builder->appendNull();
+ updateIndexesAffected(path);
},
},
- it->second);
+ modification);
}
- // Insert remaining fields to the end.
- for (auto&& elt : tables.fieldsToInsert) {
- if (!fieldsToSkipInserting.count(elt.fieldNameStringData())) {
- builder->append(elt);
+ // Mutually recursive with applyDiffToObject().
+ void applyDiffToArray(const BSONObj& arrayPreImage,
+ FieldRef* path,
+ ArrayDiffReader* reader,
+ BSONArrayBuilder* builder) {
+ const auto resizeVal = reader->newSize();
+ // Each modification is an optional pair where the first component is the array index and
+ // the second is the modification type.
+ auto nextMod = reader->next();
+ BSONObjIterator preImageIt(arrayPreImage);
+
+ // If the array is being resized, check whether indexes are affected by the modification.
+ if (resizeVal) {
+ updateIndexesAffected(path);
}
- }
-}
-
-/**
- * Given an (optional) member of the pre image array and a modification, apply the modification and
- * add it to the post image array in 'builder'.
- */
-void appendNewValueForIndex(boost::optional<BSONElement> preImageValue,
- const ArrayDiffReader::ArrayModification& modification,
- BSONArrayBuilder* builder) {
- stdx::visit(
- visit_helper::Overloaded{
- [builder](const BSONElement& update) { builder->append(update); },
- [builder, &preImageValue](auto reader) {
- if (!preImageValue) {
- // The pre-image's array was shorter than we expected. This means some
- // future oplog entry will either re-write the value of this array index
- // (or some parent) so we append a null and move on.
- builder->appendNull();
- return;
- }
- if constexpr (std::is_same_v<decltype(reader), ArrayDiffReader>) {
- if (preImageValue->type() == BSONType::Array) {
- BSONArrayBuilder sub(builder->subarrayStart());
- applyDiffToArray(preImageValue->embeddedObject(), &reader, &sub);
- return;
- }
- } else if constexpr (std::is_same_v<decltype(reader), DocumentDiffReader>) {
- if (preImageValue->type() == BSONType::Object) {
- BSONObjBuilder sub(builder->subobjStart());
- applyDiffToObject(preImageValue->embeddedObject(), &reader, &sub);
- return;
- }
- }
+ size_t idx = 0;
+ for (; preImageIt.more() && (!resizeVal || idx < *resizeVal); ++idx, ++preImageIt) {
+ auto idxAsStr = std::to_string(idx);
+ FieldRef::FieldRefTempAppend tempAppend(*path, idxAsStr);
+ if (nextMod && idx == nextMod->first) {
+ appendNewValueForArrayIndex(*preImageIt, path, nextMod->second, builder);
+ nextMod = reader->next();
+ } else {
+ // This index is not in the diff so we keep the value in the pre image.
+ builder->append(*preImageIt);
+ }
+ }
- // The type does not match what we expected. This means some future oplog
- // entry will either re-write the value of this array index (or some
- // parent) so we append a null and move on.
+ // Deal with remaining fields in the diff if the pre image was too short.
+ for (; (resizeVal && idx < *resizeVal) || nextMod; ++idx) {
+ auto idxAsStr = std::to_string(idx);
+ FieldRef::FieldRefTempAppend tempAppend(*path, idxAsStr);
+ if (nextMod && idx == nextMod->first) {
+ appendNewValueForArrayIndex(boost::none, path, nextMod->second, builder);
+ nextMod = reader->next();
+ } else {
+ // This field is not mentioned in the diff so we pad the post image with null.
+ updateIndexesAffected(path);
builder->appendNull();
- },
- },
- modification);
-}
-
-void applyDiffToArray(const BSONObj& arrayPreImage,
- ArrayDiffReader* reader,
- BSONArrayBuilder* builder) {
- const auto resizeVal = reader->newSize();
- // Each modification is an optional pair where the first component is the array index and the
- // second is the modification type.
- auto nextMod = reader->next();
- BSONObjIterator preImageIt(arrayPreImage);
-
- size_t idx = 0;
- for (; preImageIt.more() && (!resizeVal || idx < *resizeVal); ++idx, ++preImageIt) {
- if (nextMod && idx == nextMod->first) {
- appendNewValueForIndex(*preImageIt, nextMod->second, builder);
- nextMod = reader->next();
- } else {
- // This index is not in the diff so we keep the value in the pre image.
- builder->append(*preImageIt);
+ }
}
+
+ invariant(!resizeVal || *resizeVal == idx);
}
- // Deal with remaining fields in the diff if the pre image was too short.
- for (; (resizeVal && idx < *resizeVal) || nextMod; ++idx) {
- if (nextMod && idx == nextMod->first) {
- appendNewValueForIndex(boost::none, nextMod->second, builder);
- nextMod = reader->next();
- } else {
- // This field is not mentioned in the diff so we pad the post image with null.
- builder->appendNull();
+ void updateIndexesAffected(FieldRef* path) {
+ if (_indexData) {
+ _indexesAffected = _indexesAffected || _indexData->mightBeIndexed(*path);
}
}
- invariant(!resizeVal || *resizeVal == idx);
-}
+ const UpdateIndexData* _indexData;
+ bool _indexesAffected = false;
+};
} // namespace
-BSONObj applyDiff(const BSONObj& pre, const Diff& diff) {
+ApplyDiffOutput applyDiff(const BSONObj& pre, const Diff& diff, const UpdateIndexData* indexData) {
DocumentDiffReader reader(diff);
BSONObjBuilder out;
- applyDiffToObject(pre, &reader, &out);
- return out.obj();
+ DiffApplier applier(indexData);
+ FieldRef path;
+ applier.applyDiffToObject(pre, &path, &reader, &out);
+ return {out.obj(), applier.indexesAffected()};
}
} // namespace mongo::doc_diff
diff --git a/src/mongo/db/update/document_diff_applier.h b/src/mongo/db/update/document_diff_applier.h
index a825ee2cac9..22ebfce5247 100644
--- a/src/mongo/db/update/document_diff_applier.h
+++ b/src/mongo/db/update/document_diff_applier.h
@@ -31,12 +31,20 @@
#include "mongo/bson/bsonobjbuilder.h"
#include "mongo/db/update/document_diff_serialization.h"
+#include "mongo/db/update_index_data.h"
namespace mongo {
namespace doc_diff {
+
+struct ApplyDiffOutput {
+ BSONObj postImage;
+ bool indexesAffected;
+};
+
/**
- * Applies the diff to 'pre' and returns the post image. Throws if the diff is invalid.
+ * Applies the diff to 'pre' and returns the post image. Throws if the diff is invalid. The
+ * 'indexData' parameter is optional; if provided, it is used to compute whether any indexes are affected.
*/
-BSONObj applyDiff(const BSONObj& pre, const Diff& diff);
+ApplyDiffOutput applyDiff(const BSONObj& pre, const Diff& diff, const UpdateIndexData* indexData);
} // namespace doc_diff
} // namespace mongo
diff --git a/src/mongo/db/update/document_diff_applier_test.cpp b/src/mongo/db/update/document_diff_applier_test.cpp
index 90ea71785b3..888803c35eb 100644
--- a/src/mongo/db/update/document_diff_applier_test.cpp
+++ b/src/mongo/db/update/document_diff_applier_test.cpp
@@ -34,6 +34,7 @@
#include "mongo/bson/json.h"
#include "mongo/db/update/document_diff_applier.h"
#include "mongo/db/update/document_diff_serialization.h"
+#include "mongo/db/update/document_diff_test_helpers.h"
#include "mongo/logv2/log.h"
#include "mongo/unittest/unittest.h"
@@ -43,14 +44,14 @@ namespace {
* Checks that applying the diff (once or twice) to 'preImage' produces the expected post image.
*/
void checkDiff(const BSONObj& preImage, const BSONObj& expectedPost, const Diff& diff) {
- BSONObj postImage = applyDiff(preImage, diff);
+ BSONObj postImage = applyDiffTestHelper(preImage, diff);
// This *MUST* check for binary equality, which is what we enforce between replica set
// members. Logical equality (through woCompare() or ASSERT_BSONOBJ_EQ) is not enough to show
// that the applier actually works.
ASSERT_BSONOBJ_BINARY_EQ(postImage, expectedPost);
- BSONObj postImageAgain = applyDiff(postImage, diff);
+ BSONObj postImageAgain = applyDiffTestHelper(postImage, diff);
ASSERT_BSONOBJ_BINARY_EQ(postImageAgain, expectedPost);
}
@@ -350,41 +351,48 @@ TEST(DiffApplierTest, UpdateArrayOfObjectsWithUpdateOperationNonContiguous) {
TEST(DiffApplierTest, DiffWithDuplicateFields) {
BSONObj diff = fromjson("{d: {dupField: false}, u: {dupField: 'new value'}}");
- ASSERT_THROWS_CODE(applyDiff(BSONObj(), diff), DBException, 4728000);
+ ASSERT_THROWS_CODE(applyDiffTestHelper(BSONObj(), diff), DBException, 4728000);
}
TEST(DiffApplierTest, EmptyDiff) {
BSONObj emptyDiff;
- ASSERT_THROWS_CODE(applyDiff(BSONObj(), emptyDiff), DBException, 4770500);
+ ASSERT_THROWS_CODE(applyDiffTestHelper(BSONObj(), emptyDiff), DBException, 4770500);
}
TEST(DiffApplierTest, ArrayDiffAtTop) {
BSONObj arrDiff = fromjson("{a: true, l: 5, 'd0': false}");
- ASSERT_THROWS_CODE(applyDiff(BSONObj(), arrDiff), DBException, 4770503);
+ ASSERT_THROWS_CODE(applyDiffTestHelper(BSONObj(), arrDiff), DBException, 4770503);
}
TEST(DiffApplierTest, DuplicateFieldNames) {
// Within the same update type.
- ASSERT_THROWS_CODE(applyDiff(BSONObj(), fromjson("{d: {a: false, b: false, a: false}}")),
- DBException,
- 4728000);
- ASSERT_THROWS_CODE(applyDiff(BSONObj(), fromjson("{u: {f1: 3, f1: 4}}")), DBException, 4728000);
ASSERT_THROWS_CODE(
- applyDiff(BSONObj(), fromjson("{i: {a: {}, a: null}}")), DBException, 4728000);
+ applyDiffTestHelper(BSONObj(), fromjson("{d: {a: false, b: false, a: false}}")),
+ DBException,
+ 4728000);
+ ASSERT_THROWS_CODE(
+ applyDiffTestHelper(BSONObj(), fromjson("{u: {f1: 3, f1: 4}}")), DBException, 4728000);
ASSERT_THROWS_CODE(
- applyDiff(BSONObj(), fromjson("{sa: {d: {p: false}}, sa: {a: true, d: {p: false}}}")),
+ applyDiffTestHelper(BSONObj(), fromjson("{i: {a: {}, a: null}}")), DBException, 4728000);
+ ASSERT_THROWS_CODE(
+ applyDiffTestHelper(BSONObj(),
+ fromjson("{sa: {d: {p: false}}, sa: {a: true, d: {p: false}}}")),
DBException,
4728000);
// Across update types.
- ASSERT_THROWS_CODE(applyDiff(BSONObj(), fromjson("{d: {b: false}, i: {a: {}, b: null}}")),
- DBException,
- 4728000);
- ASSERT_THROWS_CODE(applyDiff(BSONObj(), fromjson("{u: {b: false}, i: {a: {}, b: null}}")),
- DBException,
- 4728000);
ASSERT_THROWS_CODE(
- applyDiff(BSONObj(), fromjson("{u: {a: {}}, sa: {d : {k: false}}}")), DBException, 4728000);
+ applyDiffTestHelper(BSONObj(), fromjson("{d: {b: false}, i: {a: {}, b: null}}")),
+ DBException,
+ 4728000);
+ ASSERT_THROWS_CODE(
+ applyDiffTestHelper(BSONObj(), fromjson("{u: {b: false}, i: {a: {}, b: null}}")),
+ DBException,
+ 4728000);
+ ASSERT_THROWS_CODE(
+ applyDiffTestHelper(BSONObj(), fromjson("{u: {a: {}}, sa: {d : {k: false}}}")),
+ DBException,
+ 4728000);
}
} // namespace
diff --git a/src/mongo/db/update/document_diff_serialization_test.cpp b/src/mongo/db/update/document_diff_serialization_test.cpp
index 97c2fcdf438..c1107a1de32 100644
--- a/src/mongo/db/update/document_diff_serialization_test.cpp
+++ b/src/mongo/db/update/document_diff_serialization_test.cpp
@@ -34,6 +34,7 @@
#include "mongo/bson/json.h"
#include "mongo/db/update/document_diff_applier.h"
#include "mongo/db/update/document_diff_serialization.h"
+#include "mongo/db/update/document_diff_test_helpers.h"
#include "mongo/unittest/unittest.h"
namespace mongo::doc_diff {
@@ -425,59 +426,71 @@ TEST(DiffSerializationTest, ExecptionsWhileDiffBuildingDoesNotLeakMemory) {
TEST(DiffSerializationTest, UnexpectedFieldsInObjDiff) {
// Empty field names on top level.
ASSERT_THROWS_CODE(
- applyDiff(BSONObj(), fromjson("{d: {f: false}, '' : {}}")), DBException, 4770505);
- ASSERT_THROWS_CODE(applyDiff(BSONObj(), fromjson("{'' : {}}")), DBException, 4770505);
+ applyDiffTestHelper(BSONObj(), fromjson("{d: {f: false}, '' : {}}")), DBException, 4770505);
+ ASSERT_THROWS_CODE(applyDiffTestHelper(BSONObj(), fromjson("{'' : {}}")), DBException, 4770505);
// Expected diff to be non-empty.
ASSERT_THROWS_CODE(
- applyDiff(BSONObj(), fromjson("{d: {f: false}, s : {}}")), DBException, 4770500);
- ASSERT_THROWS_CODE(applyDiff(BSONObj(), fromjson("{sa : {}}")), DBException, 4770500);
+ applyDiffTestHelper(BSONObj(), fromjson("{d: {f: false}, s : {}}")), DBException, 4770500);
+ ASSERT_THROWS_CODE(applyDiffTestHelper(BSONObj(), fromjson("{sa : {}}")), DBException, 4770500);
- ASSERT_THROWS_CODE(applyDiff(BSONObj(), fromjson("{p : 1}")), DBException, 4770503);
- ASSERT_THROWS_CODE(applyDiff(BSONObj(), fromjson("{u : true}")), DBException, 4770507);
- ASSERT_THROWS_CODE(applyDiff(BSONObj(), fromjson("{d : []}")), DBException, 4770507);
- ASSERT_THROWS_CODE(applyDiff(BSONObj(), fromjson("{i : null}")), DBException, 4770507);
+ ASSERT_THROWS_CODE(applyDiffTestHelper(BSONObj(), fromjson("{p : 1}")), DBException, 4770503);
+ ASSERT_THROWS_CODE(
+ applyDiffTestHelper(BSONObj(), fromjson("{u : true}")), DBException, 4770507);
+ ASSERT_THROWS_CODE(applyDiffTestHelper(BSONObj(), fromjson("{d : []}")), DBException, 4770507);
+ ASSERT_THROWS_CODE(
+ applyDiffTestHelper(BSONObj(), fromjson("{i : null}")), DBException, 4770507);
// If the order of the fields is not obeyed.
- ASSERT_THROWS_CODE(applyDiff(BSONObj(), fromjson("{i : {}, d: {}}")), DBException, 4770503);
-
ASSERT_THROWS_CODE(
- applyDiff(BSONObj(), fromjson("{s : {i: {}}, i: {}}")), DBException, 4770514);
+ applyDiffTestHelper(BSONObj(), fromjson("{i : {}, d: {}}")), DBException, 4770503);
+
ASSERT_THROWS_CODE(
- applyDiff(BSONObj(), fromjson("{sa : {u: {}}, d: {p: false}}")), DBException, 4770514);
- ASSERT_THROWS_CODE(applyDiff(BSONObj(), fromjson("{sa : {u: {}}, sb : {u: {}}, i: {}}")),
+ applyDiffTestHelper(BSONObj(), fromjson("{s : {i: {}}, i: {}}")), DBException, 4770514);
+ ASSERT_THROWS_CODE(applyDiffTestHelper(BSONObj(), fromjson("{sa : {u: {}}, d: {p: false}}")),
DBException,
4770514);
ASSERT_THROWS_CODE(
- applyDiff(BSONObj(), fromjson("{sa : {u: {}}, p: {}}")), DBException, 4770514);
+ applyDiffTestHelper(BSONObj(), fromjson("{sa : {u: {}}, sb : {u: {}}, i: {}}")),
+ DBException,
+ 4770514);
+ ASSERT_THROWS_CODE(
+ applyDiffTestHelper(BSONObj(), fromjson("{sa : {u: {}}, p: {}}")), DBException, 4770514);
// Empty deletes object is valid.
- ASSERT_BSONOBJ_BINARY_EQ(applyDiff(fromjson("{a: 1}"), fromjson("{d : {}}")),
+ ASSERT_BSONOBJ_BINARY_EQ(applyDiffTestHelper(fromjson("{a: 1}"), fromjson("{d : {}}")),
fromjson("{a: 1}"));
}
TEST(DiffSerializationTest, UnexpectedFieldsInArrayDiff) {
- ASSERT_THROWS_CODE(applyDiff(fromjson("{arr: []}"), fromjson("{sarr: {a: true, '3u' : 1}}")),
- DBException,
- 4770512);
- ASSERT_THROWS_CODE(applyDiff(fromjson("{arr: []}"), fromjson("{sarr: {a: true, u : true}}")),
- DBException,
- 4770521);
- ASSERT_THROWS_CODE(applyDiff(fromjson("{arr: []}"), fromjson("{sarr: {a: true, '5' : {}}}")),
- DBException,
- 4770521);
- ASSERT_THROWS_CODE(applyDiff(fromjson("{arr: []}"), fromjson("{sarr: {a: false, 'u3' : 4}}")),
- DBException,
- 4770520);
- ASSERT_THROWS_CODE(applyDiff(fromjson("{arr: []}"), fromjson("{sarr: {a: 1, 'u3' : 4}}")),
- DBException,
- 4770519);
- ASSERT_THROWS_CODE(applyDiff(fromjson("{arr: []}"), fromjson("{sarr: {a: true, 's3' : 4}}")),
- DBException,
- 4770501);
- ASSERT_THROWS_CODE(applyDiff(fromjson("{arr: []}"), fromjson("{sarr: {a: true, 'd3' : 4}}")),
- DBException,
- 4770502);
+ ASSERT_THROWS_CODE(
+ applyDiffTestHelper(fromjson("{arr: []}"), fromjson("{sarr: {a: true, '3u' : 1}}")),
+ DBException,
+ 4770512);
+ ASSERT_THROWS_CODE(
+ applyDiffTestHelper(fromjson("{arr: []}"), fromjson("{sarr: {a: true, u : true}}")),
+ DBException,
+ 4770521);
+ ASSERT_THROWS_CODE(
+ applyDiffTestHelper(fromjson("{arr: []}"), fromjson("{sarr: {a: true, '5' : {}}}")),
+ DBException,
+ 4770521);
+ ASSERT_THROWS_CODE(
+ applyDiffTestHelper(fromjson("{arr: []}"), fromjson("{sarr: {a: false, 'u3' : 4}}")),
+ DBException,
+ 4770520);
+ ASSERT_THROWS_CODE(
+ applyDiffTestHelper(fromjson("{arr: []}"), fromjson("{sarr: {a: 1, 'u3' : 4}}")),
+ DBException,
+ 4770519);
+ ASSERT_THROWS_CODE(
+ applyDiffTestHelper(fromjson("{arr: []}"), fromjson("{sarr: {a: true, 's3' : 4}}")),
+ DBException,
+ 4770501);
+ ASSERT_THROWS_CODE(
+ applyDiffTestHelper(fromjson("{arr: []}"), fromjson("{sarr: {a: true, 'd3' : 4}}")),
+ DBException,
+ 4770502);
}
} // namespace
diff --git a/src/mongo/db/update/document_diff_test.cpp b/src/mongo/db/update/document_diff_test.cpp
index 4b88e10eae5..229d163a954 100644
--- a/src/mongo/db/update/document_diff_test.cpp
+++ b/src/mongo/db/update/document_diff_test.cpp
@@ -33,7 +33,6 @@
#include "mongo/bson/bson_depth.h"
#include "mongo/bson/json.h"
-#include "mongo/db/update/document_diff_applier.h"
#include "mongo/db/update/document_diff_calculator.h"
#include "mongo/db/update/document_diff_test_helpers.h"
#include "mongo/db/update/update_oplog_entry_serialization.h"
@@ -116,11 +115,11 @@ void runTest(std::vector<BSONObj> documents, size_t numSimulations) {
ASSERT(diff);
diffs.push_back(*diff);
- const auto postObj = applyDiff(preDoc, *diff);
+ const auto postObj = applyDiffTestHelper(preDoc, *diff);
ASSERT_BSONOBJ_BINARY_EQ(documents[i], postObj);
// Applying the diff the second time also generates the same object.
- ASSERT_BSONOBJ_BINARY_EQ(postObj, applyDiff(postObj, *diff));
+ ASSERT_BSONOBJ_BINARY_EQ(postObj, applyDiffTestHelper(postObj, *diff));
preDoc = documents[i];
}
@@ -130,7 +129,7 @@ void runTest(std::vector<BSONObj> documents, size_t numSimulations) {
for (size_t start = 0; start < diffs.size(); ++start) {
auto endObj = documents.back();
for (size_t i = start; i < diffs.size(); ++i) {
- endObj = applyDiff(endObj, diffs[i]);
+ endObj = applyDiffTestHelper(endObj, diffs[i]);
}
ASSERT_BSONOBJ_BINARY_EQ(endObj, documents.back());
}
diff --git a/src/mongo/db/update/document_diff_test_helpers.cpp b/src/mongo/db/update/document_diff_test_helpers.cpp
new file mode 100644
index 00000000000..7082025fad9
--- /dev/null
+++ b/src/mongo/db/update/document_diff_test_helpers.cpp
@@ -0,0 +1,126 @@
+/**
+ * Copyright (C) 2020-present MongoDB, Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the Server Side Public License, version 1,
+ * as published by MongoDB, Inc.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * Server Side Public License for more details.
+ *
+ * You should have received a copy of the Server Side Public License
+ * along with this program. If not, see
+ * <http://www.mongodb.com/licensing/server-side-public-license>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the Server Side Public License in all respects for
+ * all of the code used other than as permitted herein. If you modify file(s)
+ * with this exception, you may extend this exception to your version of the
+ * file(s), but you are not obligated to do so. If you do not wish to do so,
+ * delete this exception statement from your version. If you delete this
+ * exception statement from all source files in the program, then also delete
+ * it in the license file.
+ */
+
+#include "mongo/platform/basic.h"
+
+#include "mongo/db/update/document_diff_test_helpers.h"
+
+#include "mongo/bson/json.h"
+#include "mongo/db/update/document_diff_applier.h"
+#include "mongo/platform/random.h"
+
+namespace mongo::doc_diff {
+
+BSONObj createObjWithLargePrefix(const std::string& suffix) {
+ const static auto largeObj = BSON("prefixLargeField" << std::string(200, 'a'));
+ return largeObj.addFields(fromjson(suffix));
+}
+
+std::string getFieldName(int level, int fieldNum) {
+ return str::stream() << "f" << level << fieldNum;
+}
+
+Value getScalarFieldValue(PseudoRandom* rng) {
+ switch (rng->nextInt32(10)) {
+ case 0:
+ return Value("val"_sd);
+ case 1:
+ return Value(BSONNULL);
+ case 2:
+ return Value(-1LL);
+ case 3:
+ return Value(0);
+ case 4:
+ return Value(1.10);
+ case 5:
+ return Value(false);
+ case 6:
+ return Value(BSONRegEx("p"));
+ case 7:
+ return Value(Date_t());
+ case 8:
+ return Value(UUID::gen());
+ case 9:
+ return Value(BSONBinData("asdf", 4, BinDataGeneral));
+ default:
+ MONGO_UNREACHABLE;
+ }
+}
+
+BSONObj generateDoc(PseudoRandom* rng, MutableDocument* doc, int depthLevel) {
+ // Append a large field at each level so that the likelihood of generating a sub-diff is high.
+ auto largeFieldObj = createObjWithLargePrefix("{}");
+ doc->addField("prefixLargeField", Value(largeFieldObj.firstElement()));
+
+ // Reduce the probability of generating nested objects as we go deeper. After depth level 6, we
+ // should not be generating any more nested objects.
+ const double subObjProbability = 0.3 - (depthLevel * 0.05);
+ const double subArrayProbability = 0.2 - (depthLevel * 0.05);
+
+ const int numFields = (5 - depthLevel) + rng->nextInt32(4);
+ for (int fieldNum = 0; fieldNum < numFields; ++fieldNum) {
+ const auto fieldName = getFieldName(depthLevel, fieldNum);
+ auto num = rng->nextCanonicalDouble();
+ if (num <= subObjProbability) {
+ MutableDocument subDoc;
+ doc->addField(fieldName, Value(generateDoc(rng, &subDoc, depthLevel + 1)));
+ } else if (num <= (subObjProbability + subArrayProbability)) {
+ const auto arrayLength = rng->nextInt32(10);
+ std::vector<Value> values;
+ auto numSubObjs = 0;
+ for (auto i = 0; i < arrayLength; i++) {
+ // The probability of generating a sub-object goes down as we generate more
+ // sub-objects and as the array position increases.
+ if (!rng->nextInt32(2 + numSubObjs + i)) {
+ MutableDocument subDoc;
+
+ // Ensure that the depth is higher as we generate more sub-objects, to avoid
+ // bloating up the document size exponentially.
+ values.push_back(
+ Value(generateDoc(rng, &subDoc, depthLevel + 1 + numSubObjs * 2)));
+ ++numSubObjs;
+ } else {
+ values.push_back(getScalarFieldValue(rng));
+ }
+ }
+
+ doc->addField(fieldName, Value(values));
+ } else {
+ doc->addField(fieldName, getScalarFieldValue(rng));
+ }
+ }
+ return doc->freeze().toBson();
+}
+
+BSONObj applyDiffTestHelper(BSONObj preImage, BSONObj diff) {
+ UpdateIndexData indexData;
+ return applyDiff(preImage, diff, &indexData).postImage;
+}
+
+} // namespace mongo::doc_diff
diff --git a/src/mongo/db/update/document_diff_test_helpers.h b/src/mongo/db/update/document_diff_test_helpers.h
index 09d545cba47..8ba9aea35a3 100644
--- a/src/mongo/db/update/document_diff_test_helpers.h
+++ b/src/mongo/db/update/document_diff_test_helpers.h
@@ -29,98 +29,17 @@
#pragma once
-#include <algorithm>
-
-#include "mongo/bson/json.h"
+#include "mongo/bson/mutable/document.h"
#include "mongo/db/exec/document_value/document.h"
#include "mongo/db/exec/document_value/value.h"
-#include "mongo/db/update/document_diff_applier.h"
-#include "mongo/db/update/document_diff_calculator.h"
+#include "mongo/db/update/update_executor.h"
#include "mongo/platform/random.h"
-#include "mongo/unittest/bson_test_util.h"
-#include "mongo/unittest/unittest.h"
namespace mongo::doc_diff {
-BSONObj createObjWithLargePrefix(const std::string& suffix) {
- const static auto largeObj = BSON("prefixLargeField" << std::string(200, 'a'));
- return largeObj.addFields(fromjson(suffix));
-}
-
-std::string getFieldName(int level, int fieldNum) {
- return str::stream() << "f" << level << fieldNum;
-}
-
-Value getScalarFieldValue(PseudoRandom* rng) {
- switch (rng->nextInt32(10)) {
- case 0:
- return Value("val"_sd);
- case 1:
- return Value(BSONNULL);
- case 2:
- return Value(-1LL);
- case 3:
- return Value(0);
- case 4:
- return Value(1.10);
- case 5:
- return Value(false);
- case 6:
- return Value(BSONRegEx("p"));
- case 7:
- return Value(Date_t());
- case 8:
- return Value(UUID::gen());
- case 9:
- return Value(BSONBinData("asdf", 4, BinDataGeneral));
- default:
- MONGO_UNREACHABLE;
- }
-}
-
-BSONObj generateDoc(PseudoRandom* rng, MutableDocument* doc, int depthLevel) {
- // Append a large field at each level so that the likelihood of generating a sub-diff is high.
- auto largeFieldObj = createObjWithLargePrefix("{}");
- doc->addField("prefixLargeField", Value(largeFieldObj.firstElement()));
-
- // Reduce the probabilty of generated nested objects as we go deeper. After depth level 6, we
- // should not be generating anymore nested objects.
- const double subObjProbability = 0.3 - (depthLevel * 0.05);
- const double subArrayProbability = 0.2 - (depthLevel * 0.05);
-
- const int numFields = (5 - depthLevel) + rng->nextInt32(4);
- for (int fieldNum = 0; fieldNum < numFields; ++fieldNum) {
- const auto fieldName = getFieldName(depthLevel, fieldNum);
- auto num = rng->nextCanonicalDouble();
- if (num <= subObjProbability) {
- MutableDocument subDoc;
- doc->addField(fieldName, Value(generateDoc(rng, &subDoc, depthLevel + 1)));
- } else if (num <= (subObjProbability + subArrayProbability)) {
- const auto arrayLength = rng->nextInt32(10);
- std::vector<Value> values;
- auto numSubObjs = 0;
- for (auto i = 0; i < arrayLength; i++) {
- // The probablilty of generating a sub-object goes down as we generate more
- // sub-objects and as the array position increases.
- if (!rng->nextInt32(2 + numSubObjs + i)) {
- MutableDocument subDoc;
-
- // Ensure that the depth is higher as we generate more sub-objects, to avoid
- // bloating up the document size exponentially.
- values.push_back(
- Value(generateDoc(rng, &subDoc, depthLevel + 1 + numSubObjs * 2)));
- ++numSubObjs;
- } else {
- values.push_back(getScalarFieldValue(rng));
- }
- }
+BSONObj createObjWithLargePrefix(const std::string& suffix);
- doc->addField(fieldName, Value(values));
- } else {
- doc->addField(fieldName, getScalarFieldValue(rng));
- }
- }
- return doc->freeze().toBson();
-}
+BSONObj generateDoc(PseudoRandom* rng, MutableDocument* doc, int depthLevel);
+BSONObj applyDiffTestHelper(BSONObj preImage, BSONObj diff);
} // namespace mongo::doc_diff
diff --git a/src/mongo/db/update/update_driver.cpp b/src/mongo/db/update/update_driver.cpp
index 2e0754b3f92..5e0fbbf2332 100644
--- a/src/mongo/db/update/update_driver.cpp
+++ b/src/mongo/db/update/update_driver.cpp
@@ -250,7 +250,9 @@ Status UpdateDriver::update(OperationContext* opCtx,
FieldRefSetWithStorage* modifiedPaths) {
// TODO: assert that update() is called at most once in a !_multi case.
- _affectIndices = (_updateType != UpdateType::kOperator && _indexedFields != nullptr);
+ _affectIndices =
+ (_updateType == UpdateType::kReplacement || _updateType == UpdateType::kPipeline) &&
+ (_indexedFields != nullptr);
_logDoc.reset();
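With this change, only replacement and pipeline updates are assumed up front to affect indexes; operator and delta updates defer the decision until the concrete set of modified paths (or the applied document delta) is known. A minimal sketch of that per-path check, assuming the UpdateIndexData interface the tests below exercise (addPath() and a mightBeIndexed()-style lookup), and not the literal driver code:

    #include <vector>
    #include "mongo/db/field_ref.h"
    #include "mongo/db/update_index_data.h"

    // Sketch: an update needs index maintenance only if some modified path could be
    // covered by an indexed path, e.g. a delta touching "a" versus an index on "a.p".
    bool anyPathAffectsIndexes(const std::vector<mongo::FieldRef>& modifiedPaths,
                               const mongo::UpdateIndexData& indexData) {
        for (const auto& path : modifiedPaths) {
            if (indexData.mightBeIndexed(path)) {
                return true;
            }
        }
        return false;
    }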
diff --git a/src/mongo/db/update/update_driver_test.cpp b/src/mongo/db/update/update_driver_test.cpp
index 743d3fbe67d..f490fec9a24 100644
--- a/src/mongo/db/update/update_driver_test.cpp
+++ b/src/mongo/db/update/update_driver_test.cpp
@@ -562,12 +562,14 @@ TEST_F(CreateFromQuery, NotFullShardKeyRepl) {
class ModifiedPathsTestFixture : public mongo::unittest::Test {
public:
- std::string getModifiedPaths(mutablebson::Document* doc,
- BSONObj updateSpec,
- StringData matchedField = StringData(),
- std::vector<BSONObj> arrayFilterSpec = {}) {
+ void runUpdate(mutablebson::Document* doc,
+ const write_ops::UpdateModification& updateSpec,
+ StringData matchedField = StringData(),
+ std::vector<BSONObj> arrayFilterSpec = {},
+ bool fromOplog = false,
+ UpdateIndexData* indexData = nullptr) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
- UpdateDriver driver(expCtx);
+ _driver = std::make_unique<UpdateDriver>(expCtx);
std::map<StringData, std::unique_ptr<ExpressionWithPlaceholder>> arrayFilters;
for (const auto& filter : arrayFilterSpec) {
@@ -576,89 +578,139 @@ public:
ASSERT(expr->getPlaceholder());
arrayFilters[expr->getPlaceholder().get()] = std::move(expr);
}
- driver.parse(updateSpec, arrayFilters);
+ _driver->setFromOplogApplication(fromOplog);
+ _driver->refreshIndexKeys(indexData);
+ _driver->parse(updateSpec, arrayFilters);
const bool validateForStorage = true;
const FieldRefSet emptyImmutablePaths;
const bool isInsert = false;
FieldRefSetWithStorage modifiedPaths;
- ASSERT_OK(driver.update(expCtx->opCtx,
- matchedField,
- doc,
- validateForStorage,
- emptyImmutablePaths,
- isInsert,
- nullptr,
- nullptr,
- &modifiedPaths));
-
- return modifiedPaths.toString();
+ ASSERT_OK(_driver->update(
+ expCtx->opCtx,
+ matchedField,
+ doc,
+ validateForStorage,
+ emptyImmutablePaths,
+ isInsert,
+ nullptr,
+ nullptr,
+ (_driver->type() == UpdateDriver::UpdateType::kOperator) ? &modifiedPaths : nullptr));
+
+ _modifiedPaths = modifiedPaths.toString();
}
-};
-TEST_F(ModifiedPathsTestFixture, ReplaceFullDocumentReturnsEmptySet) {
- BSONObj spec = fromjson("{a: 1, b: 1}}");
- mutablebson::Document doc(fromjson("{a: 0, b: 0}"));
- ASSERT_EQ(getModifiedPaths(&doc, spec), "{}");
-}
+ std::unique_ptr<UpdateDriver> _driver;
+ std::string _modifiedPaths;
+};
TEST_F(ModifiedPathsTestFixture, SetFieldInRoot) {
BSONObj spec = fromjson("{$set: {a: 1}}");
mutablebson::Document doc(fromjson("{a: 0}"));
- ASSERT_EQ(getModifiedPaths(&doc, spec), "{a}");
+ runUpdate(&doc, spec);
+ ASSERT_EQ(_modifiedPaths, "{a}");
}
TEST_F(ModifiedPathsTestFixture, IncFieldInRoot) {
BSONObj spec = fromjson("{$inc: {a: 1}}");
mutablebson::Document doc(fromjson("{a: 0}"));
- ASSERT_EQ(getModifiedPaths(&doc, spec), "{a}");
+ runUpdate(&doc, spec);
+ ASSERT_EQ(_modifiedPaths, "{a}");
}
TEST_F(ModifiedPathsTestFixture, UnsetFieldInRoot) {
BSONObj spec = fromjson("{$unset: {a: ''}}");
mutablebson::Document doc(fromjson("{a: 0}"));
- ASSERT_EQ(getModifiedPaths(&doc, spec), "{a}");
+ runUpdate(&doc, spec);
+ ASSERT_EQ(_modifiedPaths, "{a}");
}
TEST_F(ModifiedPathsTestFixture, UpdateArrayElement) {
BSONObj spec = fromjson("{$set: {'a.0.b': 1}}");
mutablebson::Document doc(fromjson("{a: [{b: 0}]}"));
- ASSERT_EQ(getModifiedPaths(&doc, spec), "{a.0.b}");
+ runUpdate(&doc, spec);
+ ASSERT_EQ(_modifiedPaths, "{a.0.b}");
}
TEST_F(ModifiedPathsTestFixture, SetBeyondTheEndOfArrayShouldReturnPathToArray) {
BSONObj spec = fromjson("{$set: {'a.1.b': 1}}");
mutablebson::Document doc(fromjson("{a: [{b: 0}]}"));
- ASSERT_EQ(getModifiedPaths(&doc, spec), "{a}");
+ runUpdate(&doc, spec);
+ ASSERT_EQ(_modifiedPaths, "{a}");
}
TEST_F(ModifiedPathsTestFixture, InsertingAndUpdatingArrayShouldReturnPathToArray) {
BSONObj spec = fromjson("{$set: {'a.0.b': 1, 'a.1.c': 2}}");
mutablebson::Document doc(fromjson("{a: [{b: 0}]}"));
- ASSERT_EQ(getModifiedPaths(&doc, spec), "{a}");
+ runUpdate(&doc, spec);
+ ASSERT_EQ(_modifiedPaths, "{a}");
spec = fromjson("{$set: {'a.10.b': 1, 'a.1.c': 2}}");
mutablebson::Document doc2(fromjson("{a: [{b: 0}, {b: 0}]}"));
- ASSERT_EQ(getModifiedPaths(&doc2, spec), "{a}");
+ runUpdate(&doc2, spec);
+ ASSERT_EQ(_modifiedPaths, "{a}");
}
TEST_F(ModifiedPathsTestFixture, UpdateWithPositionalOperator) {
BSONObj spec = fromjson("{$set: {'a.$': 1}}");
mutablebson::Document doc(fromjson("{a: [0, 1, 2]}"));
- ASSERT_EQ(getModifiedPaths(&doc, spec, "0"_sd), "{a.0}");
+ runUpdate(&doc, spec, "0"_sd);
+ ASSERT_EQ(_modifiedPaths, "{a.0}");
}
TEST_F(ModifiedPathsTestFixture, UpdateWithPositionalOperatorToNestedField) {
BSONObj spec = fromjson("{$set: {'a.$.b': 1}}");
mutablebson::Document doc(fromjson("{a: [{b: 1}, {b: 2}]}"));
- ASSERT_EQ(getModifiedPaths(&doc, spec, "1"_sd), "{a.1.b}");
+ runUpdate(&doc, spec, "1"_sd);
+ ASSERT_EQ(_modifiedPaths, "{a.1.b}");
}
TEST_F(ModifiedPathsTestFixture, ArrayFilterThatMatchesNoElements) {
BSONObj spec = fromjson("{$set: {'a.$[i]': 1}}");
BSONObj arrayFilter = fromjson("{i: 0}");
mutablebson::Document doc(fromjson("{a: [1, 2, 3]}"));
- ASSERT_EQ(getModifiedPaths(&doc, spec, ""_sd, {arrayFilter}), "{a}");
+ runUpdate(&doc, spec, ""_sd, {arrayFilter});
+ ASSERT_EQ(_modifiedPaths, "{a}");
+}
+
+
+TEST_F(ModifiedPathsTestFixture, ReplaceFullDocumentAlwaysAffectsIndex) {
+    BSONObj spec = fromjson("{a: 1, b: 1}");
+ mutablebson::Document doc(fromjson("{a: 0, b: 0}"));
+ runUpdate(&doc, spec);
+ ASSERT_EQ(_modifiedPaths, "{}");
+}
+
+
+TEST_F(ModifiedPathsTestFixture, PipelineUpdatesAlwaysAffectsIndex) {
+ BSONObj spec = fromjson("{$set: {'a.1.b': 1}}");
+ mutablebson::Document doc(fromjson("{a: [{b: 0}]}"));
+ runUpdate(&doc, std::vector<BSONObj>{spec});
+ ASSERT(_driver->modsAffectIndices());
+}
+
+TEST_F(ModifiedPathsTestFixture, DeltaUpdateNotAffectingIndex) {
+ BSONObj spec = fromjson("{d: {a: false}}");
+ mutablebson::Document doc(fromjson("{a: [{b: 0}]}"));
+ runUpdate(&doc, write_ops::UpdateModification(spec, {}), ""_sd, {}, true /* fromOplog */);
+ ASSERT(!_driver->modsAffectIndices());
+
+ UpdateIndexData indexData;
+ indexData.addPath(FieldRef("p"));
+ runUpdate(
+ &doc, write_ops::UpdateModification(spec, {}), ""_sd, {}, true /* fromOplog */, &indexData);
+ ASSERT(!_driver->modsAffectIndices());
+}
+
+TEST_F(ModifiedPathsTestFixture, DeltaUpdateAffectingIndex) {
+ BSONObj spec = fromjson("{u: {a: 1}}");
+ mutablebson::Document doc(fromjson("{a: [{b: 0}]}"));
+ UpdateIndexData indexData;
+ indexData.addPath(FieldRef("q"));
+ indexData.addPath(FieldRef("a.p"));
+ runUpdate(
+ &doc, write_ops::UpdateModification(spec, {}), ""_sd, {}, true /* fromOplog */, &indexData);
+ ASSERT(_driver->modsAffectIndices());
}
} // namespace
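For context on the last two tests: the delta specs use the v2 oplog diff format, in which a top-level 'd' section names fields to delete and a 'u' section names fields whose values are overwritten, so a delta only needs index maintenance when one of those paths can intersect an indexed path. Restating the test inputs as an illustration (not code taken from this patch):

    // DeltaUpdateNotAffectingIndex: {d: {a: false}} deletes field 'a'; the only indexed
    // path is 'p', which cannot intersect 'a', so modsAffectIndices() stays false.
    const BSONObj deleteDelta = fromjson("{d: {a: false}}");

    // DeltaUpdateAffectingIndex: {u: {a: 1}} overwrites 'a'; the indexed path 'a.p' is a
    // descendant of 'a', so modsAffectIndices() must report true.
    const BSONObj updateDelta = fromjson("{u: {a: 1}}");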