author     Ruoxin Xu <ruoxin.xu@mongodb.com>                 2020-08-30 20:40:26 +0100
committer  Evergreen Agent <no-reply@evergreen.mongodb.com>  2020-09-02 10:59:45 +0000
commit     5154eca6d3a3489d7cc537453cb8672524242085 (patch)
tree       288a05f05cf5fc42ae8dfaef8ef3905e4af5ac5b /jstests
parent     69145064d3315c1f89bd7abfacde0087c5845501 (diff)
SERVER-50208 Modify existing update tests to guarantee $v:2 oplog entries generation
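Background for the change: a pipeline-style update can be logged in the oplog either as a full-document replacement or, with the $v:2 ("delta") format, as a compact diff of only the fields that changed. The server picks the smaller encoding, so tiny test documents may still be logged as replacements; padding them with a largeStr field makes the delta strictly smaller than the post-image and so guarantees $v:2 entries. A minimal shell sketch of how one might verify the entry shape (not part of this commit; assumes a single-node replica set and a hypothetical test.delta_check collection):

    // Insert a padded document so the delta is smaller than a full replacement.
    const coll = db.getSiblingDB("test").delta_check;
    assert.commandWorked(coll.insert({_id: 1, x: 1, largeStr: "x".repeat(1000)}));

    // Pipeline-style update which adds a single small field.
    assert.commandWorked(coll.update({_id: 1}, [{$set: {foo: 4}}]));

    // The newest update entry for the namespace should carry a $v:2 delta,
    // roughly {$v: 2, diff: {i: {foo: 4}}}, rather than the whole post-image.
    const entry = db.getSiblingDB("local")
                      .oplog.rs.find({op: "u", ns: "test.delta_check"})
                      .sort({ts: -1})
                      .limit(1)
                      .next();
    printjson(entry.o);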
Diffstat (limited to 'jstests')
-rw-r--r--  jstests/concurrency/fsm_workloads/update_inc_pipeline.js  | 12
-rw-r--r--  jstests/core/update_with_pipeline.js                       | 21
-rw-r--r--  jstests/sharding/update_sharded.js                         |  8
3 files changed, 29 insertions, 12 deletions
diff --git a/jstests/concurrency/fsm_workloads/update_inc_pipeline.js b/jstests/concurrency/fsm_workloads/update_inc_pipeline.js
index bb4d78d7bcd..95fdc674ab1 100644
--- a/jstests/concurrency/fsm_workloads/update_inc_pipeline.js
+++ b/jstests/concurrency/fsm_workloads/update_inc_pipeline.js
@@ -16,5 +16,17 @@ var $config = extendWorkload($config, function($config, $super) {
$config.data.update_inc = "update_inc_pipeline";
+ $config.setup = function(db, collName, cluster) {
+ // Add 'largeStr' to the documents in order to make pipeline-based updates generate delta
+ // oplog entries.
+ var doc = {_id: this.id, largeStr: '*'.repeat(128)};
+
+ // Pre-populate the fields we need to avoid size change for capped collections.
+ for (var i = 0; i < this.threadCount; ++i) {
+ doc['t' + i] = 0;
+ }
+ assert.commandWorked(db[collName].insert(doc));
+ };
+
return $config;
});
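A note on the pre-populated t0..tN fields above: on server versions of this era, a document in a capped collection may not change size on update, so an update that would create a missing counter field is rejected; creating every per-thread field up front keeps each increment size-neutral. An illustrative sketch (hypothetical capped_demo collection, not part of this commit):

    // Growing a capped document fails, so counter fields must exist up front.
    const demoDB = db.getSiblingDB("test");
    demoDB.capped_demo.drop();
    assert.commandWorked(demoDB.createCollection("capped_demo", {capped: true, size: 4096}));
    assert.commandWorked(demoDB.capped_demo.insert({_id: 0, t0: 0}));
    // Incrementing an existing numeric field keeps the size fixed and succeeds.
    assert.commandWorked(demoDB.capped_demo.update({_id: 0}, {$inc: {t0: 1}}));
    // Setting a brand-new field grows the document and is rejected here.
    assert.writeError(demoDB.capped_demo.update({_id: 0}, {$set: {t1: 1}}));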
diff --git a/jstests/core/update_with_pipeline.js b/jstests/core/update_with_pipeline.js
index f977e487a11..fb665d2c6ce 100644
--- a/jstests/core/update_with_pipeline.js
+++ b/jstests/core/update_with_pipeline.js
@@ -62,12 +62,15 @@ function testUpsertDoesInsert(query, update, resultDoc) {
assert.eq(coll.findOne({}), resultDoc, coll.find({}).toArray());
}
+// This can be used to make sure pipeline-based updates generate delta oplog entries.
+const largeStr = "x".repeat(1000);
+
// Update with existing document.
testUpdate({
query: {_id: 1},
- initialDocumentList: [{_id: 1, x: 1}],
+ initialDocumentList: [{_id: 1, x: 1, largeStr: largeStr}],
update: [{$set: {foo: 4}}],
- resultDocList: [{_id: 1, x: 1, foo: 4}],
+ resultDocList: [{_id: 1, x: 1, largeStr: largeStr, foo: 4}],
nModified: 1
});
testUpdate({
@@ -79,25 +82,26 @@ testUpdate({
});
testUpdate({
query: {_id: 1},
- initialDocumentList: [{_id: 1, x: 1, y: [{z: 1, foo: 1}]}],
+ initialDocumentList: [{_id: 1, x: 1, y: [{z: 1, foo: 1}], largeStr: largeStr}],
update: [{$unset: ["x", "y.z"]}],
- resultDocList: [{_id: 1, y: [{foo: 1}]}],
+ resultDocList: [{_id: 1, y: [{foo: 1}], largeStr: largeStr}],
nModified: 1
});
testUpdate({
query: {_id: 1},
- initialDocumentList: [{_id: 1, x: 1, t: {u: {v: 1}}}],
+ initialDocumentList: [{_id: 1, x: 1, t: {u: {v: 1}, largeStr: largeStr}}],
update: [{$replaceWith: "$t"}],
- resultDocList: [{_id: 1, u: {v: 1}}],
+ resultDocList: [{_id: 1, u: {v: 1}, largeStr: largeStr}],
nModified: 1
});
// Multi-update.
testUpdate({
query: {x: 1},
- initialDocumentList: [{_id: 1, x: 1}, {_id: 2, x: 1}],
+ initialDocumentList: [{_id: 1, x: 1, largeStr: largeStr}, {_id: 2, x: 1, largeStr: largeStr}],
update: [{$set: {bar: 4}}],
- resultDocList: [{_id: 1, x: 1, bar: 4}, {_id: 2, x: 1, bar: 4}],
+ resultDocList:
+ [{_id: 1, x: 1, largeStr: largeStr, bar: 4}, {_id: 2, x: 1, largeStr: largeStr, bar: 4}],
nModified: 2,
options: {multi: true}
});
@@ -236,7 +240,6 @@ testUpdate({
nModified: 1
});
-const largeStr = "x".repeat(1000);
testUpdate({
query: {_id: 1},
initialDocumentList: [{_id: 1, x: 1}],
diff --git a/jstests/sharding/update_sharded.js b/jstests/sharding/update_sharded.js
index ed4bdc1b406..48c615c533b 100644
--- a/jstests/sharding/update_sharded.js
+++ b/jstests/sharding/update_sharded.js
@@ -209,12 +209,14 @@ function testNestedShardKeys(collName, keyPattern) {
assert.commandWorked(sessionColl.update({_id: 15, skey: 1}, {$unset: {skey: 1}}));
assert.docEq(sessionColl.findOne({_id: 15}), {_id: 15});
+ // This can be used to make sure pipeline-based updates generate delta oplog entries.
+ const largeStr = '*'.repeat(128);
// For pipeline style.
- assert.commandWorked(coll.insert({_id: 16, skey: {skey: 1}}));
+ assert.commandWorked(coll.insert({_id: 16, skey: {skey: 1}, largeStr: largeStr}));
assert.commandWorked(sessionColl.update({_id: 16, "skey.skey": 1}, [{$unset: "skey.skey"}]));
- assert.docEq(sessionColl.findOne({_id: 16}), {_id: 16, skey: {}});
+ assert.docEq(sessionColl.findOne({_id: 16}), {_id: 16, skey: {}, largeStr: largeStr});
assert.commandWorked(sessionColl.update({_id: 16, skey: {}}, [{$unset: "skey"}]));
- assert.docEq(sessionColl.findOne({_id: 16}), {_id: 16});
+ assert.docEq(sessionColl.findOne({_id: 16}), {_id: 16, largeStr: largeStr});
}
testNestedShardKeys("update_nested", {"skey.skey": 1});
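For the sharded case, the delta entry lands in the oplog of the shard that owns the document. A rough sketch of checking it from inside the test (hypothetical handles: assumes the ShardingTest is bound to st and the owning shard is st.rs0):

    // The $unset of the nested "skey.skey" should yield a delta entry whose
    // object looks roughly like {$v: 2, diff: {sskey: {d: {skey: false}}}}.
    const shardOplog = st.rs0.getPrimary().getDB("local").oplog.rs;
    const entry = shardOplog.find({op: "u", ns: sessionColl.getFullName()})
                      .sort({ts: -1})
                      .limit(1)
                      .next();
    printjson(entry.o);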