author    Tommaso Tocci <tommaso.tocci@mongodb.com>          2020-04-13 18:57:35 +0200
committer Evergreen Agent <no-reply@evergreen.mongodb.com>   2020-04-27 15:28:35 +0000
commit    1837069a3456c1398a428cf6ffc57d89b148fb53 (patch)
tree      926140eaec2887f128d188ec03d57dabada0aed4 /jstests/sharding
parent    2f7abf486d3fc0f785cef1df949ae7fb80c8ba58 (diff)
download  mongo-1837069a3456c1398a428cf6ffc57d89b148fb53.tar.gz
SERVER-47588 Refactor and rename sharding/findandmodify2 test
Diffstat (limited to 'jstests/sharding')
-rw-r--r--  jstests/sharding/findandmodify2.js           124
-rw-r--r--  jstests/sharding/findandmodify_autosplit.js   94
2 files changed, 94 insertions(+), 124 deletions(-)
diff --git a/jstests/sharding/findandmodify2.js b/jstests/sharding/findandmodify2.js
deleted file mode 100644
index 17af6c1a685..00000000000
--- a/jstests/sharding/findandmodify2.js
+++ /dev/null
@@ -1,124 +0,0 @@
-(function() {
-'use strict';
-load('jstests/sharding/autosplit_include.js');
-
-var s = new ShardingTest({shards: 2, mongos: 1, other: {chunkSize: 1, enableAutoSplit: true}});
-assert.commandWorked(s.s0.adminCommand({enablesharding: "test"}));
-
-var db = s.getDB("test");
-s.ensurePrimaryShard('test', s.shard1.shardName);
-var primary = s.getPrimaryShard("test").getDB("test");
-var secondary = s.getOther(primary).getDB("test");
-
-var n = 100;
-var collection = "stuff";
-var minChunks = 2;
-
-var col_update = collection + '_col_update';
-var col_update_upsert = col_update + '_upsert';
-var col_fam = collection + '_col_fam';
-var col_fam_upsert = col_fam + '_upsert';
-
-var big = "x";
-for (var i = 0; i < 15; i++) {
- big += big;
-}
-
-// drop the collection
-db[col_update].drop();
-db[col_update_upsert].drop();
-db[col_fam].drop();
-db[col_fam_upsert].drop();
-
-// shard the collection on _id
-s.adminCommand({shardcollection: 'test.' + col_update, key: {_id: 1}});
-s.adminCommand({shardcollection: 'test.' + col_update_upsert, key: {_id: 1}});
-s.adminCommand({shardcollection: 'test.' + col_fam, key: {_id: 1}});
-s.adminCommand({shardcollection: 'test.' + col_fam_upsert, key: {_id: 1}});
-
-// update via findAndModify
-function via_fam() {
- for (var i = 0; i < n; i++) {
- db[col_fam].save({_id: i});
- }
-
- for (var i = 0; i < n; i++) {
- db[col_fam].findAndModify({query: {_id: i}, update: {$set: {big: big}}});
- }
-}
-
-// upsert via findAndModify
-function via_fam_upsert() {
- for (var i = 0; i < n; i++) {
- db[col_fam_upsert].findAndModify(
- {query: {_id: i}, update: {$set: {big: big}}, upsert: true});
- }
-}
-
-// update data using basic update
-function via_update() {
- for (var i = 0; i < n; i++) {
- db[col_update].save({_id: i});
- }
-
- for (var i = 0; i < n; i++) {
- db[col_update].update({_id: i}, {$set: {big: big}});
- }
-}
-
-// upsert data using basic update
-function via_update_upsert() {
- for (var i = 0; i < n; i++) {
- db[col_update_upsert].update({_id: i}, {$set: {big: big}}, true);
- }
-}
-
-print("---------- Update via findAndModify...");
-via_fam();
-waitForOngoingChunkSplits(s);
-
-print("---------- Done.");
-
-print("---------- Upsert via findAndModify...");
-via_fam_upsert();
-waitForOngoingChunkSplits(s);
-
-print("---------- Done.");
-
-print("---------- Basic update...");
-via_update();
-waitForOngoingChunkSplits(s);
-
-print("---------- Done.");
-
-print("---------- Basic update with upsert...");
-via_update_upsert();
-waitForOngoingChunkSplits(s);
-
-print("---------- Done.");
-
-print("---------- Printing chunks:");
-s.printChunks();
-
-print("---------- Verifying that both codepaths resulted in splits...");
-assert.gte(s.config.chunks.count({"ns": "test." + col_fam}),
- minChunks,
- "findAndModify update code path didn't result in splits");
-assert.gte(s.config.chunks.count({"ns": "test." + col_fam_upsert}),
- minChunks,
- "findAndModify upsert code path didn't result in splits");
-assert.gte(s.config.chunks.count({"ns": "test." + col_update}),
- minChunks,
- "update code path didn't result in splits");
-assert.gte(s.config.chunks.count({"ns": "test." + col_update_upsert}),
- minChunks,
- "upsert code path didn't result in splits");
-
-printjson(db[col_update].stats());
-
-// ensure that all chunks are smaller than chunkSize
-// make sure not teensy
-// test update without upsert and with upsert
-
-s.stop();
-})();
diff --git a/jstests/sharding/findandmodify_autosplit.js b/jstests/sharding/findandmodify_autosplit.js
new file mode 100644
index 00000000000..bfe6fd82584
--- /dev/null
+++ b/jstests/sharding/findandmodify_autosplit.js
@@ -0,0 +1,94 @@
+/*
+ * Tests that the autosplitter makes at least one split
+ */
+(function() {
+'use strict';
+load('jstests/sharding/autosplit_include.js');
+
+var st = new ShardingTest({shards: 1, mongos: 1, other: {chunkSize: 1, enableAutoSplit: true}});
+
+/* Return total number of chunks for a specific collection */
+function getNumChunksForColl(coll) {
+ const chunks = st.getDB('config').getCollection('chunks');
+ return chunks.countDocuments({ns: coll.getFullName()});
+}
+
+/* Return a collection named @collName sharded on `_id` */
+function setupCollection(collName) {
+ let coll = db.getCollection(collName);
+ st.shardColl(coll, {_id: 1}, false /* split */, false /* move */);
+ return coll;
+}
+
+const n = 64;
+const minChunks = 2;
+const big = 'x'.repeat(32768); // 32 KB
+const db = st.getDB('test');
+const collPrefix = 'update_and_autosplit_via_';
+
+jsTestLog('Update via findAndModify');
+{
+ let coll = setupCollection(collPrefix + 'fam');
+
+ let bulk = coll.initializeUnorderedBulkOp();
+ for (let i = 0; i < n; i++) {
+ bulk.insert({_id: i});
+ }
+ assert.commandWorked(bulk.execute());
+
+ for (var i = 0; i < n; i++) {
+ coll.findAndModify({query: {_id: i}, update: {$set: {big: big}}});
+ }
+
+ waitForOngoingChunkSplits(st);
+ assert.gte(getNumChunksForColl(coll),
+ minChunks,
+ "findAndModify update code path didn't result in splits");
+}
+
+jsTestLog("Upsert via findAndModify");
+{
+ let coll = setupCollection(collPrefix + 'fam_upsert');
+
+ for (let i = 0; i < n; i++) {
+ coll.findAndModify({query: {_id: i}, update: {$set: {big: big}}, upsert: true});
+ }
+
+ waitForOngoingChunkSplits(st);
+ assert.gte(getNumChunksForColl(coll),
+ minChunks,
+ "findAndModify upsert code path didn't result in splits");
+}
+
+jsTestLog("Basic update");
+{
+ let coll = setupCollection(collPrefix + 'update');
+
+ let bulk = coll.initializeUnorderedBulkOp();
+ for (let i = 0; i < n; i++) {
+ bulk.insert({_id: i});
+ }
+ assert.commandWorked(bulk.execute());
+
+ for (let i = 0; i < n; i++) {
+ assert.commandWorked(coll.update({_id: i}, {$set: {big: big}}));
+ }
+
+ waitForOngoingChunkSplits(st);
+ assert.gte(getNumChunksForColl(coll), minChunks, "update code path didn't result in splits");
+}
+
+jsTestLog("Basic update with upsert");
+{
+ let coll = setupCollection(collPrefix + 'update_upsert');
+
+ for (var i = 0; i < n; i++) {
+ assert.commandWorked(coll.update({_id: i}, {$set: {big: big}}, true));
+ }
+
+ waitForOngoingChunkSplits(st);
+ assert.gte(getNumChunksForColl(coll), minChunks, "upsert code path didn't result in splits");
+}
+
+st.stop();
+})();