author     Benety Goh <benety@mongodb.com>                   2021-05-27 16:24:06 -0400
committer  Evergreen Agent <no-reply@evergreen.mongodb.com>  2021-05-27 20:59:44 +0000
commit     ec30838ab7df84a1f4077de0eca08b3dc8740412 (patch)
tree       6a651e931f7d953975c1b7e027f10ce1aba7cda8
parent     fb3b1cdfd6dac3013c989f8dfdbb06e029ddf62c (diff)
download   mongo-ec30838ab7df84a1f4077de0eca08b3dc8740412.tar.gz
SERVER-57234 clean up uniqueness.js to use different collection name for each test case
-rw-r--r--  jstests/core/uniqueness.js | 47
1 file changed, 21 insertions(+), 26 deletions(-)
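
For context, the pattern this commit applies is sketched below: instead of sharing a few hard-coded collections (jstests_uniqueness, jstests_uniqueness2, checkDupErrorMessage), every test case takes a fresh collection whose name is built from a common prefix plus a counter. This is an illustrative sketch only, not part of the commit; the getNewColl() helper is a hypothetical wrapper, whereas the diff simply inlines the same two lines before each case.

// Illustrative sketch, not part of the commit: per-test-case collections in a
// mongo shell jstest, assuming the usual global `db` and the assert helpers.
(function() {
"use strict";

const collNamePrefix = 'jstests_uniqueness_';
let collCount = 0;

// Hypothetical helper: returns a freshly named, empty collection per test case.
function getNewColl() {
    const coll = db.getCollection(collNamePrefix + collCount++);
    coll.drop();
    return coll;
}

// Example case: building a unique index over duplicate values fails with E11000.
let t = getNewColl();
assert.commandWorked(t.insert({_id: 0, a: 3}));
assert.commandWorked(t.insert({_id: 1, a: 3}));
const res = t.createIndex({a: 1}, {unique: true});
assert.commandFailed(res);
assert(res.errmsg.match(/E11000/));
})();

The likely motivation is isolation: giving each case its own collection keeps one case's documents from skewing the next case's assertions and makes any leftover collection easy to attribute to the case that created it.
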
diff --git a/jstests/core/uniqueness.js b/jstests/core/uniqueness.js
index af4dfb1507f..f86da4e7abd 100644
--- a/jstests/core/uniqueness.js
+++ b/jstests/core/uniqueness.js
@@ -11,10 +11,11 @@
(function() {
"use strict";
-var res;
-
-let t = db.jstests_uniqueness;
+let res;
+const collNamePrefix = 'jstests_uniqueness_';
+let collCount = 0;
+let t = db.getCollection(collNamePrefix + collCount++);
t.drop();
// test uniqueness of _id
@@ -36,33 +37,25 @@ assert.writeError(res);
assert(t.findOne({_id: 4}));
// Check for an error message when we index and there are dups
-db.jstests_uniqueness2.drop();
-db.jstests_uniqueness2.insert({a: 3});
-db.jstests_uniqueness2.insert({a: 3});
-assert.eq(2, db.jstests_uniqueness2.count());
-res = db.jstests_uniqueness2.createIndex({a: 1}, true);
-assert.commandFailed(res);
-assert(res.errmsg.match(/E11000/));
-
-// Check for an error message when we index in the background and there are dups
-db.jstests_uniqueness2.drop();
-db.jstests_uniqueness2.insert({a: 3});
-db.jstests_uniqueness2.insert({a: 3});
-assert.eq(2, db.jstests_uniqueness2.count());
-res = db.jstests_uniqueness2.createIndex({a: 1}, {unique: true, background: true});
+t = db.getCollection(collNamePrefix + collCount++);
+t.drop();
+assert.commandWorked(t.insert({_id: 0, a: 3}));
+assert.commandWorked(t.insert({_id: 1, a: 3}));
+assert.eq(2, t.find().itcount());
+res = t.createIndex({a: 1}, true);
assert.commandFailed(res);
assert(res.errmsg.match(/E11000/));
// Verify that duplicate key errors follow a fixed format, including field information.
-const coll = db.checkDupErrorMessage;
+t = db.getCollection(collNamePrefix + collCount++);
+t.drop();
const key = {
_id: 1
};
const expectedMessage =
- 'E11000 duplicate key error collection: ' + coll + ' index: _id_ dup key: { _id: 1.0 }';
-coll.drop();
-assert.commandWorked(coll.insert(key));
-res = coll.insert(key);
+ 'E11000 duplicate key error collection: ' + t + ' index: _id_ dup key: { _id: 1.0 }';
+assert.commandWorked(t.insert(key));
+res = t.insert(key);
assert.commandFailedWithCode(res, ErrorCodes.DuplicateKey);
assert.eq(res.nInserted, 0, tojson(res));
const writeError = res.getWriteError();
@@ -73,14 +66,16 @@ assert.eq(writeError.errmsg,
/* Check that if we update and remove _id, it gets added back by the DB */
/* - test when object grows */
+t = db.getCollection(collNamePrefix + collCount++);
t.drop();
-t.save({_id: 'Z'});
-t.update({}, {k: 2});
+assert.commandWorked(t.save({_id: 'Z'}));
+assert.commandWorked(t.update({}, {k: 2}));
assert.eq('Z', t.findOne()._id, "uniqueness.js problem with adding back _id");
/* - test when doesn't grow */
+t = db.getCollection(collNamePrefix + collCount++);
t.drop();
-t.save({_id: 'Z', k: 3});
-t.update({}, {k: 2});
+assert.commandWorked(t.save({_id: 'Z', k: 3}));
+assert.commandWorked(t.update({}, {k: 2}));
assert.eq('Z', t.findOne()._id, "uniqueness.js problem with adding back _id (2)");
})();