author | Max Hirschhorn <max.hirschhorn@mongodb.com> | 2018-05-25 22:07:24 -0400
committer | Max Hirschhorn <max.hirschhorn@mongodb.com> | 2018-05-25 22:07:24 -0400
commit | 25f72cf5f1d0d894680ef855f646e27f234ce6a3 (patch)
tree | 6d6db543f58eaf808332c50d3491a1e24ddffdf9 /src/mongo/shell
parent | f0e5229b631668c9bde511e607bf52fd871e582d (diff)
download | mongo-25f72cf5f1d0d894680ef855f646e27f234ce6a3.tar.gz
SERVER-34779 Add hook for checking dbhash while a test is running.
Enables the CheckReplDBHashInBackground hook in the
replica_sets_jscore_passthrough.yml test suite.
Also fixes the WTPreserveSnapshotHistoryIndefinitely failpoint to ignore
attempts from _decreaseTargetSnapshotWindowSize() to move the oldest
timestamp forward.
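The helpers this patch adds to ReplSetTest (in the replsettest.js hunk below) are the shell-side building blocks for such a hook: getHashesUsingSessions() runs dbHash through a set of sessions, and getCollectionDiffUsingSessions() pinpoints differing documents. The following is only a rough, hypothetical sketch of how they can be driven; it is not the CheckReplDBHashInBackground hook itself (which lives outside src/mongo/shell), and names such as 'rst' and the 'test.mycoll' namespace are placeholders.

```js
// Illustrative sketch only -- not the actual hook implementation.
const rst = new ReplSetTest({nodes: 2});
rst.startSet();
rst.initiate();

const primary = rst.getPrimary();
assert.writeOK(primary.getDB("test").mycoll.insert({_id: 1}));
rst.awaitReplication();

// One session per data-bearing member; getHashesUsingSessions() runs dbHash through them.
const sessions = [primary, rst.getSecondary()].map(conn => conn.startSession());
const [primaryHashes, secondaryHashes] = rst.getHashesUsingSessions(sessions, "test");

// Compare the per-collection hashes returned by the dbHash command.
for (let collName of Object.keys(primaryHashes.collections)) {
    assert.eq(primaryHashes.collections[collName],
              secondaryHashes.collections[collName],
              "dbhash mismatch for collection " + collName);
}

sessions.forEach(session => session.endSession());
rst.stopSet();
```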
Diffstat (limited to 'src/mongo/shell')
-rw-r--r-- | src/mongo/shell/assert.js | 14
-rw-r--r-- | src/mongo/shell/replsettest.js | 228
-rw-r--r-- | src/mongo/shell/utils.js | 16
3 files changed, 174 insertions, 84 deletions
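The assert.js and utils.js changes below also introduce a traceExceptions test option (defaulting to true) that controls whether doassert() prints the assertion message and stack trace. A small illustrative sketch of using it, assuming the shell was launched by the test runner (so the global TestData exists) and jsTestOptions() has not yet been evaluated and cached:

```js
// Illustrative sketch only: suppress doassert()'s printing for an expected failure.
TestData.traceExceptions = false;  // picked up by jsTestOptions() in utils.js

try {
    assert.eq(1, 2);  // still throws, but neither the message nor the stack trace is printed
} catch (e) {
    print("caught expected assertion: " + e);
}
```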
diff --git a/src/mongo/shell/assert.js b/src/mongo/shell/assert.js
index bfb3a25bc17..dbd1b01c2cf 100644
--- a/src/mongo/shell/assert.js
+++ b/src/mongo/shell/assert.js
@@ -6,10 +6,12 @@ doassert = function(msg, obj) {
     if (typeof(msg) == "object")
         msg = tojson(msg);
 
-    if (typeof(msg) == "string" && msg.indexOf("assert") == 0)
-        print(msg);
-    else
-        print("assert: " + msg);
+    if (jsTest.options().traceExceptions) {
+        if (typeof(msg) == "string" && msg.indexOf("assert") == 0)
+            print(msg);
+        else
+            print("assert: " + msg);
+    }
 
     var ex;
     if (obj) {
@@ -17,7 +19,9 @@ doassert = function(msg, obj) {
     } else {
         ex = Error(msg);
     }
-    print(ex.stack);
+    if (jsTest.options().traceExceptions) {
+        print(ex.stack);
+    }
 
     throw ex;
 };
diff --git a/src/mongo/shell/replsettest.js b/src/mongo/shell/replsettest.js
index e077871aec7..f823c6f9688 100644
--- a/src/mongo/shell/replsettest.js
+++ b/src/mongo/shell/replsettest.js
@@ -1395,37 +1395,147 @@ var ReplSetTest = function(opts) {
         }, "awaiting replication", timeout);
     };
 
+    this.getHashesUsingSessions = function(sessions, dbName, {
+        filterCapped: filterCapped = true,
+        filterMapReduce: filterMapReduce = true,
+    } = {}) {
+        return sessions.map(session => {
+            const db = session.getDatabase(dbName);
+            const res = assert.commandWorked(db.runCommand({dbHash: 1}));
+
+            // The "capped" field in the dbHash command response is new as of MongoDB 4.0.
+            const cappedCollections = new Set(filterCapped ? res.capped : []);
+
+            for (let collName of Object.keys(res.collections)) {
+                // Capped collections are not necessarily truncated at the same points across
+                // replica set members and may therefore not have the same md5sum. We remove them
+                // from the dbHash command response to avoid an already known case of a mismatch.
+                // See SERVER-16049 for more details.
+                //
+                // If a map-reduce operation is interrupted by the server stepping down, then an
+                // unreplicated "tmp.mr." collection may be left behind. We remove it from the
+                // dbHash command response to avoid an already known case of a mismatch.
+                // TODO SERVER-27147: Stop filtering out "tmp.mr." collections.
+                if (cappedCollections.has(collName) ||
+                    (filterMapReduce && collName.startsWith("tmp.mr."))) {
+                    delete res.collections[collName];
+                    // The "uuids" field in the dbHash command response is new as of MongoDB 4.0.
+                    if (res.hasOwnProperty("uuids")) {
+                        delete res.uuids[collName];
+                    }
+                }
+            }
+
+            return res;
+        });
+    };
+
+    this.getCollectionDiffUsingSessions = function(
+        primarySession, secondarySession, dbName, collNameOrUUID) {
+        function PeekableCursor(cursor) {
+            let _stashedDoc;
+
+            this.hasNext = function hasNext() {
+                return cursor.hasNext();
+            };
+
+            this.peekNext = function peekNext() {
+                if (_stashedDoc === undefined) {
+                    _stashedDoc = cursor.next();
+                }
+                return _stashedDoc;
+            };
+
+            this.next = function next() {
+                const result = (_stashedDoc === undefined) ? cursor.next() : _stashedDoc;
+                _stashedDoc = undefined;
+                return result;
+            };
+        }
+
+        const docsWithDifferentContents = [];
+        const docsMissingOnPrimary = [];
+        const docsMissingOnSecondary = [];
+
+        const primaryDB = primarySession.getDatabase(dbName);
+        const secondaryDB = secondarySession.getDatabase(dbName);
+
+        const primaryCursor = new PeekableCursor(new DBCommandCursor(
+            primaryDB, primaryDB.runCommand({find: collNameOrUUID, sort: {_id: 1}})));
+
+        const secondaryCursor = new PeekableCursor(new DBCommandCursor(
+            secondaryDB, secondaryDB.runCommand({find: collNameOrUUID, sort: {_id: 1}})));
+
+        while (primaryCursor.hasNext() && secondaryCursor.hasNext()) {
+            const primaryDoc = primaryCursor.peekNext();
+            const secondaryDoc = secondaryCursor.peekNext();
+
+            if (bsonBinaryEqual(primaryDoc, secondaryDoc)) {
+                // The same document was found on the primary and secondary so we just move on to
+                // the next document for both cursors.
+                primaryCursor.next();
+                secondaryCursor.next();
+                continue;
+            }
+
+            const ordering = bsonWoCompare({_: primaryDoc._id}, {_: secondaryDoc._id});
+            if (ordering === 0) {
+                // The documents have the same _id but have different contents.
+                docsWithDifferentContents.push({primary: primaryDoc, secondary: secondaryDoc});
+                primaryCursor.next();
+                secondaryCursor.next();
+            } else if (ordering < 0) {
+                // The primary's next document has a smaller _id than the secondary's next document.
+                // Since we are iterating the documents in ascending order by their _id, we'll never
+                // see a document with 'primaryDoc._id' on the secondary.
+                docsMissingOnSecondary.push(primaryDoc);
+                primaryCursor.next();
+            } else if (ordering > 0) {
+                // The primary's next document has a larger _id than the secondary's next document.
+                // Since we are iterating the documents in ascending order by their _id, we'll never
+                // see a document with 'secondaryDoc._id' on the primary.
+                docsMissingOnPrimary.push(secondaryDoc);
+                secondaryCursor.next();
+            }
+        }
+
+        while (primaryCursor.hasNext()) {
+            // We've exhausted the secondary's cursor already, so everything remaining from the
+            // primary's cursor must be missing from secondary.
+            docsMissingOnSecondary.push(primaryCursor.next());
+        }
+
+        while (secondaryCursor.hasNext()) {
+            // We've exhausted the primary's cursor already, so everything remaining from the
+            // secondary's cursor must be missing from primary.
+            docsMissingOnPrimary.push(secondaryCursor.next());
+        }
+
+        return {docsWithDifferentContents, docsMissingOnPrimary, docsMissingOnSecondary};
+    };
+
     // Gets the dbhash for the current primary and for all secondaries (or the members of 'slaves',
     // if specified).
-    this.getHashes = function(db, slaves) {
-        assert.neq(db, 'local', 'Cannot run getHashes() on the "local" database');
+    this.getHashes = function(dbName, slaves) {
+        assert.neq(dbName, 'local', 'Cannot run getHashes() on the "local" database');
 
         // getPrimary() repopulates 'self._slaves'.
         this.getPrimary();
-        var res = {};
         slaves = slaves || this._slaves;
 
-        // If MapReduce is interrupted by a stepdown, it could still have 'tmp.mr' collections that
-        // it will not be able to delete. Excluding them from dbhash will prevent a mismatch.
-        // TODO SERVER-27147: no need to exclude 'tmp.mr' collections
-        var collections = this._master.getDB(db).getCollectionNames();
-        var colls_excluding_tmp_mr = collections.filter(coll => !coll.startsWith("tmp.mr."));
-        res.master =
-            this._master.getDB(db).runCommand({dbhash: 1, collections: colls_excluding_tmp_mr});
-        res.slaves = [];
-        slaves.forEach(function(node) {
-            var isArbiter = node.getDB('admin').isMaster('admin').arbiterOnly;
-            if (!isArbiter) {
-                collections = node.getDB(db).getCollectionNames();
-                colls_excluding_tmp_mr = collections.filter(coll => {
-                    return !coll.startsWith("tmp.mr.");
-                });
-                var slaveRes =
-                    node.getDB(db).runCommand({dbhash: 1, collections: colls_excluding_tmp_mr});
-                res.slaves.push(slaveRes);
-            }
-        });
-        return res;
+        const sessions = [
+            this._master,
+            ...slaves.filter(conn => {
+                return !conn.adminCommand({isMaster: 1}).arbiterOnly;
+            })
+        ].map(conn => conn.getDB('test').getSession());
+
+        // getHashes() is sometimes called for versions of MongoDB earlier than 4.0 so we cannot use
+        // the dbHash command directly to filter out capped collections. checkReplicatedDataHashes()
+        // uses the listCollections command after awaiting replication to determine if a collection
+        // is capped.
+        const hashes = this.getHashesUsingSessions(sessions, dbName, {filterCapped: false});
+        return {master: hashes[0], slaves: hashes.slice(1)};
     };
 
     this.dumpOplog = function(conn, query = {}, limit = 10) {
@@ -1590,60 +1700,28 @@ var ReplSetTest = function(opts) {
             return;
         }
 
-        var primaryColl = primary.getDB(dbName).getCollection(collName);
-        var secondaryColl = secondary.getDB(dbName).getCollection(collName);
-
-        var primaryDocs = primaryColl.find().sort({_id: 1}).toArray();
-        var secondaryDocs = secondaryColl.find().sort({_id: 1}).toArray();
-
-        var primaryIndex = primaryDocs.length - 1;
-        var secondaryIndex = secondaryDocs.length - 1;
-
-        var missingOnPrimary = [];
-        var missingOnSecondary = [];
-
-        while (primaryIndex >= 0 || secondaryIndex >= 0) {
-            var primaryDoc = primaryDocs[primaryIndex];
-            var secondaryDoc = secondaryDocs[secondaryIndex];
-
-            if (primaryIndex < 0) {
-                missingOnPrimary.push(tojsononeline(secondaryDoc));
-                secondaryIndex--;
-            } else if (secondaryIndex < 0) {
-                missingOnSecondary.push(tojsononeline(primaryDoc));
-                primaryIndex--;
-            } else {
-                if (!bsonBinaryEqual(primaryDoc, secondaryDoc)) {
-                    print('Mismatching documents:');
-                    print(' primary: ' + tojsononeline(primaryDoc));
-                    print(' secondary: ' + tojsononeline(secondaryDoc));
-                    var ordering =
-                        bsonWoCompare({wrapper: primaryDoc._id}, {wrapper: secondaryDoc._id});
-                    if (ordering === 0) {
-                        primaryIndex--;
-                        secondaryIndex--;
-                    } else if (ordering < 0) {
-                        missingOnPrimary.push(tojsononeline(secondaryDoc));
-                        secondaryIndex--;
-                    } else if (ordering > 0) {
-                        missingOnSecondary.push(tojsononeline(primaryDoc));
-                        primaryIndex--;
-                    }
-                } else {
-                    // Latest document matched.
-                    primaryIndex--;
-                    secondaryIndex--;
-                }
-            }
+        const primarySession = primary.getDB('test').getSession();
+        const secondarySession = secondary.getDB('test').getSession();
+        const diff = self.getCollectionDiffUsingSessions(
+            primarySession, secondarySession, dbName, collName);
+
+        for (let {
+                 primary: primaryDoc, secondary: secondaryDoc,
+             } of diff.docsWithDifferentContents) {
+            print(`Mismatching documents between the primary ${primary.host}` +
+                  ` and the secondary ${secondary.host}:`);
+            print(' primary: ' + tojsononeline(primaryDoc));
+            print(' secondary: ' + tojsononeline(secondaryDoc));
         }
 
-        if (missingOnPrimary.length) {
-            print('The following documents are missing on the primary:');
-            print(missingOnPrimary.join('\n'));
+        if (diff.docsMissingOnPrimary.length > 0) {
+            print(`The following documents are missing on the primary ${primary.host}:`);
+            print(diff.docsMissingOnPrimary.map(doc => tojsononeline(doc)).join('\n'));
         }
-        if (missingOnSecondary.length) {
-            print('The following documents are missing on the secondary:');
-            print(missingOnSecondary.join('\n'));
+
+        if (diff.docsMissingOnSecondary.length > 0) {
+            print(`The following documents are missing on the secondary ${secondary.host}:`);
+            print(diff.docsMissingOnSecondary.map(doc => tojsononeline(doc)).join('\n'));
         }
     }
diff --git a/src/mongo/shell/utils.js b/src/mongo/shell/utils.js
index 07cc666a15a..6c2612a2b03 100644
--- a/src/mongo/shell/utils.js
+++ b/src/mongo/shell/utils.js
@@ -24,10 +24,16 @@ function reconnect(db) {
 function _getErrorWithCode(codeOrObj, message) {
     var e = new Error(message);
     if (codeOrObj != undefined) {
-        if (codeOrObj.writeError) {
-            e.code = codeOrObj.writeError.code;
-        } else if (codeOrObj.code) {
-            e.code = codeOrObj.code;
+        if (codeOrObj.writeError || codeOrObj.code) {
+            if (codeOrObj.writeError) {
+                e.code = codeOrObj.writeError.code;
+            } else if (codeOrObj.code) {
+                e.code = codeOrObj.code;
+            }
+
+            if (codeOrObj.hasOwnProperty("errorLabels")) {
+                e.errorLabels = codeOrObj.errorLabels;
+            }
         } else {
             // At this point assume codeOrObj is a number type
             e.code = codeOrObj;
@@ -305,6 +311,8 @@ jsTestOptions = function() {
             logRetryAttempts: TestData.logRetryAttempts || false,
             connectionString: TestData.connectionString || "",
             skipCheckDBHashes: TestData.skipCheckDBHashes || false,
+            traceExceptions: TestData.hasOwnProperty("traceExceptions") ? TestData.traceExceptions
+                                                                        : true,
         });
     }
     return _jsTestOptions;
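When getHashes() (or a hook built on it) does report a mismatch, the new getCollectionDiffUsingSessions() helper narrows it down to individual documents. A hypothetical usage sketch, assuming 'rst' is a running ReplSetTest as in the earlier sketch and 'test.mycoll' is the suspect namespace:

```js
// Illustrative sketch only: diff one collection between the primary and a secondary.
const primarySession = rst.getPrimary().startSession();
const secondarySession = rst.getSecondary().startSession();

const diff = rst.getCollectionDiffUsingSessions(
    primarySession, secondarySession, "test", "mycoll");

// Documents present on both members but with different contents.
diff.docsWithDifferentContents.forEach(({primary, secondary}) => {
    print("primary:   " + tojsononeline(primary));
    print("secondary: " + tojsononeline(secondary));
});

// Documents present on only one of the two members.
print("missing on primary:   " + tojson(diff.docsMissingOnPrimary));
print("missing on secondary: " + tojson(diff.docsMissingOnSecondary));

primarySession.endSession();
secondarySession.endSession();
```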