author    Mike Grundy <michael.grundy@10gen.com>  2016-07-13 14:53:34 -0400
committer Mike Grundy <michael.grundy@10gen.com>  2016-07-18 09:35:48 -0400
commit    9c32789ac1301d2314f606b026239b45123e0ef7 (patch)
tree      2fe4281592b4c75a00e3238e1e01782698231be1 /buildscripts
parent    15c0e5ba97e17b7f331c8871c67deddfa1fe68d1 (diff)
download  mongo-9c32789ac1301d2314f606b026239b45123e0ef7.tar.gz
SERVER-24716 Remove the CheckReplDBHashDeprecated hook

(cherry picked from commit f18bb78b469cc980c45ce0e5a3eb66d4c090292e)

Conflicts:
	buildscripts/resmokelib/testing/hooks.py
Diffstat (limited to 'buildscripts')
 buildscripts/resmokeconfig/suites/aggregation_read_concern_majority_passthrough.yml |   1 -
 buildscripts/resmokeconfig/suites/core_small_oplog.yml                              |   1 -
 buildscripts/resmokeconfig/suites/core_small_oplog_rs.yml                           |   1 -
 buildscripts/resmokeconfig/suites/integration_tests_replset.yml                     |   1 -
 buildscripts/resmokeconfig/suites/jstestfuzz_replication.yml                        |   1 -
 buildscripts/resmokeconfig/suites/read_concern_majority_passthrough.yml             |   1 -
 buildscripts/resmokelib/testing/fixtures/interface.py                               |   7 -
 buildscripts/resmokelib/testing/fixtures/masterslave.py                             |  50 -
 buildscripts/resmokelib/testing/fixtures/replicaset.py                              |  27 -
 buildscripts/resmokelib/testing/hooks.py                                            | 467 -
 10 files changed, 0 insertions(+), 557 deletions(-)
diff --git a/buildscripts/resmokeconfig/suites/aggregation_read_concern_majority_passthrough.yml b/buildscripts/resmokeconfig/suites/aggregation_read_concern_majority_passthrough.yml
index 0f4715cc5ff..c73b0df611e 100644
--- a/buildscripts/resmokeconfig/suites/aggregation_read_concern_majority_passthrough.yml
+++ b/buildscripts/resmokeconfig/suites/aggregation_read_concern_majority_passthrough.yml
@@ -19,7 +19,6 @@ executor:
         readMode: commands
     hooks:
     - class: ValidateCollections
-    - class: CheckReplDBHashDeprecated
     - class: CheckReplDBHash
     fixture:
       class: ReplicaSetFixture
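
Each of the six suite files touched by this commit receives the same one-line change: CheckReplDBHashDeprecated is dropped from the hooks list. Reconstructed from the diff (surrounding keys omitted), the hooks section of every affected suite now reads:

    hooks:
    - class: ValidateCollections
    - class: CheckReplDBHash
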
diff --git a/buildscripts/resmokeconfig/suites/core_small_oplog.yml b/buildscripts/resmokeconfig/suites/core_small_oplog.yml
index 6cf35f29a6c..503ef35cc28 100644
--- a/buildscripts/resmokeconfig/suites/core_small_oplog.yml
+++ b/buildscripts/resmokeconfig/suites/core_small_oplog.yml
@@ -16,7 +16,6 @@ executor:
         readMode: commands
     hooks:
     - class: ValidateCollections
-    - class: CheckReplDBHashDeprecated
     - class: CheckReplDBHash
     fixture:
       class: MasterSlaveFixture
diff --git a/buildscripts/resmokeconfig/suites/core_small_oplog_rs.yml b/buildscripts/resmokeconfig/suites/core_small_oplog_rs.yml
index 1b5e9507eee..8e62afca387 100644
--- a/buildscripts/resmokeconfig/suites/core_small_oplog_rs.yml
+++ b/buildscripts/resmokeconfig/suites/core_small_oplog_rs.yml
@@ -17,7 +17,6 @@ executor:
         readMode: commands
     hooks:
     - class: ValidateCollections
-    - class: CheckReplDBHashDeprecated
     - class: CheckReplDBHash
     fixture:
       class: ReplicaSetFixture
diff --git a/buildscripts/resmokeconfig/suites/integration_tests_replset.yml b/buildscripts/resmokeconfig/suites/integration_tests_replset.yml
index fba9605ff50..67b455beb4a 100644
--- a/buildscripts/resmokeconfig/suites/integration_tests_replset.yml
+++ b/buildscripts/resmokeconfig/suites/integration_tests_replset.yml
@@ -7,7 +7,6 @@ executor:
     config: {}
     hooks:
     - class: ValidateCollections
-    - class: CheckReplDBHashDeprecated
     - class: CheckReplDBHash
     fixture:
       class: ReplicaSetFixture
diff --git a/buildscripts/resmokeconfig/suites/jstestfuzz_replication.yml b/buildscripts/resmokeconfig/suites/jstestfuzz_replication.yml
index 13b30733416..6027a2cb2bc 100644
--- a/buildscripts/resmokeconfig/suites/jstestfuzz_replication.yml
+++ b/buildscripts/resmokeconfig/suites/jstestfuzz_replication.yml
@@ -10,7 +10,6 @@ executor:
         readMode: commands
     hooks:
     - class: ValidateCollections
-    - class: CheckReplDBHashDeprecated
     - class: CheckReplDBHash
     fixture:
       class: ReplicaSetFixture
diff --git a/buildscripts/resmokeconfig/suites/read_concern_majority_passthrough.yml b/buildscripts/resmokeconfig/suites/read_concern_majority_passthrough.yml
index cef06f52138..c706898c89e 100644
--- a/buildscripts/resmokeconfig/suites/read_concern_majority_passthrough.yml
+++ b/buildscripts/resmokeconfig/suites/read_concern_majority_passthrough.yml
@@ -66,7 +66,6 @@ executor:
         readMode: commands
     hooks:
     - class: ValidateCollections
-    - class: CheckReplDBHashDeprecated
     - class: CheckReplDBHash
     fixture:
       class: ReplicaSetFixture
diff --git a/buildscripts/resmokelib/testing/fixtures/interface.py b/buildscripts/resmokelib/testing/fixtures/interface.py
index 5fbf537c107..8921aa1159c 100644
--- a/buildscripts/resmokelib/testing/fixtures/interface.py
+++ b/buildscripts/resmokelib/testing/fixtures/interface.py
@@ -96,13 +96,6 @@ class ReplFixture(Fixture):
"""
raise NotImplementedError("get_secondaries must be implemented by ReplFixture subclasses")
- def await_repl(self):
- """
- Blocks until all operations on the primary/master have
- replicated to all other nodes.
- """
- raise NotImplementedError("await_repl must be implemented by ReplFixture subclasses")
-
def retry_until_wtimeout(self, insert_fn):
"""
Given a callback function representing an insert operation on
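
The context above ends inside the docstring of retry_until_wtimeout, the helper that both removed await_repl implementations call. Its body is outside this diff, so the sketch below only illustrates the contract inferred from the removed call sites: insert_fn receives the seconds remaining in a fixed timeout budget, and pymongo.errors.WTimeoutError propagates to the caller. The loop structure, sleep interval, and timeout constant are assumptions, not the actual resmokelib code.

    import time

    import pymongo.errors

    AWAIT_REPL_TIMEOUT_MINS = 5  # assumed value; the real constant lives on ReplFixture


    def retry_until_wtimeout(insert_fn):
        # Hypothetical sketch: invoke 'insert_fn' with the seconds left in the
        # budget, retrying transient failures until the budget runs out.
        deadline = time.time() + AWAIT_REPL_TIMEOUT_MINS * 60
        while True:
            remaining_secs = deadline - time.time()
            if remaining_secs <= 0:
                # Budget exhausted: surface the error type the callers catch.
                raise pymongo.errors.WTimeoutError("waiting for replication timed out")
            try:
                insert_fn(remaining_secs)
                return
            except pymongo.errors.ConnectionFailure:
                time.sleep(0.1)  # transient hiccup; retry with the remaining budget
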
diff --git a/buildscripts/resmokelib/testing/fixtures/masterslave.py b/buildscripts/resmokelib/testing/fixtures/masterslave.py
index bffa5a4dbcd..2bcd3c5d6d2 100644
--- a/buildscripts/resmokelib/testing/fixtures/masterslave.py
+++ b/buildscripts/resmokelib/testing/fixtures/masterslave.py
@@ -122,56 +122,6 @@ class MasterSlaveFixture(interface.ReplFixture):
     def get_secondaries(self):
         return [self.slave]
 
-    def await_repl(self):
-        """
-        Inserts a document into each database on the master and waits
-        for all write operations to be acknowledged by the master-slave
-        deployment.
-        """
-
-        client = utils.new_mongo_client(self.port)
-
-        self.logger.info("Starting fsync on master on port %d to flush all pending writes",
-                         self.port)
-        client.fsync()
-        self.logger.info("fsync on master completed")
-
-        # We verify that each database has replicated to the slave because in the case of an initial
-        # sync, the slave may acknowledge writes to one database before it has finished syncing
-        # others.
-        db_names = client.database_names()
-        self.logger.info("Awaiting replication of inserts to each of the following databases on"
-                         " master on port %d: %s",
-                         self.port,
-                         db_names)
-
-        for db_name in db_names:
-            if db_name == "local":
-                continue  # The local database is expected to differ, ignore.
-
-            self.logger.info("Awaiting replication of insert to database %s (w=2, wtimeout=%d min)"
-                             " to master on port %d",
-                             db_name,
-                             interface.ReplFixture.AWAIT_REPL_TIMEOUT_MINS,
-                             self.port)
-
-            # Keep retrying this until it times out waiting for replication.
-            def insert_fn(remaining_secs):
-                remaining_millis = int(round(remaining_secs * 1000))
-                write_concern = pymongo.WriteConcern(w=2, wtimeout=remaining_millis)
-                coll = client[db_name].get_collection("await_repl", write_concern=write_concern)
-                coll.insert_one({"awaiting": "repl"})
-
-            try:
-                self.retry_until_wtimeout(insert_fn)
-            except pymongo.errors.WTimeoutError:
-                self.logger.info("Replication of write operation timed out.")
-                raise
-
-            self.logger.info("Replication of write operation completed for database %s.", db_name)
-
-        self.logger.info("Finished awaiting replication.")
-
     def _new_mongod(self, mongod_logger, mongod_options):
         """
         Returns a standalone.MongoDFixture with the specified logger and
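
Stripped of fixture plumbing, the removed await_repl above boils down to a standard pymongo write-concern pattern: insert a marker document and let the driver block until w nodes acknowledge it or wtimeout elapses. A self-contained sketch of that pattern (host, port, database name, and timeout value are assumptions for illustration):

    import pymongo
    import pymongo.errors

    # Connect to the master; the address is an assumed example.
    client = pymongo.MongoClient("localhost", 27017)

    # w=2: the insert is acknowledged only once both the master and the slave
    # have applied it. wtimeout is in milliseconds.
    write_concern = pymongo.WriteConcern(w=2, wtimeout=60 * 1000)
    coll = client["test"].get_collection("await_repl", write_concern=write_concern)

    try:
        coll.insert_one({"awaiting": "repl"})  # blocks until replicated or timed out
        print("write acknowledged by both nodes")
    except pymongo.errors.WTimeoutError:
        print("write not acknowledged by 2 nodes within the timeout")
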
diff --git a/buildscripts/resmokelib/testing/fixtures/replicaset.py b/buildscripts/resmokelib/testing/fixtures/replicaset.py
index 3b77d5d07f4..c1d6521d5e9 100644
--- a/buildscripts/resmokelib/testing/fixtures/replicaset.py
+++ b/buildscripts/resmokelib/testing/fixtures/replicaset.py
@@ -151,33 +151,6 @@ class ReplicaSetFixture(interface.ReplFixture):
     def get_secondaries(self):
         return self.nodes[1:]
 
-    def await_repl(self):
-        client = utils.new_mongo_client(port=self.port)
-
-        self.logger.info("Starting fsync on primary on port %d to flush all pending writes",
-                         self.port)
-        client.fsync()
-        self.logger.info("fsync on primary completed")
-
-        self.logger.info("Awaiting replication of insert (w=%d, wtimeout=%d min) to primary on port"
-                         " %d", self.num_nodes, interface.ReplFixture.AWAIT_REPL_TIMEOUT_MINS,
-                         self.port)
-
-        # Keep retrying this until it times out waiting for replication.
-        def insert_fn(remaining_secs):
-            remaining_millis = int(round(remaining_secs * 1000))
-            write_concern = pymongo.WriteConcern(w=self.num_nodes, wtimeout=remaining_millis)
-            coll = client.resmoke.get_collection("await_repl", write_concern=write_concern)
-            coll.insert_one({"awaiting": "repl"})
-
-        try:
-            self.retry_until_wtimeout(insert_fn)
-        except pymongo.errors.WTimeoutError:
-            self.logger.info("Replication of write operation timed out.")
-            raise
-
-        self.logger.info("Replication of write operation completed.")
-
     def _new_mongod(self, index, replset_name):
         """
         Returns a standalone.MongoDFixture configured to be used as a
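
With both await_repl implementations gone, consistency checking rests entirely on the dbHash comparison performed by the remaining CheckReplDBHash hook (now implemented in JavaScript, per the hunk below). For reference, the core of that comparison in plain pymongo, mirroring the logic deleted from hooks.py below; the addresses are assumptions:

    import pymongo

    # Direct connections to an assumed primary and secondary.
    primary = pymongo.MongoClient("localhost", 27017)
    secondary = pymongo.MongoClient("localhost", 27018, readPreference="secondary")

    for db_name in primary.database_names():
        if db_name in ("local", "config"):
            continue  # these databases legitimately differ across nodes

        # dbHash returns an "md5" digest computed over the database's collections.
        primary_md5 = primary[db_name].command("dbhash")["md5"]
        secondary_md5 = secondary[db_name].command("dbhash")["md5"]
        print("%s: %s" % (db_name, "match" if primary_md5 == secondary_md5 else "MISMATCH"))
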
diff --git a/buildscripts/resmokelib/testing/hooks.py b/buildscripts/resmokelib/testing/hooks.py
index 0a5d895f3f5..1a438cf0867 100644
--- a/buildscripts/resmokelib/testing/hooks.py
+++ b/buildscripts/resmokelib/testing/hooks.py
@@ -183,472 +183,6 @@ class CheckReplDBHash(JsCustomBehavior):
         JsCustomBehavior.__init__(self, logger, fixture, js_filename, description)
 
 
-# Old version of CheckReplDBHash used to ensure feature parity of new version.
-class CheckReplDBHashDeprecated(CustomBehavior):
-    """
-    Waits for replication after each test, then checks that the dbhashes
-    of all databases other than "local" and "config" match on the primary
-    and all of the secondaries. If any dbhashes do not match, logs information
-    about what was different (e.g. Different numbers of collections,
-    missing documents in a collection, mismatching documents, etc).
-
-    Compatible only with ReplFixture subclasses.
-    """
-
-    def __init__(self, logger, fixture):
-        if not isinstance(fixture, fixtures.ReplFixture):
-            raise TypeError("%s does not support replication" % (fixture.__class__.__name__))
-
-        description = "Check that replica-set nodes are consistent by using the dbHash command"
-        CustomBehavior.__init__(self, logger, fixture, description)
-
-        self.started = False
-        self.hook_test_case = testcases.TestCase(self.logger, "Hook", self.logger_name)
-
-    def after_test(self, test, test_report):
-        """
-        After each test, check that the dbhash of the test database is
-        the same on all nodes in the replica set or master/slave
-        fixture.
-        """
-
-        try:
-            if not self.started:
-                CustomBehavior.start_dynamic_test(self.hook_test_case, test_report)
-                self.started = True
-
-            primary = self.fixture.get_primary()
-            primary_conn = utils.new_mongo_client(port=primary.port)
-
-            # Wait until all operations have replicated.
-            self.fixture.await_repl()
-
-            success = True
-            sb = []  # String builder.
-
-            for secondary in self.fixture.get_secondaries():
-                read_preference = pymongo.ReadPreference.SECONDARY
-                secondary_conn = utils.new_mongo_client(port=secondary.port,
-                                                        read_preference=read_preference)
-                # Skip arbiters.
-                if secondary_conn.admin.command("isMaster").get("arbiterOnly", False):
-                    continue
-
-                all_matched = CheckReplDBHashDeprecated._check_all_db_hashes(primary_conn,
-                                                                             secondary_conn,
-                                                                             sb)
-                if not all_matched:
-                    sb.insert(0,
-                              "One or more databases were different between the primary on port %d"
-                              " and the secondary on port %d:"
-                              % (primary.port, secondary.port))
-
-                success = all_matched and success
-
-            if not success:
-                CheckReplDBHashDeprecated._dump_oplog(primary_conn, secondary_conn, sb)
-
-                # Adding failures to a TestReport requires traceback information, so we raise
-                # a 'self.hook_test_case.failureException' that we will catch ourselves.
-                self.hook_test_case.logger.info("\n ".join(sb))
-                raise self.hook_test_case.failureException("The dbhashes did not match")
-        except self.hook_test_case.failureException as err:
-            self.hook_test_case.logger.exception("The dbhashes did not match.")
-            self.hook_test_case.return_code = 1
-            test_report.addFailure(self.hook_test_case, sys.exc_info())
-            test_report.stopTest(self.hook_test_case)
-            raise errors.ServerFailure(err.args[0])
-        except pymongo.errors.WTimeoutError:
-            self.hook_test_case.logger.exception("Awaiting replication timed out.")
-            self.hook_test_case.return_code = 2
-            test_report.addError(self.hook_test_case, sys.exc_info())
-            test_report.stopTest(self.hook_test_case)
-            raise errors.StopExecution("Awaiting replication timed out")
-
-    def after_suite(self, test_report):
-        """
-        If we get to this point, the #dbhash# test must have been
-        successful, so add it to the test report.
-        """
-
-        if self.started:
-            self.hook_test_case.logger.info("The dbhashes matched for all tests.")
-            self.hook_test_case.return_code = 0
-            test_report.addSuccess(self.hook_test_case)
-            # TestReport.stopTest() has already been called if there was a failure.
-            test_report.stopTest(self.hook_test_case)
-
-        self.started = False
-
-    @staticmethod
-    def _dump_oplog(primary_conn, secondary_conn, sb):
-
-        def dump_latest_docs(coll, limit=0):
-            docs = (doc for doc in coll.find().sort("$natural", pymongo.DESCENDING).limit(limit))
-            for doc in docs:
-                sb.append(" %s" % (doc))
-
-        LIMIT = 100
-        sb.append("Dumping the latest %d documents from the primary's oplog" % (LIMIT))
-        dump_latest_docs(primary_conn.local.oplog.rs, LIMIT)
-        sb.append("Dumping the latest %d documents from the secondary's oplog" % (LIMIT))
-        dump_latest_docs(secondary_conn.local.oplog.rs, LIMIT)
-
-    @staticmethod
-    def _check_all_db_hashes(primary_conn, secondary_conn, sb):
-        """
-        Returns true if for each database, except "local" and
-        "config", the dbhash command returns the same MD5 hash on the
-        primary as it does on the secondary. Returns false otherwise.
-
-        Logs a message describing the differences if any database's
-        dbhash did not match.
-        """
-
-        # Overview of how we'll check that everything replicated correctly between these two nodes:
-        #
-        # - Check whether they have the same databases.
-        #     - If not, log which databases are missing where, and dump the contents of any that are
-        #       missing.
-        #
-        # - Check whether each database besides "local" and "config" gives the same md5 field as
-        #   the result of running the dbhash command.
-        #     - If not, check whether they have the same collections.
-        #         - If not, log which collections are missing where, and dump the contents of any
-        #           that are missing.
-        #     - If so, check that the hash of each non-capped collection matches.
-        #         - If any do not match, log the diff of the collection between the two nodes.
-
-        success = True
-
-        if not CheckReplDBHashDeprecated._check_dbs_present(primary_conn, secondary_conn, sb):
-            return False
-
-        for db_name in primary_conn.database_names():
-            if db_name in ["config", "local"]:
-                continue  # We don't expect these dbs to match across different nodes.
-
-            matched = CheckReplDBHashDeprecated._check_db_hash(
-                primary_conn, secondary_conn, db_name, sb)
-            success = matched and success
-
-        return success
-
-    @staticmethod
-    def _check_dbs_present(primary_conn, secondary_conn, sb):
-        """
-        Returns true if the list of databases on the primary is
-        identical to the list of databases on the secondary, and false
-        otherwise.
-        """
-
-        success = True
-        primary_dbs = primary_conn.database_names()
-
-        # Can't run database_names() on secondary, so instead use the listDatabases command.
-        # TODO: Use database_names() once PYTHON-921 is resolved.
-        list_db_output = secondary_conn.admin.command("listDatabases")
-        secondary_dbs = [db["name"] for db in list_db_output["databases"]]
-
-        # There may be a difference in databases which is not considered an error, when
-        # the database only contains system collections. This difference is only logged
-        # when others are encountered, i.e., success = False.
-        missing_on_primary, missing_on_secondary = CheckReplDBHashDeprecated._check_difference(
-            set(primary_dbs), set(secondary_dbs), "database")
-
-        for missing_db in missing_on_secondary:
-            db = primary_conn[missing_db]
-            coll_names = db.collection_names()
-            non_system_colls = [name for name in coll_names if not name.startswith("system.")]
-
-            # It is only an error if there are any non-system collections in the database,
-            # otherwise it's not well defined whether they should exist or not.
-            if non_system_colls:
-                sb.append("Database %s present on primary but not on secondary." % (missing_db))
-                CheckReplDBHashDeprecated._dump_all_collections(db, non_system_colls, sb)
-                success = False
-
-        for missing_db in missing_on_primary:
-            db = secondary_conn[missing_db]
-
-            # Can't run collection_names() on secondary, so instead use the listCollections command.
-            # TODO: Always use collection_names() once PYTHON-921 is resolved. Then much of the
-            # logic that is duplicated here can be consolidated.
-            list_coll_output = db.command("listCollections")["cursor"]["firstBatch"]
-            coll_names = [coll["name"] for coll in list_coll_output]
-            non_system_colls = [name for name in coll_names if not name.startswith("system.")]
-
-            # It is only an error if there are any non-system collections in the database,
-            # otherwise it's not well defined if it should exist or not.
-            if non_system_colls:
-                sb.append("Database %s present on secondary but not on primary." % (missing_db))
-                CheckReplDBHashDeprecated._dump_all_collections(db, non_system_colls, sb)
-                success = False
-
-        return success
-
-    @staticmethod
-    def _check_db_hash(primary_conn, secondary_conn, db_name, sb):
-        """
-        Returns true if the dbhash for 'db_name' matches on the primary
-        and the secondary, and false otherwise.
-
-        Appends a message to 'sb' describing the differences if the
-        dbhashes do not match.
-        """
-
-        primary_hash = primary_conn[db_name].command("dbhash")
-        secondary_hash = secondary_conn[db_name].command("dbhash")
-
-        if primary_hash["md5"] == secondary_hash["md5"]:
-            return True
-
-        success = CheckReplDBHashDeprecated._check_dbs_eq(
-            primary_conn, secondary_conn, primary_hash, secondary_hash, db_name, sb)
-
-        if not success:
-            sb.append("Database %s has a different hash on the primary and the secondary"
-                      " ([ %s ] != [ %s ]):"
-                      % (db_name, primary_hash["md5"], secondary_hash["md5"]))
-
-        return success
-
-    @staticmethod
-    def _check_dbs_eq(primary_conn, secondary_conn, primary_hash, secondary_hash, db_name, sb):
-        """
-        Returns true if all non-capped collections had the same hash in
-        the dbhash response, and false otherwise.
-
-        Appends information to 'sb' about the differences between the
-        'db_name' database on the primary and the 'db_name' database on
-        the secondary, if any.
-        """
-
-        success = True
-
-        primary_db = primary_conn[db_name]
-        secondary_db = secondary_conn[db_name]
-
-        primary_coll_hashes = primary_hash["collections"]
-        secondary_coll_hashes = secondary_hash["collections"]
-
-        primary_coll_names = set(primary_coll_hashes.keys())
-        secondary_coll_names = set(secondary_coll_hashes.keys())
-
-        missing_on_primary, missing_on_secondary = CheckReplDBHashDeprecated._check_difference(
-            primary_coll_names, secondary_coll_names, "collection", sb=sb)
-
-        if missing_on_primary or missing_on_secondary:
-
-            # 'sb' already describes which collections are missing where.
-            for coll_name in missing_on_primary:
-                CheckReplDBHashDeprecated._dump_all_documents(secondary_db, coll_name, sb)
-            for coll_name in missing_on_secondary:
-                CheckReplDBHashDeprecated._dump_all_documents(primary_db, coll_name, sb)
-            return
-
-        for coll_name in primary_coll_names & secondary_coll_names:
-            primary_coll_hash = primary_coll_hashes[coll_name]
-            secondary_coll_hash = secondary_coll_hashes[coll_name]
-
-            if primary_coll_hash == secondary_coll_hash:
-                continue
-
-            # Ignore capped collections because they are not expected to match on all nodes.
-            if primary_db.command({"collStats": coll_name})["capped"]:
-                # Still fail if the collection is not capped on the secondary.
-                if not secondary_db.command({"collStats": coll_name})["capped"]:
-                    success = False
-                    sb.append("%s.%s collection is capped on primary but not on secondary."
-                              % (primary_db.name, coll_name))
-                sb.append("%s.%s collection is capped, ignoring." % (primary_db.name, coll_name))
-                continue
-            # Still fail if the collection is capped on the secondary, but not on the primary.
-            elif secondary_db.command({"collStats": coll_name})["capped"]:
-                success = False
-                sb.append("%s.%s collection is capped on secondary but not on primary."
-                          % (primary_db.name, coll_name))
-                continue
-
-            success = False
-            sb.append("Collection %s.%s has a different hash on the primary and the secondary"
-                      " ([ %s ] != [ %s ]):"
-                      % (db_name, coll_name, primary_coll_hash, secondary_coll_hash))
-            CheckReplDBHashDeprecated._check_colls_eq(primary_db, secondary_db, coll_name, sb)
-
-        if success:
-            sb.append("All collections that were expected to match did.")
-        return success
-
-    @staticmethod
-    def _check_colls_eq(primary_db, secondary_db, coll_name, sb):
-        """
-        Appends information to 'sb' about the differences or between
-        the 'coll_name' collection on the primary and the 'coll_name'
-        collection on the secondary, if any.
-        """
-
-        codec_options = bson.CodecOptions(document_class=TypeSensitiveSON)
-
-        primary_coll = primary_db.get_collection(coll_name, codec_options=codec_options)
-        secondary_coll = secondary_db.get_collection(coll_name, codec_options=codec_options)
-
-        primary_docs = CheckReplDBHashDeprecated._extract_documents(primary_coll)
-        secondary_docs = CheckReplDBHashDeprecated._extract_documents(secondary_coll)
-
-        CheckReplDBHashDeprecated._get_collection_diff(primary_docs, secondary_docs, sb)
-
-    @staticmethod
-    def _extract_documents(collection):
-        """
-        Returns a list of all documents in the collection, sorted by
-        their _id.
-        """
-
-        return [doc for doc in collection.find().sort("_id", pymongo.ASCENDING)]
-
-    @staticmethod
-    def _get_collection_diff(primary_docs, secondary_docs, sb):
-        """
-        Returns true if the documents in 'primary_docs' exactly match
-        the documents in 'secondary_docs', and false otherwise.
-
-        Appends information to 'sb' about what matched or did not match.
-        """
-
-        matched = True
-
-        # These need to be lists instead of sets because documents aren't hashable.
-        missing_on_primary = []
-        missing_on_secondary = []
-
-        p_idx = 0  # Keep track of our position in 'primary_docs'.
-        s_idx = 0  # Keep track of our position in 'secondary_docs'.
-
-        while p_idx < len(primary_docs) and s_idx < len(secondary_docs):
-            primary_doc = primary_docs[p_idx]
-            secondary_doc = secondary_docs[s_idx]
-
-            if primary_doc == secondary_doc:
-                p_idx += 1
-                s_idx += 1
-                continue
-
-            # We have mismatching documents.
-            matched = False
-
-            if primary_doc["_id"] == secondary_doc["_id"]:
-                sb.append("Mismatching document:")
-                sb.append(" primary: %s" % (primary_doc))
-                sb.append(" secondary: %s" % (secondary_doc))
-                p_idx += 1
-                s_idx += 1
-
-            # One node was missing a document. Since the documents are sorted by _id, the doc with
-            # the smaller _id was the one that was skipped.
-            elif primary_doc["_id"] < secondary_doc["_id"]:
-                missing_on_secondary.append(primary_doc)
-
-                # Only move past the doc that we know was skipped.
-                p_idx += 1
-
-            else:  # primary_doc["_id"] > secondary_doc["_id"]
-                missing_on_primary.append(secondary_doc)
-
-                # Only move past the doc that we know was skipped.
-                s_idx += 1
-
-        # Check if there are any unmatched documents left.
-        while p_idx < len(primary_docs):
-            matched = False
-            missing_on_secondary.append(primary_docs[p_idx])
-            p_idx += 1
-        while s_idx < len(secondary_docs):
-            matched = False
-            missing_on_primary.append(secondary_docs[s_idx])
-            s_idx += 1
-
-        if not matched:
-            CheckReplDBHashDeprecated._append_differences(
-                missing_on_primary, missing_on_secondary, "document", sb)
-        else:
-            sb.append("All documents matched.")
-
-    @staticmethod
-    def _check_difference(primary_set, secondary_set, item_type_name, sb=None):
-        """
-        Returns true if the contents of 'primary_set' and
-        'secondary_set' are identical, and false otherwise. The sets
-        contain information about the primary and secondary,
-        respectively, e.g. the database names that exist on each node.
-
-        Appends information about anything that differed to 'sb'.
-        """
-
-        missing_on_primary = set()
-        missing_on_secondary = set()
-
-        for item in primary_set - secondary_set:
-            missing_on_secondary.add(item)
-
-        for item in secondary_set - primary_set:
-            missing_on_primary.add(item)
-
-        if sb is not None:
-            CheckReplDBHashDeprecated._append_differences(
-                missing_on_primary, missing_on_secondary, item_type_name, sb)
-
-        return (missing_on_primary, missing_on_secondary)
-
-    @staticmethod
-    def _append_differences(missing_on_primary, missing_on_secondary, item_type_name, sb):
-        """
-        Given two iterables representing items that were missing on the
-        primary or the secondary respectively, append the information
-        about which items were missing to 'sb', if any.
-        """
-
-        if missing_on_primary:
-            sb.append("The following %ss were present on the secondary, but not on the"
-                      " primary:" % (item_type_name))
-            for item in missing_on_primary:
-                sb.append(str(item))
-
-        if missing_on_secondary:
-            sb.append("The following %ss were present on the primary, but not on the"
-                      " secondary:" % (item_type_name))
-            for item in missing_on_secondary:
-                sb.append(str(item))
-
-    @staticmethod
-    def _dump_all_collections(database, coll_names, sb):
-        """
-        Appends the contents of each of the collections in 'coll_names'
-        to 'sb'.
-        """
-
-        if coll_names:
-            sb.append("Database %s contains the following collections: %s"
-                      % (database.name, coll_names))
-            for coll_name in coll_names:
-                CheckReplDBHashDeprecated._dump_all_documents(database, coll_name, sb)
-        else:
-            sb.append("No collections in database %s." % (database.name))
-
-    @staticmethod
-    def _dump_all_documents(database, coll_name, sb):
-        """
-        Appends the contents of 'coll_name' to 'sb'.
-        """
-
-        docs = CheckReplDBHashDeprecated._extract_documents(database[coll_name])
-        if docs:
-            sb.append("Documents in %s.%s:" % (database.name, coll_name))
-            for doc in docs:
-                sb.append(" %s" % (doc))
-        else:
-            sb.append("No documents in %s.%s." % (database.name, coll_name))
-
 class TypeSensitiveSON(bson.SON):
     """
     Extends bson.SON to perform additional type-checking of document values
@@ -680,6 +214,5 @@ class TypeSensitiveSON(bson.SON):
 _CUSTOM_BEHAVIORS = {
     "CleanEveryN": CleanEveryN,
     "CheckReplDBHash": CheckReplDBHash,
-    "CheckReplDBHashDeprecated": CheckReplDBHashDeprecated,
     "ValidateCollections": ValidateCollections,
 }
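
_CUSTOM_BEHAVIORS is the registry that resolves hook names in suite YAML to Python classes, so after this commit a suite that still listed CheckReplDBHashDeprecated would fail to resolve at startup. A quick smoke check is to run one of the updated suites, for example (invocation is illustrative; flags may differ by branch):

    python buildscripts/resmoke.py --suites=core_small_oplog_rs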