author     Jonathan Abrahams <jonathan@mongodb.com>  2018-05-10 11:49:47 -0400
committer  Jonathan Abrahams <jonathan@mongodb.com>  2018-05-14 14:13:25 -0400
commit     9db7db6586d621ab41430a26155d9c46ce32c046 (patch)
tree       c78df1326706e66201efda7e8f6fdaf5102930cd
parent     41b83a16fd3d308a24f2e001759d8415dc348ddf (diff)
SERVER-30204 Use a new CleanupConcurrencyWorkloads hook instead of CleanEveryN for FSM suites using resmoke_runner.js
-rw-r--r--  buildscripts/resmokeconfig/suites/concurrency.yml  |  5
-rw-r--r--  buildscripts/resmokeconfig/suites/concurrency_replication.yml  |  5
-rw-r--r--  buildscripts/resmokeconfig/suites/concurrency_sharded_causal_consistency.yml  |  5
-rw-r--r--  buildscripts/resmokeconfig/suites/concurrency_sharded_causal_consistency_and_balancer.yml  |  5
-rw-r--r--  buildscripts/resmokeconfig/suites/concurrency_sharded_replication.yml  |  5
-rw-r--r--  buildscripts/resmokeconfig/suites/concurrency_sharded_replication_with_balancer.yml  |  5
-rw-r--r--  buildscripts/resmokelib/testing/hooks/cleanup_concurrency_workloads.py  | 87
-rw-r--r--  buildscripts/resmokelib/testing/testcases/fsm_workload_test.py  | 17
-rw-r--r--  jstests/concurrency/fsm_utils/name_utils.js  | 11
9 files changed, 114 insertions(+), 31 deletions(-)
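
Note: the change below replaces the restart-based CleanEveryN hook with a hook that drops leftover databases between FSM workloads. As a rough orientation, the following standalone sketch (not part of the commit; the connection string and helper name are assumptions) shows the kind of cleanup the new hook performs via pymongo:

# Minimal sketch of the cleanup behavior: drop every database except the
# protected/excluded ones after an FSM workload finishes.
import pymongo

PROTECTED_DBS = ["admin", "config", "local", "$external"]


def cleanup_workload_dbs(client, exclude_dbs=(), same_db_name=None):
    """Drop all databases except protected ones, exclude_dbs, and same_db_name."""
    excluded = set(PROTECTED_DBS) | set(exclude_dbs)
    if same_db_name:
        excluded.add(same_db_name)
    for db_name in client.list_database_names():
        if db_name not in excluded:
            client.drop_database(db_name)


if __name__ == "__main__":
    # Assumes a locally running mongod, for illustration only.
    cleanup_workload_dbs(pymongo.MongoClient("mongodb://localhost:27017"))
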
diff --git a/buildscripts/resmokeconfig/suites/concurrency.yml b/buildscripts/resmokeconfig/suites/concurrency.yml
index 4bcda260f16..d30b5078b67 100644
--- a/buildscripts/resmokeconfig/suites/concurrency.yml
+++ b/buildscripts/resmokeconfig/suites/concurrency.yml
@@ -18,10 +18,7 @@ executor:
       global_vars:
         TestData:
           skipValidationOnNamespaceNotFound: false
-  # TODO SERVER-30204: Avoid restarting the MongoDB deployment in order to delete all of the data
-  # files from earlier FSM workloads.
-  - class: CleanEveryN
-    n: 20
+  - class: CleanupConcurrencyWorkloads
   fixture:
     class: MongoDFixture
     mongod_options:
diff --git a/buildscripts/resmokeconfig/suites/concurrency_replication.yml b/buildscripts/resmokeconfig/suites/concurrency_replication.yml
index 7716e696ca8..7ae625b3f58 100644
--- a/buildscripts/resmokeconfig/suites/concurrency_replication.yml
+++ b/buildscripts/resmokeconfig/suites/concurrency_replication.yml
@@ -30,10 +30,7 @@ executor:
   # TODO SERVER-26466: Add CheckReplOplogs hook to the concurrency suite.
   - class: CheckReplDBHash
   - class: ValidateCollections
-  # TODO SERVER-30204: Avoid restarting the MongoDB deployment in order to delete all of the data
-  # files from earlier FSM workloads.
-  - class: CleanEveryN
-    n: 20
+  - class: CleanupConcurrencyWorkloads
   fixture:
     class: ReplicaSetFixture
     mongod_options:
diff --git a/buildscripts/resmokeconfig/suites/concurrency_sharded_causal_consistency.yml b/buildscripts/resmokeconfig/suites/concurrency_sharded_causal_consistency.yml
index 884dd9e5899..5cfe166fbd2 100644
--- a/buildscripts/resmokeconfig/suites/concurrency_sharded_causal_consistency.yml
+++ b/buildscripts/resmokeconfig/suites/concurrency_sharded_causal_consistency.yml
@@ -129,10 +129,7 @@ executor:
   hooks:
   - class: CheckReplDBHash
   - class: ValidateCollections
-  # TODO SERVER-30204: Avoid restarting the MongoDB deployment in order to delete all of the data
-  # files from earlier FSM workloads.
-  - class: CleanEveryN
-    n: 20
+  - class: CleanupConcurrencyWorkloads
   fixture:
     class: ShardedClusterFixture
     enable_balancer: false
diff --git a/buildscripts/resmokeconfig/suites/concurrency_sharded_causal_consistency_and_balancer.yml b/buildscripts/resmokeconfig/suites/concurrency_sharded_causal_consistency_and_balancer.yml
index 224a5ed1c1b..0ac1c1486f0 100644
--- a/buildscripts/resmokeconfig/suites/concurrency_sharded_causal_consistency_and_balancer.yml
+++ b/buildscripts/resmokeconfig/suites/concurrency_sharded_causal_consistency_and_balancer.yml
@@ -134,10 +134,7 @@ executor:
   hooks:
   - class: CheckReplDBHash
   - class: ValidateCollections
-  # TODO SERVER-30204: Avoid restarting the MongoDB deployment in order to delete all of the data
-  # files from earlier FSM workloads.
-  - class: CleanEveryN
-    n: 20
+  - class: CleanupConcurrencyWorkloads
   fixture:
     class: ShardedClusterFixture
     mongos_options:
diff --git a/buildscripts/resmokeconfig/suites/concurrency_sharded_replication.yml b/buildscripts/resmokeconfig/suites/concurrency_sharded_replication.yml
index aaa9181a5db..50d381157b4 100644
--- a/buildscripts/resmokeconfig/suites/concurrency_sharded_replication.yml
+++ b/buildscripts/resmokeconfig/suites/concurrency_sharded_replication.yml
@@ -125,10 +125,7 @@ executor:
   hooks:
   - class: CheckReplDBHash
   - class: ValidateCollections
-  # TODO SERVER-30204: Avoid restarting the MongoDB deployment in order to delete all of the data
-  # files from earlier FSM workloads.
-  - class: CleanEveryN
-    n: 20
+  - class: CleanupConcurrencyWorkloads
   fixture:
     class: ShardedClusterFixture
     enable_balancer: false
diff --git a/buildscripts/resmokeconfig/suites/concurrency_sharded_replication_with_balancer.yml b/buildscripts/resmokeconfig/suites/concurrency_sharded_replication_with_balancer.yml
index b15d3ab98d0..01e3244be22 100644
--- a/buildscripts/resmokeconfig/suites/concurrency_sharded_replication_with_balancer.yml
+++ b/buildscripts/resmokeconfig/suites/concurrency_sharded_replication_with_balancer.yml
@@ -130,10 +130,7 @@ executor:
   hooks:
   - class: CheckReplDBHash
   - class: ValidateCollections
-  # TODO SERVER-30204: Avoid restarting the MongoDB deployment in order to delete all of the data
-  # files from earlier FSM workloads.
-  - class: CleanEveryN
-    n: 20
+  - class: CleanupConcurrencyWorkloads
   fixture:
     class: ShardedClusterFixture
     mongos_options:
diff --git a/buildscripts/resmokelib/testing/hooks/cleanup_concurrency_workloads.py b/buildscripts/resmokelib/testing/hooks/cleanup_concurrency_workloads.py
new file mode 100644
index 00000000000..48daa5b38ef
--- /dev/null
+++ b/buildscripts/resmokelib/testing/hooks/cleanup_concurrency_workloads.py
@@ -0,0 +1,87 @@
+"""Test hook for dropping databases created by the fixture."""
+
+from __future__ import absolute_import
+
+import copy
+
+from . import interface
+from ... import utils
+
+
+class CleanupConcurrencyWorkloads(interface.Hook):
+ """Drop all databases, except those that have been excluded.
+
+ For concurrency tests that run on different DBs, drop all databases except ones
+ in 'exclude_dbs'.
+ For tests that run on the same DB, drop all databases except ones in 'exclude_dbs'
+ and the DB used by the test/workloads.
+ For tests that run on the same collection, drop all collections in all databases
+ except for 'exclude_dbs' and the collection used by the test/workloads.
+ """
+
+ def __init__( #pylint: disable=too-many-arguments
+ self, hook_logger, fixture, exclude_dbs=None, same_collection=False, same_db=False):
+ """Initialize CleanupConcurrencyWorkloads."""
+ description = "CleanupConcurrencyWorkloads drops all databases in the fixture"
+ interface.Hook.__init__(self, hook_logger, fixture, description)
+
+ protected_dbs = ["admin", "config", "local", "$external"]
+ self.exclude_dbs = list(set().union(protected_dbs, utils.default_if_none(exclude_dbs, [])))
+ self.same_collection_name = None
+ self.same_db_name = None
+ if same_db or same_collection:
+ # The db name is defined in jstests/concurrency/fsm_utils/name_utils.js.
+ self.same_db_name = "fsmdb0"
+ if same_collection:
+ # The collection name is defined in jstests/concurrency/fsm_utils/name_utils.js.
+ self.same_collection_name = "fsmcoll0"
+
+ def after_test(self, test, test_report):
+ """After test cleanup."""
+ hook_test_case = CleanupConcurrencyWorkloadsTestCase.create_after_test(
+ self.logger.test_case_logger, test, self)
+ hook_test_case.configure(self.fixture)
+ hook_test_case.run_dynamic_test(test_report)
+
+
+class CleanupConcurrencyWorkloadsTestCase(interface.DynamicTestCase):
+ """DropDatabasesTestCase class."""
+
+ def _find_same_db_name(self, dbs):
+ """Find full name of same_db_name."""
+ for db in dbs:
+ if db.endswith(self._hook.same_db_name):
+ return db
+ return None
+
+ def run_test(self):
+ """Execute drop databases hook."""
+ same_db_name = None
+ try:
+ client = self._hook.fixture.mongo_client()
+ db_names = client.database_names()
+ exclude_dbs = copy.copy(self._hook.exclude_dbs)
+ if self._hook.same_db_name:
+ same_db_name = self._find_same_db_name(db_names)
+ if same_db_name:
+ exclude_dbs.append(same_db_name)
+ self.logger.info("Dropping all databases except for %s", exclude_dbs)
+ for db_name in [db for db in db_names if db not in exclude_dbs]:
+ self.logger.info("Dropping database %s", db_name)
+ client.drop_database(db_name)
+ except:
+ self.logger.exception("Encountered an error while dropping database %s.", db)
+ raise
+
+ if self._hook.same_collection_name and same_db_name:
+ self.logger.info("Dropping all collections in db %s except for %s", same_db_name,
+ self._hook.same_collection_name)
+ try:
+ colls = client[same_db_name].collection_names()
+ for coll in [coll for coll in colls if coll != self._hook.same_collection_name]:
+ self.logger.info("Dropping db %s collection %s", same_db_name, coll)
+ client[same_db_name].drop_collection(coll)
+ except:
+ self.logger.exception("Encountered an error while dropping db % collection %s.",
+ same_db_name, coll)
+ raise
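
For the same-collection mode added above, the hook keeps the shared fsmcoll0 collection and drops every other collection in the shared database. A minimal sketch of that branch (the helper name is hypothetical; pymongo's list_collection_names/drop_collection are used for illustration):

# Sketch of the same-collection branch: keep the shared collection and drop
# the rest of the shared FSM database's collections.
def drop_other_collections(client, same_db_name, keep_coll="fsmcoll0"):
    """Drop every collection in same_db_name except keep_coll."""
    for coll in client[same_db_name].list_collection_names():
        if coll != keep_coll:
            client[same_db_name].drop_collection(coll)
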
diff --git a/buildscripts/resmokelib/testing/testcases/fsm_workload_test.py b/buildscripts/resmokelib/testing/testcases/fsm_workload_test.py
index 908c9ffac93..ea8f91692fb 100644
--- a/buildscripts/resmokelib/testing/testcases/fsm_workload_test.py
+++ b/buildscripts/resmokelib/testing/testcases/fsm_workload_test.py
@@ -16,9 +16,14 @@ class FSMWorkloadTestCase(jsrunnerfile.JSRunnerFileTestCase):
     _COUNTER_LOCK = threading.Lock()
     _COUNTER = 0
-    def __init__(self, logger, fsm_workload, shell_executable=None, shell_options=None):
+    def __init__(  #pylint: disable=too-many-arguments
+            self, logger, fsm_workload, shell_executable=None, shell_options=None, same_db=False,
+            same_collection=False, db_name_prefix=None):
         """Initialize the FSMWorkloadTestCase with the FSM workload file."""
+        self.same_collection = same_collection
+        self.same_db = same_db or self.same_collection
+        self.db_name_prefix = db_name_prefix
         jsrunnerfile.JSRunnerFileTestCase.__init__(
             self, logger, "FSM workload", fsm_workload,
             test_runner_file="jstests/concurrency/fsm_libs/resmoke_runner.js",
@@ -35,10 +40,16 @@ class FSMWorkloadTestCase(jsrunnerfile.JSRunnerFileTestCase):
         with FSMWorkloadTestCase._COUNTER_LOCK:
             count = FSMWorkloadTestCase._COUNTER
-            FSMWorkloadTestCase._COUNTER += 1
+            if not self.same_db:
+                FSMWorkloadTestCase._COUNTER += 1
         # We use a global incrementing counter as a prefix for the database name to avoid any
         # collection lifecycle related issues in sharded clusters. This more closely matches how
         # uniqueDBName() and uniqueCollName() would have returned distinct values when called once
         # for each FSM workload in the entire schedule by runner.js.
-        test_data["dbNamePrefix"] = "test{:d}_".format(count)
+        test_prefix = self.db_name_prefix if self.db_name_prefix else "test"
+        test_data["dbNamePrefix"] = "{}{:d}_".format(test_prefix, count)
+        if self.same_db:
+            test_data["sameDB"] = True
+        if self.same_collection:
+            test_data["sameCollection"] = True
diff --git a/jstests/concurrency/fsm_utils/name_utils.js b/jstests/concurrency/fsm_utils/name_utils.js
index 7f845603ffc..4c9d6d85fbb 100644
--- a/jstests/concurrency/fsm_utils/name_utils.js
+++ b/jstests/concurrency/fsm_utils/name_utils.js
@@ -3,29 +3,32 @@
 /**
  * Helpers for generating names of databases and collections
  * to execute workloads against.
+ * The DB and collection names here are synchronized with
+ * the names found in the CleanupConcurrencyWorkloads hook
+ * in resmoke.
  */
 if (typeof uniqueDBName === 'undefined') {
     // Returns a unique database name:
-    //   <dbNamePrefix>db0, <dbNamePrefix>db1, ...
+    //   <dbNamePrefix>fsmdb0, <dbNamePrefix>fsmdb1, ...
     var uniqueDBName = (function(dbNamePrefix) {
         var i = 0;
         return function(dbNamePrefix) {
             var prefix = dbNamePrefix || '';
-            return prefix + 'db' + i++;
+            return prefix + 'fsmdb' + i++;
         };
     })();
 }
 if (typeof uniqueCollName === 'undefined') {
     // Returns a unique collection name:
-    //   coll0, coll1, ...
+    //   fsmcoll0, fsmcoll1, ...
     var uniqueCollName = (function() {
         var i = 0;
         return function() {
-            return 'coll' + i++;
+            return 'fsmcoll' + i++;
         };
     })();
 }
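
Taken together, the pieces rely on a simple naming contract: resmoke passes a dbNamePrefix such as "test3_", uniqueDBName() appends "fsmdb<N>", and the hook locates the shared database by its "fsmdb0" suffix. A small sketch of that suffix matching (sample names are made up for illustration):

# Illustration of the naming contract between resmoke and name_utils.js.
def find_same_db_name(db_names, same_db_name="fsmdb0"):
    """Return the fully prefixed name of the shared FSM database, if any."""
    for name in db_names:
        if name.endswith(same_db_name):
            return name
    return None


assert find_same_db_name(["admin", "test3_fsmdb0", "local"]) == "test3_fsmdb0"
assert find_same_db_name(["admin", "local"]) is None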