summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--SConstruct2
-rwxr-xr-xbuildscripts/resmoke.py226
-rwxr-xr-xbuildscripts/resmoke_build_metadata.py103
-rwxr-xr-xbuildscripts/smoke.py50
-rw-r--r--buildscripts/smoke/README139
-rw-r--r--buildscripts/smoke/__init__.py9
-rw-r--r--buildscripts/smoke/executor.py137
-rw-r--r--buildscripts/smoke/external_programs.py352
-rw-r--r--buildscripts/smoke/fixtures.py314
-rwxr-xr-xbuildscripts/smoke/json_options.py476
-rw-r--r--buildscripts/smoke/mongodb_network.py29
-rw-r--r--buildscripts/smoke/mongodb_programs.py316
-rw-r--r--buildscripts/smoke/suites.py105
-rw-r--r--buildscripts/smoke/testers.py201
-rw-r--r--buildscripts/smoke/tests.py333
-rw-r--r--buildscripts/smoke_config/__init__.py26
-rw-r--r--buildscripts/smoke_config/auth.yaml27
-rw-r--r--buildscripts/smoke_config/auth_shell.yaml21
-rw-r--r--buildscripts/smoke_config/disk.yaml11
-rw-r--r--buildscripts/smoke_config/executor_default.yaml9
-rw-r--r--buildscripts/smoke_config/jscore.yaml21
-rw-r--r--buildscripts/smoke_config/log_console.yaml16
-rw-r--r--buildscripts/smoke_config/log_default.yaml39
-rw-r--r--buildscripts/smoke_config/log_file.yaml39
-rw-r--r--buildscripts/smoke_config/log_suppress.yaml20
-rw-r--r--buildscripts/smoke_config/master_slave.yaml8
-rw-r--r--buildscripts/smoke_config/no_server.yaml9
-rw-r--r--buildscripts/smoke_config/replicasets.yaml11
-rw-r--r--buildscripts/smoke_config/sharding.yaml11
-rw-r--r--buildscripts/smoke_config/unittests.yaml7
-rw-r--r--buildscripts/smoke_config/with_server.yaml15
-rw-r--r--jstests/sharding/cleanup_orphaned_cmd_hashed.js1
-rw-r--r--jstests/sharding/hash_basic.js3
-rw-r--r--jstests/sharding/hash_shard1.js1
-rw-r--r--jstests/sharding/hash_shard_non_empty.js1
-rw-r--r--jstests/sharding/hash_shard_num_chunks.js1
-rw-r--r--jstests/sharding/hash_shard_unique_compound.js1
-rw-r--r--jstests/sharding/hash_single_shard.js1
-rw-r--r--jstests/sharding/hash_skey_split.js3
-rw-r--r--jstests/sharding/index1.js4
-rw-r--r--jstests/sharding/missing_key.js1
-rw-r--r--jstests/sharding/regex_targeting.js1
-rw-r--r--site_scons/site_tools/mongo_unittest.py (renamed from site_scons/site_tools/unittest.py)21
43 files changed, 3120 insertions, 1 deletions
diff --git a/SConstruct b/SConstruct
index 718206afc85..8977533d1a7 100644
--- a/SConstruct
+++ b/SConstruct
@@ -463,7 +463,7 @@ envDict = dict(BUILD_ROOT=buildDir,
ARCHIVE_ADDITIONS=[],
PYTHON=utils.find_python(),
SERVER_ARCHIVE='${SERVER_DIST_BASENAME}${DIST_ARCHIVE_SUFFIX}',
- tools=["default", "gch", "jsheader", "mergelib", "unittest"],
+ tools=["default", "gch", "jsheader", "mergelib", "mongo_unittest"],
UNITTEST_ALIAS='unittests',
# TODO: Move unittests.txt to $BUILD_DIR, but that requires
# changes to MCI.
diff --git a/buildscripts/resmoke.py b/buildscripts/resmoke.py
new file mode 100755
index 00000000000..92b00bee72d
--- /dev/null
+++ b/buildscripts/resmoke.py
@@ -0,0 +1,226 @@
+#!/usr/bin/python
+
+"""
+Command line test utility for MongoDB tests of all kinds.
+
+CURRENTLY IN ACTIVE DEVELOPMENT
+If you are not a developer, you probably want to use smoke.py
+"""
+
+import logging
+import logging.config
+import optparse
+import os
+import re
+import urllib
+
+import smoke
+import smoke_config
+
+USAGE = \
+ """resmoke.py <YAML/JSON CONFIG>
+
+All options are specified as YAML or JSON - the configuration can be loaded via a file, as a named
+configuration in the "smoke_config" module, piped as stdin, or specified on the command line as
+options via the --set, --unset, and --push operators.
+
+NOTE: YAML can only be used if the PyYaml library is available on your system. Only JSON is
+supported on the command line.
+
+For example:
+ resmoke.py './jstests/disk/*.js'
+
+results in:
+
+ Test Configuration:
+ ---
+ tests:
+ roots:
+ - ./jstests/disk/*.js
+ suite:
+ ...
+ executor:
+ fixtures:
+ ...
+ testers:
+ ...
+ logging:
+ ...
+
+Named sets of options are available in the "smoke_config" module, including:
+
+ --jscore
+ --sharding
+ --replicasets
+ --disk
+
+For example:
+ resmoke.py --jscore
+ resmoke.py --sharding
+
+""" + smoke.json_options.JSONOptionParser.DEFAULT_USAGE
+
+DEFAULT_LOGGER_CONFIG = {}
+
+
+def get_local_logger_filenames(logging_root):
+ """Helper to extract filenames from the logging config for helpful reporting to the user."""
+
+ filenames = []
+ if "handlers" not in logging_root:
+ return filenames
+
+ for handler_name, handler_info in logging_root["handlers"].iteritems():
+ if "filename" in handler_info:
+ logger_filename = handler_info["filename"]
+ filenames.append("file://%s" %
+ urllib.pathname2url(os.path.abspath(logger_filename)))
+
+ return filenames
+
+
+def main():
+
+ named_configs = smoke_config.get_named_configs()
+
+ parser = smoke.json_options.JSONOptionParser(usage=USAGE,
+ configfile_args=named_configs)
+
+ help = \
+ """Just outputs the configured JSON options."""
+
+ parser.add_option('--dump-options', default=False, dest='dump_options', action="store_true",
+ help=help)
+
+ help = \
+ """Outputs all the tests found with metadata."""
+
+ parser.add_option('--dump-tests', default=False, dest='dump_tests', action="store_true",
+ help=help)
+
+ help = \
+ """Outputs the tests in the suite."""
+
+ parser.add_option('--dump-suite', default=False, dest='dump_suite', action="store_true",
+ help=help)
+
+ values, args, json_root = parser.parse_json_args()
+
+ # Assume remaining arguments are test roots
+ if args:
+ json_root = smoke.json_options.json_update_path(json_root, "tests.roots", args)
+
+ # Assume all files in suite if not specified
+ if "suite" not in json_root or json_root["suite"] is None:
+ json_root["suite"] = {}
+
+ # Assume default_logging if no other logging specified
+ if "logging" not in json_root or json_root["logging"] is None:
+ default_logging = \
+ smoke.json_options.json_file_load(named_configs["log_default"])
+ json_root["logging"] = default_logging["logging"]
+
+ if "executor" not in json_root or json_root["executor"] is None:
+ default_executor = \
+ smoke.json_options.json_file_load(named_configs["executor_default"])
+ json_root["executor"] = default_executor["executor"]
+
+ if not values.dump_options:
+ print "Test Configuration: \n---"
+
+ for key in ["tests", "suite", "executor", "logging"]:
+ if key in json_root:
+ print smoke.json_options.json_dump({key: json_root[key]}),
+ print
+
+ if values.dump_options:
+ return
+
+ def validate_config(tests=None, suite=None, executor=None, logging=None, **kwargs):
+
+ if len(kwargs) > 0:
+ raise optparse.OptionValueError(
+ "Unrecognized test options: %s" % kwargs)
+
+ if not all([tests is not None, executor is not None]):
+ raise optparse.OptionValueError(
+ "Test options must contain \"tests\" and \"executor\".")
+
+ validate_config(**json_root)
+ logging.config.dictConfig(json_root["logging"])
+
+ def re_compile_all(re_patterns):
+ if isinstance(re_patterns, basestring):
+ re_patterns = [re_patterns]
+ return [re.compile(pattern) for pattern in re_patterns]
+
+ def build_tests(roots=["./"],
+ include_files=[],
+ include_files_except=[],
+ exclude_files=[],
+ exclude_files_except=[],
+ extract_metadata=True,
+ **kwargs):
+
+ if len(kwargs) > 0:
+ raise optparse.OptionValueError(
+ "Unrecognized options for tests: %s" % kwargs)
+
+ file_regex_query = smoke.suites.RegexQuery(re_compile_all(include_files),
+ re_compile_all(
+ include_files_except),
+ re_compile_all(
+ exclude_files),
+ re_compile_all(exclude_files_except))
+
+ if isinstance(roots, basestring):
+ roots = [roots]
+
+ return smoke.tests.build_tests(roots, file_regex_query, extract_metadata)
+
+ tests = build_tests(**json_root["tests"])
+
+ if values.dump_tests:
+ print "Tests:\n%s" % tests
+
+ def build_suite(tests,
+ include_tags=[],
+ include_tags_except=[],
+ exclude_tags=[],
+ exclude_tags_except=[],
+ **kwargs):
+
+ if len(kwargs) > 0:
+ raise optparse.OptionValueError(
+ "Unrecognized options for suite: %s" % kwargs)
+
+ tag_regex_query = smoke.suites.RegexQuery(re_compile_all(include_tags),
+ re_compile_all(
+ include_tags_except),
+ re_compile_all(exclude_tags),
+ re_compile_all(exclude_tags_except))
+
+ return smoke.suites.build_suite(tests, tag_regex_query)
+
+ suite = build_suite(tests, **json_root["suite"])
+ suite.sort(key=lambda test: test.uri)
+
+ if values.dump_suite:
+ print "Suite:\n%s" % suite
+
+    print "Running %s tests in suite (out of %s tests found)..." % (len(suite), len(tests))
+
+ local_logger_filenames = get_local_logger_filenames(json_root["logging"])
+ if local_logger_filenames:
+ print "\nOutput from tests redirected to:\n\t%s\n" % \
+ "\n\t".join(local_logger_filenames)
+
+ try:
+ smoke.executor.exec_suite(suite, logging.getLogger("executor"), **json_root["executor"])
+ finally:
+ if local_logger_filenames:
+ print "\nOutput from tests was redirected to:\n\t%s\n" % \
+ "\n\t".join(local_logger_filenames)
+
+if __name__ == "__main__":
+ main()
diff --git a/buildscripts/resmoke_build_metadata.py b/buildscripts/resmoke_build_metadata.py
new file mode 100755
index 00000000000..ee489118214
--- /dev/null
+++ b/buildscripts/resmoke_build_metadata.py
@@ -0,0 +1,103 @@
+#!/usr/bin/python
+
+"""
+Sample utility to build test metadata JSON (i.e. tags) from test files that contain them.
+
+CURRENTLY IN ACTIVE DEVELOPMENT
+If you are not a developer, you probably want to look at smoke.py
+"""
+
+import optparse
+import re
+
+import smoke
+import smoke_config
+
+USAGE = \
+ """resmoke_build_metadata.py <YAML/JSON CONFIG>
+
+Generates test metadata based on information in test files themselves. All options are specified \
+as YAML or JSON - the configuration is the "tests" subset of the configuration for a resmoke.py
+test run.
+
+NOTE: YAML can only be used if the PyYaml library is available on your system. Only JSON is
+supported on the command line.
+
+For example:
+ resmoke_build_metadata.py './jstests/disk/*.js'
+
+results in:
+
+ Metadata extraction configuration:
+ ---
+ tests:
+ roots:
+ - ./jstests/disk/*.js
+ ...
+
+Named sets of options are available in the "smoke_config" module, including:
+
+ --jscore
+ --sharding
+ --replicasets
+ --disk
+
+For example:
+ resmoke.py --jscore
+ resmoke.py --sharding
+
+""" + smoke.json_options.JSONOptionParser.DEFAULT_USAGE
+
+
+def main():
+
+ parser = smoke.json_options.JSONOptionParser(usage=USAGE,
+ configfile_args=smoke_config.get_named_configs())
+
+ values, args, json_root = parser.parse_json_args()
+
+ if "tests" in json_root:
+ json_root = {"tests": json_root["tests"]}
+
+ # Assume remaining arguments are test roots
+ if args:
+ json_root = smoke.json_options.json_update_path(json_root, "tests.roots", args)
+
+ print "Metadata extraction configuration:"
+ print smoke.json_options.json_dump(json_root)
+
+ if not "tests" in json_root or json_root["tests"] is None:
+ raise Exception("No tests specified.")
+
+ def re_compile_all(re_patterns):
+ if isinstance(re_patterns, basestring):
+ re_patterns = [re_patterns]
+ return [re.compile(pattern) for pattern in re_patterns]
+
+ def build_test_metadata(roots=["./"],
+ include_files=[],
+ include_files_except=[],
+ exclude_files=[],
+ exclude_files_except=[],
+ **kwargs):
+
+ if len(kwargs) > 0:
+ raise optparse.OptionValueError(
+ "Unrecognized options for building test metadata: %s" % kwargs)
+
+ file_regex_query = smoke.suites.RegexQuery(re_compile_all(include_files),
+ re_compile_all(
+ include_files_except),
+ re_compile_all(
+ exclude_files),
+ re_compile_all(exclude_files_except))
+
+ tests = smoke.tests.build_tests(roots, file_regex_query, extract_metadata=True)
+
+ print "Writing test metadata for %s tests..." % len(tests)
+ smoke.tests.write_metadata(tests, json_only=True)
+ print "Test metadata written."
+
+ build_test_metadata(**json_root["tests"])
+
+if __name__ == "__main__":
+ main()
diff --git a/buildscripts/smoke.py b/buildscripts/smoke.py
index 37b533f2cc5..9c185079462 100755
--- a/buildscripts/smoke.py
+++ b/buildscripts/smoke.py
@@ -54,6 +54,7 @@ from pymongo import Connection
from pymongo.errors import OperationFailure
import cleanbb
+import smoke
import utils
try:
@@ -1002,11 +1003,36 @@ def expand_suites(suites,expandUseDB=True):
return tests
+
+def filter_tests_by_tag(tests, tag_query):
+ """Selects tests from a list based on a query over the tags in the tests."""
+
+ test_map = {}
+ roots = []
+ for test in tests:
+ root = os.path.abspath(test[0])
+ roots.append(root)
+ test_map[root] = test
+
+ new_style_tests = smoke.tests.build_tests(roots, extract_metadata=True)
+ new_style_tests = smoke.suites.build_suite(new_style_tests, tag_query)
+
+ print "\nTag query matches %s tests out of %s.\n" % (len(new_style_tests),
+ len(tests))
+
+ tests = []
+ for new_style_test in new_style_tests:
+ tests.append(test_map[os.path.abspath(new_style_test.filename)])
+
+ return tests
+
+
def add_exe(e):
if os.sys.platform.startswith( "win" ) and not e.endswith( ".exe" ):
e += ".exe"
return e
+
def set_globals(options, tests):
global mongod_executable, mongod_port, shell_executable, continue_on_failure
global small_oplog, small_oplog_rs
@@ -1259,6 +1285,14 @@ def main():
parser.add_option('--shell-write-mode', dest='shell_write_mode', default="commands",
help='Sets the shell to use a specific write mode: commands/compatibility/legacy (default:commands)')
+ parser.add_option('--include-tags', dest='include_tags', default="", action='store',
+ help='Filters jstests run by tag regex(es) - a tag in the test must match the regexes. ' +
+ 'Specify single regex string or JSON array.')
+
+ parser.add_option('--exclude-tags', dest='exclude_tags', default="", action='store',
+ help='Filters jstests run by tag regex(es) - no tags in the test must match the regexes. ' +
+ 'Specify single regex string or JSON array.')
+
global tests
(options, tests) = parser.parse_args()
@@ -1313,6 +1347,22 @@ def main():
tests = filter( ignore_test, tests )
+ if options.include_tags or options.exclude_tags:
+
+ def to_regex_array(tags_option):
+ if not tags_option:
+ return []
+
+ tags_list = smoke.json_options.json_coerce(tags_option)
+ if isinstance(tags_list, basestring):
+ tags_list = [tags_list]
+
+ return map(re.compile, tags_list)
+
+ tests = filter_tests_by_tag(tests,
+ smoke.suites.RegexQuery(include_res=to_regex_array(options.include_tags),
+ exclude_res=to_regex_array(options.exclude_tags)))
+
if not tests:
print "warning: no tests specified"
return
diff --git a/buildscripts/smoke/README b/buildscripts/smoke/README
new file mode 100644
index 00000000000..09f9f424d49
--- /dev/null
+++ b/buildscripts/smoke/README
@@ -0,0 +1,139 @@
+new smoke module README
+
+CURRENTLY IN ACTIVE DEVELOPMENT
+
+This directory provides a POC implementation of a new test runner. Features include:
+
+ - Test metadata and test tagging
+ - Pluggable and isolated test APIs...
+ - ...for different test types
+ - Simple JSON/YAML (re)configuration
+
+RUNNING:
+
+For command line options invoke:
+
+ $ ./buildscripts/resmoke.py --help
+
+The smoke test runner is completely configured by a JSON/YAML configuration - this configuration can either be loaded from file(s) or built at the command line using --set/--unset/--push manipulations to "nested.path.specifiers". For basic testing this isn't necessary however, configuration files have already been provided and are available using special command line options, as shown below.
+
+Some simple examples:
+
+Run a MongoDB test suite:
+
+ $ ./buildscripts/resmoke.py --jscore
+ $ ./buildscripts/resmoke.py --disk
+ $ ./buildscripts/resmoke.py --sharding
+ $ ./buildscripts/resmoke.py --replicasets
+ (more to come)
+
+To run selected files inside a MongoDB test suite:
+
+ $ ./buildscripts/resmoke.py --jscore jstests/core/count.js
+ $ ./buildscripts/resmoke.py --disk jstests/disk/b*.js
+ $ ./buildscripts/resmoke.py --sharding jstests/sharding/addshard1.js jstests/sharding/addshard2.js
+
+To run a suite with authentication:
+
+ $ ./buildscripts/resmoke.py --jscore --auth
+ $ ./buildscripts/resmoke.py --sharding --auth_shell
+
+NOTE: You may need to change permissions for the jstests/libs/authTestsKey.
+
+To run the core suite with master/slave replication (small oplog):
+
+ $ ./buildscripts/resmoke.py --jscore --master_slave
+ $ ./buildscripts/resmoke.py --jscore --master_slave --auth
+
+By default, the output of smoke testing goes to files. This can be changed, however:
+
+ $ ./buildscripts/resmoke.py --disk --log_console
+ $ ./buildscripts/resmoke.py --jscore --log_suppress
+
+Sometimes we may want to set custom options while running the standard suites:
+
+ $ ./buildscripts/resmoke.py --jscore \
+ --set "executor.fixtures.mongodb_server.mongod_options.noprealloc" ""
+
+... or change the dbpath of the mongod fixture:
+
+ $ ./buildscripts/resmoke.py --jscore \
+ --set "executor.fixtures.mongodb_server.mongod_options.dbpath" "/data/db/mypath"
+
+... or change the executables used by the mongod fixture and the shell:
+
+ $ ./buildscripts/resmoke.py --jscore \
+ --set "executor.fixtures.mongodb_server.mongod_executable" "mongod-2.6" \
+ --set "executor.testers.js_test.shell_executable" "mongo-2.6"
+
+... or change verbosity of the mongod fixture:
+
+ $ ./buildscripts/resmoke.py --jscore \
+ --set "executor.fixtures.mongodb_server.mongod_options.verbose" 2
+
+... or change the value of a server parameter:
+
+ $ ./buildscripts/resmoke.py --jscore \
+ --set "executor.fixtures.mongodb_server.mongod_options.set_parameters.enableLocalhostAuthBypass" "false"
+
+... or set some auth parameters:
+
+ $ ./buildscripts/resmoke.py --jscore --auth \
+ --set "executor.fixtures.mongodb_server.mongod_options.keyFile" "myKey" \
+ --set "executor.fixtures.mongodb_server.mongod_options.setParameters.enableLocalhostAuthBypass" false \
+ --set "executor.fixtures.shell_globals.TestData.keyFile" "myKey"
+
+This can quickly get wordy, with lots of parameters. However, if this is a configuration you plan on testing repeatedly:
+
+ $ mkdir -p ~/.smoke_config
+ $ ./buildscripts/resmoke.py [all your options and args here] --dump-options > ~/.smoke_config/my_auth.yaml
+ $ ./buildscripts/resmoke.py --my_auth
+
+Note that you can also pipe config file data *into* resmoke.py if you'd like to toy with custom config processing.
+
+As you can see, "special" options to resmoke.py are actually just .json/.yaml option files. The "smoke_config" module provides access to the default suite .json/.yaml files, and you can add/override to these option files in your local user ".smoke_config" directory. Equivalently you can use the '--config-file' option to load a file not in the special directories.
+
+Also, consider running individual files in a suite:
+
+ $ ./buildscripts/resmoke.py --jscore jstests/core/count.js
+
+This is just shorthand for overriding the "tests.roots" option with the specified files:
+
+ $ ./buildscripts/resmoke.py --jscore --set "tests.roots" "jstests/core/count.js"
+
+TEST METADATA:
+
+Test metadata comes from two sources - embedded in the test files themselves and in a special "test_metadata.json" sibling file on the same path as the test file (by default). For jstests, the "test_metadata.json" file isn't really necessary to manage - but for executable tests that are not inspectable (unittests, dbtests) an external metadata file is needed.
+
+For jstests things are generally simpler. Tags can be added to jstests and will be parsed (by default) when resmoke.py builds the test database from the test roots. These tags have the following form, at the beginning of a jstest file:
+
+ /**
+ * @tags : [ mytagA, mytagB ]
+ */
+
+Note that the tags array must be well-formed YAML.
+
+These tags are then available for test filtering:
+
+ $ ./buildscripts/resmoke.py --jscore --set suite.include_tags '^mytagA$'
+ $ ./buildscripts/resmoke.py --disk --set suite.exclude_tags '^assumes_memmapped$'
+
+NOTE: smoke.py has also been instrumented to manage basic jstest tags, with the following syntax:
+
+ $ ./buildscripts/smoke.py jsCore --include-tags '^mytagA$'
+
+TEST METADATA BUILDING:
+
+For automated / repeated testing, sometimes it isn't desirable to scan every test file for changes to test metadata. The "tests.extract_metadata" option controls this behavior. An example script to extract metadata in one shot (which can then be used for many test runs without further extraction) is available at:
+
+ $ ./buildscripts/resmoke_build_metadata.py --jscore
+
+Note that the example script uses the same kind of options as the resmoke.py script.
+
+INTEGRATION WITH OTHER TOOLS:
+
+To use test database, suite extraction, and suite execution functionality in other tools (like SConscript), import the "smoke" module. This provides:
+
+- smoke.tests: test discovery, metadata load/save
+- smoke.suite: test filtering by tags
+- smoke.executor: test execution with custom fixtures and logging
diff --git a/buildscripts/smoke/__init__.py b/buildscripts/smoke/__init__.py
new file mode 100644
index 00000000000..41d6ee653e3
--- /dev/null
+++ b/buildscripts/smoke/__init__.py
@@ -0,0 +1,9 @@
+import json_options
+import tests
+import suites
+import executor
+
+from fixtures import *
+from testers import *
+
+
diff --git a/buildscripts/smoke/executor.py b/buildscripts/smoke/executor.py
new file mode 100644
index 00000000000..bae5f10b77f
--- /dev/null
+++ b/buildscripts/smoke/executor.py
@@ -0,0 +1,137 @@
+"""
+Module which allows execution of a suite of tests with customizable fixtures and testers.
+
+Fixtures are set up per-suite, and register APIs per-test. Generally this is custom setup code.
+
+Testers encapsulate test code of different types in a standard, UnitTest object.
+"""
+
+import inspect
+import logging
+import optparse
+import traceback
+import unittest
+
+import fixtures
+import testers
+
+
+def exec_suite(suite, logger, **kwargs):
+ """Main entry point, executes a suite of tests with the given logger and executor arguments."""
+
+ suite_executor = TestSuiteExecutor(logger, **kwargs)
+
+ try:
+ successful_setup = suite_executor.setup_suite(suite)
+
+ if successful_setup:
+ suite_executor.exec_suite()
+
+ finally:
+ suite_executor.teardown_suite(suite)
+
+
+def instantiate(class_name, *args, **kwargs):
+ """Helper to dynamically instantiate a class from a name."""
+ split_name = class_name.split(".")
+ module_name = split_name[0]
+ class_name = ".".join(split_name[1:])
+
+ module = __import__(module_name)
+ class_ = getattr(module, class_name)
+ return class_(*args, **kwargs)
+
+
+class TestSuiteExecutor(object):
+
+ """The state of execution of a suite of tests.
+
+ The job of the TestSuiteExecutor is to convert the incoming fixtures and tester configuration
+ into Fixture and TestCase objects, then execute them using the standard unittest framework.
+
+ """
+
+ def __init__(self, logger, testers={}, fixtures={}, fail_fast=False, **kwargs):
+
+ self.logger = logger
+ self.testers = testers
+ self.fixtures = fixtures
+ self.fail_fast = fail_fast
+
+ if len(kwargs) > 0:
+ raise optparse.OptionValueError("Unrecognized options for executor: %s" % kwargs)
+
+ for fixture_name in self.fixtures:
+ self.fixtures[fixture_name] = \
+ self.build_fixture(fixture_name, **self.fixtures[fixture_name])
+
+ def build_fixture(self, fixture_name, fixture_class=None, fixture_logger=None,
+ **fixture_kwargs):
+
+ if not fixture_class:
+ fixture_class = fixtures.DEFAULT_FIXTURE_CLASSES[fixture_name]
+
+ if not fixture_logger:
+ fixture_logger = self.logger.getChild("fixtures.%s" % fixture_name)
+ else:
+ fixture_logger = logging.getLogger(fixture_logger)
+
+ return instantiate(fixture_class, fixture_logger, **fixture_kwargs)
+
+ def build_tester(self, test):
+
+ tester_type = test.test_type
+
+ def extract_tester_args(tester_class=None, tester_logger=None, **tester_kwargs):
+ return tester_class, tester_logger, tester_kwargs
+
+ tester_class, tester_logger, tester_kwargs = \
+ extract_tester_args(
+ **(self.testers[tester_type] if tester_type in self.testers else {}))
+
+ if not tester_class:
+ tester_class = testers.DEFAULT_TESTER_CLASSES[tester_type]
+
+ if not tester_logger:
+ tester_logger = self.logger.getChild("testers.%s.%s" % (tester_type, test.uri))
+ else:
+ tester_logger = logging.getLogger(tester_logger)
+
+ test_apis = []
+ for fixture_name, fixture in self.fixtures.items():
+ test_api = fixture.build_api(tester_type, tester_logger)
+ if test_api:
+ test_apis.append(test_api)
+
+ return instantiate(tester_class, test, test_apis, tester_logger, **tester_kwargs)
+
+ def setup_suite(self, suite):
+
+ self.setup_fixtures = {}
+ for fixture_name, fixture in self.fixtures.items():
+ try:
+ fixture.setup()
+ self.setup_fixtures[fixture_name] = fixture
+ except:
+ print "Suite setup failed: %s" % fixture_name
+ traceback.print_exc()
+ return False
+
+ self.unittest_suite = unittest.TestSuite()
+ for test in suite:
+ self.unittest_suite.addTest(self.build_tester(test))
+
+ return True
+
+ def exec_suite(self):
+ # TODO: More stuff here?
+ unittest.TextTestRunner(
+ verbosity=2, failfast=self.fail_fast).run(self.unittest_suite)
+
+ def teardown_suite(self, suite):
+
+ for fixture_name, fixture in self.setup_fixtures.items():
+ try:
+ fixture.teardown()
+ except:
+ print "Suite teardown failed: %s" % fixture_name
+ traceback.print_exc()
diff --git a/buildscripts/smoke/external_programs.py b/buildscripts/smoke/external_programs.py
new file mode 100644
index 00000000000..7ccc7793f44
--- /dev/null
+++ b/buildscripts/smoke/external_programs.py
@@ -0,0 +1,352 @@
+
+"""
+Module for simple execution of external programs with keyword arguments.
+
+Also supports piping output into standard logging utilities.
+"""
+
+
+import logging
+import os
+import threading
+import sys
+import subprocess
+import time
+
+KWARG_TYPE_IGNORE = -1
+KWARG_TYPE_NORMAL = 0
+KWARG_TYPE_EQUAL = 1
+KWARG_TYPE_MULTIPLE = 2
+KWARG_TYPE_CALLBACK = 3
+
+
+def apply_json_args(process, json_doc, custom_kwargs={}):
+ """Translate keyword arguments (JSON) into an argument list for an external process.
+
+ CALLBACK-type args can do arbitrary things to the process being started (set env vars, change
+ the process name, etc.).
+
+ """
+
+ for field in json_doc:
+
+ kwarg, kwarg_type = ("--" + field, KWARG_TYPE_NORMAL) if field not in custom_kwargs \
+ else custom_kwargs[field][0:2]
+ value = json_doc[field]
+
+ if kwarg_type == KWARG_TYPE_NORMAL:
+
+ if value is not None:
+ process.arguments.append(kwarg)
+ if str(value):
+ process.arguments.append(str(value))
+
+ elif kwarg_type == KWARG_TYPE_EQUAL:
+
+ process.arguments.append(kwarg + "=" + str(value))
+
+ elif kwarg_type == KWARG_TYPE_MULTIPLE:
+
+ for ind_value in value:
+ process.arguments.append(kwarg)
+ process.arguments.append(str(ind_value))
+
+ elif kwarg_type == KWARG_TYPE_CALLBACK:
+
+ cl_arg_callback = custom_kwargs[field][2]
+ cl_arg_callback(process, field, value)
+
+
+class LoggerPipe(threading.Thread):
+
+ """Monitors an external program's output and sends it to a logger."""
+
+ def __init__(self, logger, level, pipe_out):
+ threading.Thread.__init__(self)
+
+ self.logger = logger
+ self.level = level
+ self.pipe_out = pipe_out
+
+ self.lock = threading.Lock()
+ self.condition = threading.Condition(self.lock)
+
+ self.started = False
+ self.finished = False
+
+ self.start()
+
+ def run(self):
+ with self.lock:
+ self.started = True
+ self.condition.notify_all()
+
+ for line in self.pipe_out:
+ self.logger.log(self.level, line.strip())
+
+ with self.lock:
+ self.finished = True
+ self.condition.notify_all()
+
+ def wait_until_started(self):
+ with self.lock:
+ while not self.started:
+ self.condition.wait()
+
+ def wait_until_finished(self):
+ with self.lock:
+ while not self.finished:
+ self.condition.wait()
+
+ def flush(self):
+ for handler in self.logger.handlers:
+ handler.flush()
+
+
+class ExternalContext(object):
+
+ def __init__(self, env=None, env_vars={}, logger=None, **kwargs):
+ self.env = env
+ self.env_vars = env_vars
+        self.logger = logger
+        if not logger:
+            self.logger = logging.getLogger("")
+ self.kwargs = dict(kwargs.items())
+
+ def clone(self):
+ return ExternalContext(self.env, self.env_vars, self.logger, **self.kwargs)
+
+
+class ExternalProgram(object):
+
+ """Encapsulates an execution of an external program.
+
+ Unlike subprocess, does not immediately execute the program but allows for further configuration
+ and setup. Converts keyword arguments in JSON into an argument list and allows for easy
+ execution with custom environment variables.
+
+ """
+
+ def __init__(self,
+ executable,
+ context=None, env=None, env_vars=None,
+ custom_kwargs={},
+ **kwargs):
+
+ self.executable = executable
+ self.context = context
+ if not self.context:
+ self.context = ExternalContext(env, env_vars, **kwargs)
+ else:
+ self.context.kwargs.update(kwargs)
+
+ self.custom_kwargs = custom_kwargs
+
+ self.process = None
+
+ def build_process(self, context=None):
+
+ if not context:
+ context = self.context
+
+ process_kwargs = {}
+ process_kwargs.update(context.kwargs)
+
+ process = _Process(self.executable,
+ env_vars=context.env_vars,
+ logger=context.logger)
+
+ apply_json_args(process, process_kwargs, self.custom_kwargs)
+
+ return process
+
+ def logger(self):
+ return self.context.logger
+
+ def start(self):
+ self.process = self.build_process()
+ self.process.start()
+
+ def pid(self):
+ return self.process.subprocess.pid
+
+ def poll(self):
+ return self.process.poll()
+
+ def wait(self):
+ return_code = self.process.wait()
+ self.process = None
+ return return_code
+
+ def stop(self):
+ return_code = self.process.stop()
+ self.process = None
+ return return_code
+
+ def flush(self):
+ self.process.flush()
+
+ def __str__(self):
+ return (self.process if self.process else self.build_process()).__str__()
+
+ def __repr__(self):
+ return self.__str__()
+
+
+class _Process(object):
+
+ """The system-independent execution of an external program.
+
+ Handles finicky stuff once we have our environment, arguments, and logger sorted out.
+
+ """
+
+ def __init__(self, executable, arguments=[], env=None, env_vars=None, logger=None):
+
+ self.executable = executable
+ self.arguments = [] + arguments
+ self.env = env
+ self.env_vars = env_vars
+ self.logger = logger
+
+ self.subprocess = None
+ self.stdout_logger = None
+ self.stderr_logger = None
+ # Windows only
+ self.subprocess_job_object = None
+
+ def start(self):
+
+ argv, env = [self.executable] + self.arguments, self.env
+
+ if self.env_vars:
+ if not env:
+ env = os.environ.copy()
+ env.update(self.env_vars)
+
+ creation_flags = 0
+ if os.sys.platform == "win32":
+ # Magic number needed to allow job reassignment in Windows 7
+ # see: MSDN - Process Creation Flags - ms684863
+ CREATE_BREAKAWAY_FROM_JOB = 0x01000000
+ creation_flags = CREATE_BREAKAWAY_FROM_JOB
+
+ stdout = sys.stdout if not self.logger else subprocess.PIPE
+ stderr = sys.stderr if not self.logger else subprocess.PIPE
+
+ self.subprocess = subprocess.Popen(argv, env=env, creationflags=creation_flags,
+ stdout=stdout, stderr=stderr)
+
+ if stdout == subprocess.PIPE:
+ self.stdout_logger = LoggerPipe(self.logger, logging.INFO, self.subprocess.stdout)
+ self.stdout_logger.wait_until_started()
+ if stderr == subprocess.PIPE:
+ self.stderr_logger = LoggerPipe(self.logger, logging.ERROR, self.subprocess.stderr)
+ self.stderr_logger.wait_until_started()
+
+ if os.sys.platform == "win32":
+
+ # Create a job object with the "kill on job close" flag
+ # This is inherited by child processes (i.e. the mongod started on our behalf by
+ # buildlogger) and lets us terminate the whole tree of processes rather than
+ # orphaning the mongod.
+ import win32job
+
+ job_object = win32job.CreateJobObject(None, '')
+
+ job_info = win32job.QueryInformationJobObject(
+ job_object,
+ win32job.JobObjectExtendedLimitInformation)
+ job_info['BasicLimitInformation']['LimitFlags'] |= \
+ win32job.JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE
+ win32job.SetInformationJobObject(job_object,
+ win32job.JobObjectExtendedLimitInformation,
+ job_info)
+            win32job.AssignProcessToJobObject(job_object, self.subprocess._handle)
+
+ self.subprocess_job_object = job_object
+
+ def poll(self):
+ return self.subprocess.poll()
+
+ def wait(self):
+
+ return_code = self.subprocess.wait()
+
+ self.flush()
+ if self.stdout_logger:
+ self.stdout_logger.wait_until_finished()
+ self.stdout_logger = None
+ if self.stderr_logger:
+ self.stderr_logger.wait_until_finished()
+ self.stderr_logger = None
+
+ return return_code
+
+ def stop(self):
+
+ try:
+ if os.sys.platform == "win32":
+ import win32job
+ win32job.TerminateJobObject(self.subprocess_job_object, -1)
+ # Windows doesn't seem to kill the process immediately, so give
+ # it some time to die
+ time.sleep(5)
+ elif hasattr(self.subprocess, "terminate"):
+ # This method added in Python 2.6
+ self.subprocess.terminate()
+ else:
+ os.kill(self.subprocess.pid, 15)
+ except Exception as e:
+            print >> sys.stderr, "error shutting down process"
+            print >> sys.stderr, e
+
+ return self.wait()
+
+ def flush(self):
+
+ if self.subprocess:
+ if not self.stderr_logger:
+ # Going to the console
+ sys.stderr.flush()
+ else:
+ self.stderr_logger.flush()
+
+ if self.subprocess:
+ if not self.stdout_logger:
+ # Going to the console
+ sys.stdout.flush()
+ else:
+ self.stdout_logger.flush()
+
+ def __str__(self):
+
+ # We only want to show the *different* environment variables
+ def env_compare(env_orig, env_new):
+ diff = {}
+ for field, value in env_new.iteritems():
+ if not field in env_orig:
+ diff[field] = value
+ return diff
+
+ env_diff = env_compare(os.environ, self.env) if self.env else {}
+ if self.env_vars:
+ for field, value in self.env_vars.iteritems():
+ env_diff[field] = value
+
+ env_strs = []
+ for field, value in env_diff.iteritems():
+ env_strs.append("%s=%s" % (field, value))
+
+ cl = []
+ if env_strs:
+ cl.append(" ".join(env_strs))
+ cl.append(self.executable)
+ if self.arguments:
+ cl.append(" ".join(self.arguments))
+ if self.subprocess:
+ cl.append("(%s)" % self.subprocess.pid)
+
+ return " ".join(cl)
+
+ def __repr__(self):
+ return self.__str__()
diff --git a/buildscripts/smoke/fixtures.py b/buildscripts/smoke/fixtures.py
new file mode 100644
index 00000000000..7ae8a506c7c
--- /dev/null
+++ b/buildscripts/smoke/fixtures.py
@@ -0,0 +1,314 @@
+"""
+Fixtures for the execution of JSTests
+"""
+
+import os
+import shutil
+import time
+
+from external_programs import *
+from mongodb_programs import MongoD
+from mongodb_programs import MONGOD_DEFAULT_DATA_PATH
+from mongodb_programs import MONGOD_DEFAULT_EXEC
+
+DEFAULT_FIXTURE_CLASSES = {"mongodb_server": "smoke.SingleMongoDFixture",
+ "shell_globals": "smoke.GlobalShellFixture"}
+
+
class Fixture(object):

    """Base class for all fixture objects - require suite setup and teardown and api per-test."""

    def __init__(self, logger):
        # Logger used for all fixture lifecycle messages
        self.logger = logger

    def setup(self):
        """Set up suite-wide resources; called once before any tests run."""
        pass

    def build_api(self, test_type, test_logger):
        """Return a per-test API object for the given test type (or None)."""
        pass

    def teardown(self):
        """Tear down suite-wide resources; called once after all tests run."""
        pass
+
+
class SimpleFixture(Fixture):

    """Simple fixture objects do not have extra state per-test.

    This means they can implement the api by just implementing the add_to_<type> methods.

    Fixtures which need to do per-test logging cannot use this simplification, for example.
    """

    def __init__(self, logger):
        Fixture.__init__(self, logger)

    def build_api(self, test_type, test_logger):
        # The fixture itself doubles as the per-test API object
        return self

    def add_to_shell(self, shell_context):
        """Contribute state (globals, db address) to a shell-based test."""
        pass

    def add_to_process(self, external_context):
        """Contribute state to an external-program-based test."""
        pass

    def teardown_api(self):
        """Per-test cleanup hook, called after each test."""
        pass
+
+
+def _get_mapped_size_MB(client):
+ status = client.admin.command("serverStatus")
+
+ if "mem" not in status or "mapped" not in status["mem"]:
+ raise Exception(
+ "Could not get data size of MongoDB server, status was %s" % status)
+
+ return status["mem"]["mapped"]
+
+
class SingleMongoDFixture(SimpleFixture):

    """Basic fixture which provides JSTests with a single-MongoD database to connect to.

    Can be restarted automatically after reaching a configurable "mapped" size.

    """

    def __init__(self, logger,
                 mongod_executable=MONGOD_DEFAULT_EXEC,
                 mongod_options=None,
                 default_data_path=MONGOD_DEFAULT_DATA_PATH,
                 preserve_dbpath=False,
                 max_mapped_size_MB=None):
        """mongod_options is expanded into MongoD keyword arguments at setup.

        Defaults to None instead of a shared mutable {} (the old default was a
        single dict object shared by every instance).  Also initializes the
        base class explicitly instead of setting self.logger by hand.
        """

        SimpleFixture.__init__(self, logger)
        self.mongod_executable = mongod_executable
        self.mongod_options = mongod_options if mongod_options is not None else {}

        self.default_data_path = default_data_path
        self.preserve_dbpath = preserve_dbpath
        self.max_mapped_size_MB = max_mapped_size_MB

        # Constructed lazily in setup() and reused across restarts
        self.mongod = None

    def setup(self):
        """Start the mongod and block until it accepts client connections."""

        if self.mongod is None:
            self.mongod = MongoD(executable=self.mongod_executable,
                                 default_data_path=self.default_data_path,
                                 preserve_dbpath=self.preserve_dbpath,
                                 context=ExternalContext(logger=self.logger),
                                 **self.mongod_options)

        try:
            self.logger.info("Starting MongoDB server...\n%s" % self.mongod)

            self.mongod.start()

            self.logger.info("MongoDB server started at %s:%s with pid %s." %
                             (self.mongod.host, self.mongod.port, self.mongod.pid()))

            self.mongod.wait_for_client()

            self.logger.info("MongoDB server at %s:%s successfully contacted." %
                             (self.mongod.host, self.mongod.port))

            self.mongod.flush()

        except:
            self.logger.error("MongoDB server failed to start.", exc_info=True)
            raise

    def add_to_shell(self, shell_context):
        """Point shell-based tests at this server."""
        shell_context.db_address = \
            "%s:%s" % (self.mongod.host, self.mongod.port)

    def teardown_api(self):
        """After each test, restart the server once it grows past the cap."""
        if self.max_mapped_size_MB is not None:
            if _get_mapped_size_MB(self.mongod.client()) > self.max_mapped_size_MB:

                self.logger.info(
                    "Maximum mapped size %sMB reached, restarting MongoDB..." %
                    self.max_mapped_size_MB)

                self.teardown()
                self.setup()

    def teardown(self):
        """Stop the mongod, logging (and re-raising) any failure."""

        try:
            self.logger.info("Stopping MongoDB server at %s:%s with pid %s..." %
                             (self.mongod.host, self.mongod.port, self.mongod.pid()))

            self.mongod.stop()

            self.logger.info("MongoDB server stopped.")

        except:
            self.logger.error("MongoDB server failed to stop.", exc_info=True)
            raise
+
+
class MasterSlaveFixture(SimpleFixture):

    """Fixture which provides JSTests with a master-MongoD database to connect to.

    A slave MongoD instance replicates the master in the background.

    """

    def __init__(self, logger,
                 mongod_executable=MONGOD_DEFAULT_EXEC,
                 mongod_options=None,
                 master_options=None,
                 slave_options=None,
                 default_data_path=MONGOD_DEFAULT_DATA_PATH,
                 preserve_dbpath=False,
                 max_mapped_size_MB=None):
        """Common mongod_options apply to both nodes; master_options and
        slave_options override them per node.  None defaults replace the old
        shared mutable {} defaults."""

        SimpleFixture.__init__(self, logger)
        self.mongod_executable = mongod_executable

        self.master_options = {}
        self.master_options.update(mongod_options or {})
        self.master_options.update(master_options or {})

        self.slave_options = {}
        self.slave_options.update(mongod_options or {})
        self.slave_options.update(slave_options or {})

        self.default_data_path = default_data_path
        self.preserve_dbpath = preserve_dbpath
        self.max_mapped_size_MB = max_mapped_size_MB

        # Both constructed lazily in setup() and reused across restarts
        self.master = None
        self.slave = None

    def setup(self):
        """Start the master, then a slave replicating from it."""

        if self.master is None:

            # "master" is a flag-style mongod option (empty value)
            self.master_options["master"] = ""

            self.master = MongoD(executable=self.mongod_executable,
                                 default_data_path=self.default_data_path,
                                 preserve_dbpath=self.preserve_dbpath,
                                 context=ExternalContext(logger=self.logger),
                                 **self.master_options)

        try:
            self.logger.info("Starting MongoDB master server...\n%s" % self.master)

            self.master.start()

            self.logger.info("MongoDB master server started at %s:%s with pid %s." %
                             (self.master.host, self.master.port, self.master.pid()))

            self.master.wait_for_client()

            self.logger.info("MongoDB master server at %s:%s successfully contacted." %
                             (self.master.host, self.master.port))

            self.master.flush()

        except:
            self.logger.error("MongoDB master server failed to start.", exc_info=True)
            raise

        if self.slave is None:

            self.slave_options["slave"] = ""
            self.slave_options["source"] = "%s:%s" % (self.master.host, self.master.port)

            # NOTE(review): preserve_dbpath is intentionally not forwarded to
            # the slave (it always starts from a clean dbpath) - confirm
            # before changing.
            self.slave = MongoD(executable=self.mongod_executable,
                                default_data_path=self.default_data_path,
                                context=ExternalContext(logger=self.logger),
                                **self.slave_options)

        try:
            self.logger.info("Starting MongoDB slave server...\n%s" % self.slave)

            self.slave.start()

            self.logger.info("MongoDB slave server started at %s:%s with pid %s." %
                             (self.slave.host, self.slave.port, self.slave.pid()))

            self.slave.wait_for_client()

            self.logger.info("MongoDB slave server at %s:%s successfully contacted." %
                             (self.slave.host, self.slave.port))

            self.slave.flush()

        except:
            self.logger.error("MongoDB slave server failed to start.", exc_info=True)
            raise

    def add_to_shell(self, shell_context):
        """Point shell-based tests at the master only."""
        shell_context.db_address = \
            "%s:%s" % (self.master.host, self.master.port)

    def teardown_api(self):
        """After each test, restart the pair once the master grows past the cap."""
        if self.max_mapped_size_MB is not None:
            if _get_mapped_size_MB(self.master.client()) > self.max_mapped_size_MB:

                self.logger.info(
                    "Maximum mapped size %sMB reached, restarting MongoDB..." %
                    self.max_mapped_size_MB)

                self.teardown()
                self.setup()

    def teardown(self):
        """Stop the slave, then the master.

        The master shutdown runs in a finally block so a failure stopping the
        slave no longer leaks a running master process (the original returned
        early and never stopped the master in that case).
        """

        try:
            try:
                self.logger.info("Stopping MongoDB slave server at %s:%s with pid %s..." %
                                 (self.slave.host, self.slave.port, self.slave.pid()))

                self.slave.stop()

                self.logger.info("MongoDB slave server stopped.")

            except:
                self.logger.error("MongoDB slave server failed to stop.", exc_info=True)
                raise
        finally:
            try:
                self.logger.info("Stopping MongoDB master server at %s:%s with pid %s..." %
                                 (self.master.host, self.master.port, self.master.pid()))

                self.master.stop()

                self.logger.info("MongoDB master server stopped.")

            except:
                self.logger.error("MongoDB master server failed to stop.", exc_info=True)
                raise
+
+
class GlobalShellFixture(SimpleFixture):

    """Passthrough fixture which just allows passing JSON options directly as shell global vars.

    Useful for passing arbitrary options to jstests when running in the shell, for example auth
    options.

    """

    def __init__(self, logger, **kwargs):
        SimpleFixture.__init__(self, logger)
        # Stored verbatim; injected into the shell as global variables
        self.kwargs = kwargs

    def setup(self):
        # Nothing to start - state is purely the stored options
        pass

    def add_to_shell(self, shell_context):
        """Merge the stored options into the shell's global variable context."""
        shell_context.global_context.update(self.kwargs)

    def teardown_api(self):
        pass

    def teardown(self):
        # Nothing to stop
        pass
diff --git a/buildscripts/smoke/json_options.py b/buildscripts/smoke/json_options.py
new file mode 100755
index 00000000000..b6f95a916b7
--- /dev/null
+++ b/buildscripts/smoke/json_options.py
@@ -0,0 +1,476 @@
+#!/usr/bin/python
+
+"""
+JSON/YAML option parsing library and command line manipulation.
+
+Also the entry point for running tests based on JSON options files. See usage for more info.
+"""
+
+import json
+import optparse
+import os
+import re
+import sys
+
+# Transparently handle YAML existing or not
+try:
+ import yaml
+except ImportError:
+ yaml = None
+
+
def json_underscore_fields(root):
    """Recursively rename dash-separated field names to underscore_separated.

    Mutates dicts/lists in place and returns root for convenience; leaf
    values are left untouched.
    """

    if isinstance(root, dict):
        # Iterate over a snapshot of the items: deleting and re-adding keys
        # while iterating the live dict raises RuntimeError on Python 3 and
        # was fragile on Python 2.
        for field, value in list(root.items()):
            del root[field]
            root[field.replace("-", "_")] = json_underscore_fields(value)
    elif isinstance(root, list):
        for i in range(len(root)):
            root[i] = json_underscore_fields(root[i])

    return root
+
# Matches // line comments and /* */ block comments, plus the inline
# whitespace immediately before them.
COMMENT_RE = \
    re.compile(
        '(^)?[^\S\n]*/(?:\*(.*?)\*/[^\S\n]*|/[^\n]*)($)?', re.DOTALL | re.MULTILINE)


def json_strip_comments(json_with_comments):
    """Strip // and /* */ comments from JSON strings, for easier input."""

    stripped = json_with_comments
    while True:
        match = COMMENT_RE.search(stripped)
        if match is None:
            break
        # Splice out the matched comment text
        stripped = stripped[:match.start()] + stripped[match.end():]

    return stripped
+
+
def json_update(root, new_root):
    """Recursively merge new_root into root.

    Dict-into-dict merges field by field; any other combination simply
    replaces the old value with the new one.  Returns the merged document.
    """

    if not (isinstance(root, dict) and isinstance(new_root, dict)):
        # Wholesale replacement for non-dict (or mismatched) values
        return new_root

    for field, new_value in new_root.items():
        # Missing fields merge against None, which replaces
        root[field] = json_update(root.get(field), new_value)

    return root
+
+
class Unset(object):

    """Sentinel type marking a JSON field as 'unset' during path updates.

    Renders as "~" (YAML's null) in both str and repr form.
    """

    def __str__(self):
        return "~"

    __repr__ = __str__
+
+
def json_update_path(root, path, value, **kwargs):
    """Update a JSON root based on a path. Special '.'-traversal, and '*' and '**' traversal.

    Paths like "x.*.y" resolve to any path starting with x, having a single intermediate subpath,
    and ending with y. Example: "x.a.y", "x.b.y"

    Paths like "x.**.y" resolve to any path starting with x, having zero or more intermediate
    subpaths, and ending with y. Example: "x.y", "x.a.y", "x.b.c.y"

    Recognized kwargs:
      implicit_create (default True) - create missing intermediate documents
        along the path; automatically disabled while wildcard-traversing.
      push (default False) - append value to a list at the path instead of
        replacing it; non-list targets are first wrapped in a list.
      indent - internal, accumulates the recursion depth for debug tracing.

    Passing an Unset() value deletes the field at the path; a returned Unset
    tells the caller to delete the root itself.
    """

    head_path, rest_path = split_json_path(path)

    implicit_create = kwargs[
        "implicit_create"] if "implicit_create" in kwargs else True
    push = kwargs["push"] if "push" in kwargs else False

    indent = kwargs["indent"] if "indent" in kwargs else ""
    kwargs["indent"] = indent + " "

    # print indent, root, head_path, rest_path, kwargs

    if not head_path:

        # End of the path - apply the value here
        if not push:
            return value

        else:
            # Implicitly create a root array if we need to push
            if isinstance(root, Unset):
                if not implicit_create:
                    return root
                else:
                    root = []

            # Wrap a scalar/doc target into a single-element array
            if not isinstance(root, list):
                root = [root]

            root.append(value)
            return root

    # star-star-traverse all children recursively including the root itself
    if head_path == "**":

        # Don't create nonexistent child paths when star-traversing
        kwargs["implicit_create"] = False

        root_range = range(0, 0)
        if isinstance(root, dict):
            root_range = root.keys()
        elif isinstance(root, list):
            root_range = range(0, len(root))

        for field in root_range:

            # Update field children *and* field doc if ** - ** updates root
            # *and* children
            root[field] = json_update_path(
                root[field], "**." + rest_path, value, **kwargs)
            if isinstance(root[field], Unset):
                del root[field]

        # Update current root too if ** and we haven't already pushed to the
        # list
        root = json_update_path(root, rest_path, value, **kwargs)

        return root

    # don't traverse values
    if not isinstance(root, Unset) and not isinstance(root, list) and not isinstance(root, dict):
        return root

    # star-traverse docs
    if head_path == "*" and isinstance(root, dict):

        # Don't create nonexistent child paths when star-traversing
        kwargs["implicit_create"] = False

        for field in root:
            root[field] = json_update_path(
                root[field], rest_path, value, **kwargs)
            if isinstance(root[field], Unset):
                del root[field]

        return root

    # traverse lists
    if isinstance(root, list):

        root_range = None

        if head_path.isdigit():
            # numeric index arrays
            root_range = range(int(head_path), int(head_path) + 1)
        else:

            if head_path == "*":
                # Don't create nonexistent child paths when star-traversing
                kwargs["implicit_create"] = False

            # dot- or star-traverse arrays
            root_range = range(0, len(root))
            # don't consume head unless '*'
            rest_path = path if head_path != "*" else rest_path

        for i in root_range:
            root[i] = json_update_path(root[i], rest_path, value, **kwargs)
            if isinstance(root[i], Unset):
                del root[i]

        return root

    # Implicitly create a root doc if we need to keep traversing
    if isinstance(root, Unset):
        if not implicit_create:
            return root
        else:
            root = {}

    # Traverse into the dict object; a temporary Unset placeholder marks
    # fields that did not previously exist
    if not head_path in root:
        root[head_path] = Unset()

    root[head_path] = json_update_path(
        root[head_path], rest_path, value, **kwargs)
    if isinstance(root[head_path], Unset):
        del root[head_path]

    return root
+
+
def split_json_path(path):
    """Split a dotted path into (head, rest).

    For a single-segment path the rest is ".", which json_update_path treats
    as the terminal segment.
    """

    head, sep, rest = path.partition(".")
    if not sep:
        # No dot at all - signal the terminal segment
        rest = "."
    return (head, rest)
+
+
def json_coerce(json_value):
    """Coerce a command-line string into a JSON type.

    "3" -> 3, "true" -> True, '"true"' -> "true"; text that does not parse
    as JSON is returned unchanged as a string.
    """
    try:
        return json.loads('[' + json_value + ']')[0]
    except ValueError:
        # Not valid JSON - treat the raw text as a string literal.  (The old
        # bare except hid real errors, and its re-wrap in quotes raised on
        # values that themselves contained quote characters.)
        return json_value
+
+
def json_string_load(json_str):
    """Load data from a JSON string, falling back to YAML when available.

    Raises ValueError when the string is not valid JSON and no YAML parser
    is installed.
    """

    try:
        return json.loads(json_strip_comments(json_str))
    except ValueError:
        # Only a JSON parse failure should trigger the YAML fallback; the
        # old bare except also swallowed unrelated errors.
        if yaml:
            # NOTE(review): yaml.load can construct arbitrary Python objects.
            # These config strings are developer-supplied, but yaml.safe_load
            # would be safer - confirm before changing.
            return yaml.load(json_str)
        else:
            raise
+
+
def json_pipe_load(json_pipe):
    """Load JSON/YAML data from an open file-like object."""
    # read() yields the same full contents as joining readlines()
    return json_string_load(json_pipe.read())
+
+
def json_file_load(json_filename):
    """Load JSON/YAML data from a JSON file or a YAML file.

    A failure to load a .yaml file when PyYAML is missing raises a friendlier
    error; all other failures now propagate.  (The original swallowed them in
    that branch and silently returned None.)
    """

    try:
        with open(json_filename) as json_file:
            return json_pipe_load(json_file)
    except Exception as ex:
        _, ext = os.path.splitext(json_filename)
        if not yaml and ext == ".yaml":
            raise Exception(("YAML library not found, cannot load %s, " +
                             "install PyYAML to correct this.") % json_filename, ex)
        # Re-raise everything else instead of returning None
        raise
+
+
def json_dump(root, json_only=False):
    """Serialize root as YAML when available, else as pretty-printed JSON."""
    if not json_only and yaml:
        return yaml.safe_dump(root, default_flow_style=False)
    return json.dumps(root, sort_keys=True, indent=2)
+
+
class MultipleOption(optparse.Option):

    """Custom option class to allow parsing special JSON options by path."""

    # Register the custom actions with optparse's validation tables; all of
    # them store values, and all accept typed argument(s).
    ACTIONS = optparse.Option.ACTIONS + \
        ("extend", "json_file_update", "json_set", "json_unset", "json_push")
    STORE_ACTIONS = optparse.Option.STORE_ACTIONS + \
        ("extend", "json_file_update", "json_set", "json_unset", "json_push")
    TYPED_ACTIONS = optparse.Option.TYPED_ACTIONS + \
        ("extend", "json_file_update", "json_set", "json_unset", "json_push")
    ALWAYS_TYPED_ACTIONS = optparse.Option.ALWAYS_TYPED_ACTIONS + \
        ("extend", "json_file_update", "json_set", "json_unset", "json_push")

    def take_action(self, action, dest, opt, value, values, parser):
        """Apply one parsed option occurrence.

        The json_* actions record the raw value on `values` (for later
        inspection) and mutate parser.json_root, the JSON document that the
        enclosing JSONOptionParser accumulates.
        """

        if action == "extend":
            # Like "append", but flattens list values instead of nesting them
            if isinstance(value, list):
                dest_values = values.ensure_value(dest, [])
                for item in value:
                    dest_values.append(item)
            else:
                values.ensure_value(dest, []).append(value)

        elif action == "json_set":

            values.ensure_value(dest, []).append(value)

            # value is a (path, json-text) pair from nargs=2
            json_path, json_value = value
            if isinstance(json_value, str):
                json_value = json_coerce(json_value)

            parser.json_root = json_update_path(
                parser.json_root, json_path, json_value)

        elif action == "json_unset":

            values.ensure_value(dest, []).append(value)

            json_path = value
            # Assigning an Unset() deletes the field at json_path; an Unset
            # returned for the whole document resets it to {}
            parser.json_root = json_update_path(
                parser.json_root, json_path, Unset())
            if isinstance(parser.json_root, Unset):
                parser.json_root = {}

        elif action == "json_push":

            values.ensure_value(dest, []).append(value)

            json_path, json_value = value
            if isinstance(json_value, str):
                json_value = json_coerce(json_value)

            parser.json_root = json_update_path(
                parser.json_root, json_path, json_value, push=True)

        elif action == "json_file_update":

            json_filename = None
            if not value:
                # nargs=0 config-file shortcut: the option's default holds the
                # filename, and ensure_value returns that already-set default
                # (presumably always a string here - the [] fallback looks
                # unreachable; confirm against build_default_options usage)
                json_filename = values.ensure_value(dest, [])
            else:
                # Use specified value as file
                values.ensure_value(dest, []).append(value)
                json_filename = value

            if not os.path.isfile(json_filename):
                raise optparse.OptionValueError(
                    "cannot load json/yaml config from %s" % json_filename)

            # Merge the file's document on top of what we have so far
            json_data = json_file_load(json_filename)
            parser.json_root = json_update(parser.json_root, json_data)

        else:
            # Fall back to optparse's standard actions (store, append, ...)
            optparse.Option.take_action(
                self, action, dest, opt, value, values, parser)
+
+
class JSONOptionParser(optparse.OptionParser):

    """Custom option parser for JSON options.

    In addition to parsing normal options, also maintains a JSON document which can be updated by
    special --set, --unset, and --push options.

    """

    DEFAULT_USAGE = \
        """Complex JSON updates are supported via nested paths with dot separators:

    Ex: field-a.field-b.field-c

    - The --set option implicitly creates any portion of the path that does not exist, as does the \
--push option.

    - The --push option implicitly transforms the target of the push update into an array if not \
already an array, and adds the --push'd value to the end of the array.

    - The --unset option removes options by path.

Arrays are traversed implicitly, or you can specify an array index as a field name to traverse a \
particular array element.

JSON specified at the command line is implicitly coerced into JSON types. To avoid ambiguity when \
specifying string arguments, you may explicitly wrap strings in double-quotes which will always \
transform into strings.

    Ex: --set tests.foo 'abcdef' -> { "tests" : { "foo" : "abcdef" } }
    Ex: --set tests.foo '{ "x" : 3 }' -> { "tests" : { "foo" : { "x" : 3 } }
    Ex: --set tests.foo '"{ \"x\" : 3 }"' -> { "tests" : { "foo" : "{ \"x\" : 3 }" }
    Ex: --set tests.foo 'true' -> { "tests" : { "foo" : true }
    Ex: --set tests.foo '"true"' -> { "tests" : { "foo" : "true" }

The special star and star-star ('*' and '**') operators allow wildcard expansion of paths.

    - '*' expands to any field at the current nesting in the path

    - '**' expands to *all* fields at the current or child nestings of the path - this lets one \
easily set all fields with the same names from a particular root.

    Ex: --set executor.**.mongod-options.nopreallocj ""

Wildcard-expanded paths are not implicitly created when they do not already exist - this also \
applies to wildcard --push operations.

    - The --config-file option supports loading a full YAML or JSON document from file. Multiple \
config files can be specified, in which case the documents are merged recursively, in order of \
specification."""

    def __init__(self, add_default_options=True, configfile_args=None, *args, **kwargs):
        """configfile_args maps an option name to a default config filename;
        each entry becomes a --<name> option that merges that file into the
        JSON document.  Defaults to None rather than a shared mutable {}.
        """

        kwargs["option_class"] = MultipleOption
        optparse.OptionParser.__init__(self, *args, **kwargs)

        # The JSON document accumulated by --set/--unset/--push/--config-file
        self.json_root = {}
        self.configfile_args = configfile_args if configfile_args is not None else {}

        if add_default_options:
            self.build_default_options()

    def build_default_options(self):
        """Install the standard --config-file, --set, --unset and --push
        options, plus one --<name> option per configfile_args entry."""

        help_text = \
            """Options specified as a JSON-formatted file, """ + \
            """applied on top of the current JSON options."""

        self.add_option('--config-file', dest='json_config_files',
                        action="json_file_update", default=[], help=help_text)

        help_text = \
            """Sets a JSON value or values along the specified path."""

        self.add_option(
            '--set', dest='json_set_values', action="json_set", nargs=2, help=help_text)

        help_text = \
            """Unsets a JSON value or values along the specified path."""

        self.add_option('--unset', dest='json_unset_values', action="json_unset", nargs=1,
                        help=help_text)

        help_text = \
            """Pushes a JSON value or values along the specified path."""

        # FIX: dest was 'json_unset_values' (copy-paste error), which mixed
        # pushed values into the unset list; pushes now get their own dest.
        self.add_option('--push', dest='json_push_values', action="json_push", nargs=2,
                        help=help_text)

        for configfile_arg, configfile_filename in self.configfile_args.items():
            self.add_option("--" + configfile_arg, dest=configfile_arg, action="json_file_update",
                            default=configfile_filename, nargs=0)

    def parse_json_args(self):
        """Parse argv, first merging any JSON/YAML document piped on stdin.

        Returns (values, args, json_root).
        """
        if not sys.stdin.isatty():
            self.json_root = json_pipe_load(sys.stdin)

        values, args = self.parse_args()
        return (values, args, self.json_root)
+
# Usage banner shown when this module is run as a script; extends the
# parser's generic path-update help with smoke_json.py-specific examples.
USAGE = \
    """smoke_json.py <JSON CONFIG>

All options are specified as JSON - the json configuration can be loaded via a file and/or \
specified as options via the --set, --unset, and --push operators.

For example:
    smoke_json.py --push tests.roots "./jstests/disk/*.js" \\
        --set suite '{}' --set executor.test-executors.jstest '{}'

results in:

    ...
    Test Configuration:
    {
      "suite": {},
      "tests": {
        "roots": [
          "./jstests/disk/*.js"
        ]
      },
      "executor": {
        "test-executors": {
          "jstest": {}
        }
      }
    }
    ...

""" + JSONOptionParser.DEFAULT_USAGE
diff --git a/buildscripts/smoke/mongodb_network.py b/buildscripts/smoke/mongodb_network.py
new file mode 100644
index 00000000000..a67dbe6932f
--- /dev/null
+++ b/buildscripts/smoke/mongodb_network.py
@@ -0,0 +1,29 @@
+
+"""
+Very basic network helpers to allow programs to easily reserve network ports and manage timeouts.
+"""
+
+import time
+import socket
+
+
class Timer(object):

    """Tracks wall-clock seconds elapsed since construction."""

    def __init__(self):
        # time.time() keeps this compatible with old Pythons; callers only
        # need coarse elapsed intervals (e.g. connection timeouts)
        self.start_time_secs = time.time()

    def elapsed_secs(self):
        """Return the float seconds since this Timer was created."""
        now_secs = time.time()
        return now_secs - self.start_time_secs
+
+
class UnusedPort(object):

    """Reserves a local TCP port by binding it until release() is called."""

    def __init__(self, port=0):
        # port=0 asks the OS to pick any free port
        reserving_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        reserving_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        reserving_socket.bind(("0.0.0.0", port))

        self.unused_socket = reserving_socket
        self.addr, self.port = reserving_socket.getsockname()

    def release(self):
        """Close the reserving socket and clear all state."""
        self.unused_socket.close()
        self.unused_socket = None
        self.addr = None
        self.port = None
diff --git a/buildscripts/smoke/mongodb_programs.py b/buildscripts/smoke/mongodb_programs.py
new file mode 100644
index 00000000000..fca4c7dc92f
--- /dev/null
+++ b/buildscripts/smoke/mongodb_programs.py
@@ -0,0 +1,316 @@
+"""
+Basic utilities to start and stop mongo processes on the local machine.
+
+Encapsulates all the nitty-gritty parameter conversion, database path setup, and custom arguments.
+"""
+
+import json
+import os
+import pymongo
+import shutil
+import time
+
+from external_programs import *
+from mongodb_network import *
+
+#
+# Callback functions defined for special kwargs to MongoD/MongoShell/DBTests
+#
+
+
def apply_buildlogger_args(process, field, value):
    """Rewrite process so it runs under the buildlogger wrapper script.

    value is a dict accepting python_executable, buildlogger_script and
    buildlogger_global; any extra entries are exported as upper-cased
    environment variables on the process.
    """

    def hookup_bl(python_executable="python",
                  buildlogger_script="buildlogger.py",
                  buildlogger_global=False,
                  **kwargs):

        wrapper_arguments = [buildlogger_script]
        if buildlogger_global:
            wrapper_arguments.append("-g")

        # The real executable becomes the wrapper's first argument, and
        # python becomes the executable that actually launches
        wrapper_arguments.append(process.executable)
        process.executable = python_executable

        process.arguments = wrapper_arguments + process.arguments

        for env_name in kwargs:
            process.env_vars[env_name.upper()] = kwargs[env_name]

    hookup_bl(**value)
+
+# The "buildlogger" argument is a special command-line parameter, does crazy stuff
+BUILDLOGGER_CUSTOM_KWARGS = \
+ {"buildlogger": (None, KWARG_TYPE_CALLBACK, apply_buildlogger_args)}
+
+
def apply_verbose_arg(process, field, value):
    """Translate a numeric verbosity level into a mongod-style "-v"/"-vv" flag."""
    if value:
        # e.g. value=3 -> "-vvv"; zero verbosity adds nothing
        process.arguments.append("-" + "v" * value)
+
+# The "verbose" argument is a special command-line parameter, converts to "v"s
+VERBOSE_CUSTOM_KWARGS = \
+ {"verbose": (None, KWARG_TYPE_CALLBACK, apply_verbose_arg)}
+
+
def apply_setparam_args(process, field, value):
    """Expand a dict of server parameters into repeated --setParameter args.

    Each entry becomes "--setParameter name=<json-encoded value>".  Uses
    items() instead of the Python-2-only iteritems() so the helper also runs
    under Python 3; behavior on Python 2 is unchanged.
    """

    for param_name, param_value in value.items():
        process.arguments.append("--setParameter")
        process.arguments.append("%s=%s" % (param_name, json.dumps(param_value)))
+
+# The "set_parameters" arg is a special command line parameter, converts to "field=value"
+SETPARAM_CUSTOM_KWARGS = \
+ {"set_parameters": (None, KWARG_TYPE_CALLBACK, apply_setparam_args)}
+
+#
+# Default MongoD options
+#
+
+MONGOD_DEFAULT_EXEC = "./mongod"
+
+MONGOD_DEFAULT_DATA_PATH = "/data/db"
+
+MONGOD_KWARGS = dict(
+ BUILDLOGGER_CUSTOM_KWARGS.items() +
+ VERBOSE_CUSTOM_KWARGS.items() +
+ SETPARAM_CUSTOM_KWARGS.items())
+
+
class MongoD(ExternalProgram):

    """A locally-running MongoD process.

    Reserves a port at construction time (held until start()), derives a
    dbpath from host/port when none is given, and cleans the dbpath up on
    stop unless preserve_dbpath is set.
    """

    def __init__(self,
                 executable=MONGOD_DEFAULT_EXEC,
                 default_data_path=MONGOD_DEFAULT_DATA_PATH,
                 preserve_dbpath=False,
                 custom_kwargs=MONGOD_KWARGS,
                 **kwargs):
        """Remaining kwargs become mongod command-line options via ExternalProgram."""

        mongod_kwargs = dict(kwargs.items())

        self.host = "localhost"

        # Reserve the requested port (or any free one) until start()
        if "port" in mongod_kwargs:
            self.unused_port = UnusedPort(mongod_kwargs["port"])
        else:
            self.unused_port = UnusedPort()
            mongod_kwargs["port"] = self.unused_port.port

        self.port = mongod_kwargs["port"]

        # Default dbpath is derived from host/port so instances don't collide
        if "dbpath" not in mongod_kwargs:
            mongod_kwargs["dbpath"] = \
                os.path.join(default_data_path, "%s-%s" % (self.host, self.port))

        self.dbpath = mongod_kwargs["dbpath"]
        self.preserve_dbpath = preserve_dbpath

        ExternalProgram.__init__(self, executable, custom_kwargs=custom_kwargs, **mongod_kwargs)

    def _cleanup(self):
        """Remove the dbpath contents unless asked to preserve them."""
        if not self.preserve_dbpath and os.path.exists(self.dbpath):
            self.logger().info("Removing data in dbpath %s" % self.dbpath)
            shutil.rmtree(self.dbpath)

    def start(self):
        """Prepare a clean dbpath, release the reserved port, and launch mongod."""

        try:
            self._cleanup()

            if not os.path.exists(self.dbpath):
                self.logger().info("Creating dbpath at \"%s\"" % self.dbpath)
                os.makedirs(self.dbpath)
        except:
            self.logger().error("Failed to setup dbpath at \"%s\"" % self.dbpath, exc_info=True)
            raise

        # Slightly racy - fixing is tricky
        self.unused_port.release()
        self.unused_port = None

        ExternalProgram.start(self)

    def wait_for_client(self, timeout_secs=30.0):
        """Poll with "ismaster" until the server accepts connections.

        Raises when the process exits first or timeout_secs elapses.
        """

        timer = Timer()
        while True:

            if self.poll() is not None:
                # MongoD exited for some reason
                raise Exception(
                    "Could not connect to MongoD server at %s:%s, process ended unexpectedly." %
                    (self.host, self.port))

            try:
                # Try to connect to the mongod with a pymongo client - 30s default socket timeout
                self.client().admin.command("ismaster")
                break

            except Exception as ex:

                if timer.elapsed_secs() > timeout_secs:
                    raise Exception(
                        "Failed to connect to MongoD server at %s:%s." %
                        (self.host, self.port), ex)
                else:
                    self.logger().info("Waiting to connect to MongoD server at %s:%s..." %
                                       (self.host, self.port))
                    time.sleep(0.5)

        self.logger().info("Connected to MongoD server at %s:%s." % (self.host, self.port))

    def client(self, **client_args):
        """Return a new pymongo client for this server."""
        return pymongo.MongoClient(self.host, self.port, **client_args)

    def _wait_for_port(self, timeout_secs=10):
        """Re-reserve our port, retrying until the dead mongod relinquishes it."""
        timer = Timer()
        while True:
            try:
                self.unused_port = UnusedPort(self.port)
                break
            except Exception as ex:

                if timer.elapsed_secs() > timeout_secs:
                    raise Exception("Failed to cleanup port from MongoD server at %s:%s" %
                                    (self.host, self.port), ex)

                self.logger().info("Waiting for MongoD server at %s:%s to relinquish port..." %
                                   (self.host, self.port))
                time.sleep(0.5)

    def wait(self):
        """Wait for exit, then reclaim the port and clean the dbpath."""
        ExternalProgram.wait(self)
        # Slightly racy - fixing is tricky
        self._wait_for_port()
        self._cleanup()

    def stop(self):
        """Terminate the server, then reclaim the port and clean the dbpath."""
        ExternalProgram.stop(self)
        # Slightly racy - fixing is tricky
        self._wait_for_port()
        self._cleanup()
+
+#
+# Default MongoShell options
+#
+
+MONGOSHELL_DEFAULT_EXEC = "./mongo"
+MONGOSHELL_KWARGS = dict(BUILDLOGGER_CUSTOM_KWARGS.items())
+
+
class MongoShellContext(object):

    """The context for a mongo shell execution.

    Tests using the shell can only have APIs provided by injecting them into the shell when it
    starts - generally as global variables.

    Shell options and global variables are specified using this structure.
    """

    def __init__(self):
        # "host:port" of the server to connect to, or None for --nodb
        self.db_address = None
        # Maps global JS variable name -> JSON-serializable value
        self.global_context = {}
+
+
class MongoShell(ExternalProgram):

    """A locally-running MongoDB shell process.

    Makes it easy to start with custom global variables, pointed at a custom database, etc.

    """

    def __init__(self,
                 executable=MONGOSHELL_DEFAULT_EXEC,
                 shell_context=None,
                 db_address=None,
                 global_context=None,
                 js_filenames=None,
                 custom_kwargs=MONGOSHELL_KWARGS,
                 **kwargs):
        """db_address/global_context are only used when no shell_context is
        given.  global_context and js_filenames default to None rather than
        shared mutable {}/[] defaults, and js_filenames is copied so the
        caller's list is never aliased.
        """

        ExternalProgram.__init__(self, executable, custom_kwargs=custom_kwargs, **kwargs)

        self.shell_context = shell_context
        if not shell_context:
            self.shell_context = MongoShellContext()
            self.shell_context.db_address = db_address
            if global_context:
                self.shell_context.global_context.update(global_context)

        self.js_filenames = list(js_filenames) if js_filenames else []

    def build_eval_context(self):
        """Build the --eval string that seeds the shell's global variables."""

        eval_strs = []

        # items() instead of the Python-2-only iteritems() - same behavior
        for variable, variable_json in self.shell_context.global_context.items():
            eval_strs.append("%s=%s;" % (variable, json.dumps(variable_json)))

        return "".join(eval_strs)

    def build_process(self):
        """Assemble the shell command line: eval'd globals, db address, files."""

        process_context = self.context.clone()

        if self.shell_context.global_context:

            eval_context_str = self.build_eval_context()

            # Append to any user-specified --eval rather than clobbering it
            if "eval" in process_context.kwargs:
                process_context.kwargs["eval"] = process_context.kwargs["eval"] + ";" + \
                    eval_context_str
            else:
                process_context.kwargs["eval"] = eval_context_str

        process = ExternalProgram.build_process(self, process_context)

        if self.shell_context.db_address:
            process.arguments.append(self.shell_context.db_address)
        else:
            process.arguments.append("--nodb")

        if self.js_filenames:
            for js_filename in self.js_filenames:
                process.arguments.append(js_filename)

        return process
+
+#
+# Default DBTest options
+#
+
+DBTEST_DEFAULT_EXEC = "./dbtest"
+DBTEST_KWARGS = dict(BUILDLOGGER_CUSTOM_KWARGS.items() + VERBOSE_CUSTOM_KWARGS.items())
+
+
class DBTest(ExternalProgram):

    """A locally running MongoDB dbtest process.

    Makes it easy to start with custom named dbtests.

    """

    def __init__(self,
                 executable=DBTEST_DEFAULT_EXEC,
                 dbtest_names=None,
                 custom_kwargs=DBTEST_KWARGS,
                 **kwargs):
        """dbtest_names lists the test suites to run.  It defaults to None
        rather than a shared mutable [], and is copied so the caller's list
        (or the old shared default) is never aliased by the instance."""

        ExternalProgram.__init__(self, executable, custom_kwargs=custom_kwargs, **kwargs)

        self.dbtest_names = list(dbtest_names) if dbtest_names else []

    def build_process(self):
        """Append each requested dbtest name to the base command line."""

        process = ExternalProgram.build_process(self)

        for dbtest_name in self.dbtest_names:
            process.arguments.append(dbtest_name)

        return process
diff --git a/buildscripts/smoke/suites.py b/buildscripts/smoke/suites.py
new file mode 100644
index 00000000000..a89796e3520
--- /dev/null
+++ b/buildscripts/smoke/suites.py
@@ -0,0 +1,105 @@
+"""
+Utilities for searching a database of tests based on a query over tags provided by the tests.
+The resulting search becomes a test suite.
+"""
+
+import re
+
+
class RegexQuery(object):

    """A query based on regex includes/excludes.

    The *_res arguments are sequences of compiled regular expressions; the
    *_except_res variants invert the match sense of each pattern.

    TODO: Something more complicated, or link to actual MongoDB queries?

    """

    def __init__(self,
                 include_res=(),
                 include_except_res=(),
                 exclude_res=(),
                 exclude_except_res=()):
        """Store (pattern, invert_match) pairs.

        Tuple defaults replace the old mutable [] defaults, which were shared
        across every call of the constructor.
        """

        self.include_res = []
        self.include_res.extend([(include_re, False) for include_re in include_res])
        self.include_res.extend([(include_except_re, True)
                                 for include_except_re in include_except_res])

        self.exclude_res = []
        self.exclude_res.extend([(exclude_re, False) for exclude_re in exclude_res])
        self.exclude_res.extend([(exclude_except_re, True)
                                 for exclude_except_re in exclude_except_res])

    def matches(self, value):
        """Convenience wrapper: query a single value."""
        return self.matches_values([value])

    def matches_values(self, values):
        """Return True when values pass the include patterns and are not
        rejected by the exclude patterns.

        Any include pattern accepting the values (or every include-except
        pattern rejecting them) includes; any exclude pattern matching (or no
        exclude-except pattern matching) excludes.
        """

        # First see if anything in the values makes us included; empty
        # include list means everything is included
        included = True

        if self.include_res:

            for include_re, invert_match in self.include_res:

                if not invert_match:
                    # Include if any of the values is matched by an include pattern
                    included = any(include_re.search(value) is not None
                                   for value in values)
                else:
                    # Include only if none of the values is matched by an
                    # include-except pattern
                    included = not any(include_re.search(value) is not None
                                       for value in values)

                if included:
                    break

            if not included:
                return included

        if self.exclude_res:

            for exclude_re, invert_match in self.exclude_res:

                if not invert_match:
                    # Exclude if any of the values is matched by an exclude pattern
                    included = not any(exclude_re.search(value) is not None
                                       for value in values)
                else:
                    # Exclude unless some value is matched by an
                    # exclude-except pattern
                    included = any(exclude_re.search(value) is not None
                                   for value in values)

                if not included:
                    break

        return included

    def combine(self, other):
        """Merge another query's patterns into this one."""
        self.include_res.extend(other.include_res)
        self.exclude_res.extend(other.exclude_res)
+
+
def build_suite(tests, tag_query):
    """Return the tests whose tags satisfy tag_query."""
    # Keep each test whose tag list matches the query
    return [test for test in tests if tag_query.matches_values(test.tags)]
diff --git a/buildscripts/smoke/testers.py b/buildscripts/smoke/testers.py
new file mode 100644
index 00000000000..012c74f84c6
--- /dev/null
+++ b/buildscripts/smoke/testers.py
@@ -0,0 +1,201 @@
+"""
+Testers - TestCase wrappers for tests of different types
+"""
+
+import unittest
+
+from external_programs import *
+from mongodb_programs import DBTest
+from mongodb_programs import MongoShell
+from mongodb_programs import MongoShellContext
+
+
+DEFAULT_TESTER_CLASSES = {"js_test": "smoke.JSUnitTest",
+ "db_test": "smoke.DBTestUnitTest",
+ "exe_test": "smoke.ExeUnitTest"}
+
+
+class JSUnitTest(unittest.TestCase):
+
+ """A MongoDB shell 'jstest' wrapped as a TestCase.
+
+ Allows fixtures to provide global variables and databases to connect to as API additions.
+
+ """
+
+ def __init__(self, jstest, test_apis, logger, shell_executable="./mongo", shell_options={},
+ *args, **kwargs):
+
+ unittest.TestCase.__init__(self, *args, **kwargs)
+
+ # Setup the description for the unit test
+ self._testMethodDoc = "JSTest %s" % jstest.filename
+
+ self.jstest = jstest
+ self.test_apis = test_apis
+ self.logger = logger
+
+ self.shell_executable = shell_executable
+ self.shell_options = {}
+ self.shell_options.update(shell_options)
+
+ self.shell_context = MongoShellContext()
+
+ def setUp(self):
+ try:
+ for api in self.test_apis:
+ api.add_to_shell(self.shell_context)
+ except:
+ self.logger.error("Setup failed for shell API.", exc_info=True)
+ raise
+
+ def runTest(self):
+
+ shell = MongoShell(executable=self.shell_executable,
+ shell_context=self.shell_context,
+ js_filenames=[self.jstest.filename],
+ context=ExternalContext(logger=self.logger),
+ **self.shell_options)
+
+ try:
+ self.logger.info("Starting MongoDB shell...\n%s" % shell)
+
+ shell.start()
+
+ self.logger.info("MongoDB shell started with pid %s." % shell.pid())
+
+ return_code = shell.wait()
+ if return_code != 0:
+ raise Exception("JSTest %s failed." % self.jstest.filename)
+
+ self.logger.info("MongoDB shell finished.")
+
+ except:
+ self.logger.error("MongoDB shell failed.", exc_info=True)
+ raise
+
+ def tearDown(self):
+ try:
+ for api in self.test_apis:
+ api.teardown_api()
+ except:
+ self.logger.error("Teardown failed for shell API.", exc_info=True)
+ raise
+
+
+class ExeUnitTest(unittest.TestCase):
+
+ """An arbitrary executable file wrapped as a TestCase.
+
+ Meant for use with C++ unit tests, for example.
+
+ Allows fixtures to provide environment variables as API additions.
+
+ """
+
+ def __init__(self, exetest, test_apis, logger,
+ program_options={},
+ *args, **kwargs):
+
+ unittest.TestCase.__init__(self, *args, **kwargs)
+ self.exetest = exetest
+ self.test_apis = test_apis
+ self.logger = logger
+
+ # Setup the description for the unit test
+ self._testMethodDoc = "Program %s" % self.exetest.filename
+
+ self.process_context = ExternalContext(logger=self.logger)
+ if program_options:
+ self.process_context.kwargs.update(program_options)
+
+ def setUp(self):
+ try:
+ for api in self.test_apis:
+ api.add_to_process(self.process_context)
+ except:
+ self.logger.error("Setup failed for process API.", exc_info=True)
+ raise
+
+ def runTest(self):
+
+ program = ExternalProgram(executable=self.exetest.filename,
+ context=self.process_context)
+
+ try:
+ self.logger.info("Starting Program...\n%s" % program)
+
+ program.start()
+
+ self.logger.info("Program %s started with pid %s." %
+ (self.exetest.filename, program.pid()))
+
+ return_code = program.wait()
+ if return_code != 0:
+ raise Exception("Program %s failed." % self.exetest.filename)
+
+ self.logger.info("Program finished.")
+
+ except:
+ self.logger.error("Program failed.", exc_info=True)
+ raise
+
+ def tearDown(self):
+ try:
+ for api in self.test_apis:
+ api.teardown_api()
+ except:
+            self.logger.error("Teardown failed for process API.", exc_info=True)
+ raise
+
+
+class DBTestUnitTest(ExeUnitTest):
+
+ """A executable MongoDB 'dbtest' wrapped as a TestCase.
+
+ Individual dbtests can be specified optionally.
+
+ Allows fixtures to provide environment variables as API additions.
+
+ """
+
+ def __init__(self, dbtest, test_apis, logger,
+ dbtest_executable=None,
+ dbtest_options={},
+ *args, **kwargs):
+
+ ExeUnitTest.__init__(self, dbtest, test_apis, logger, dbtest_options,
+ *args, **kwargs)
+ self.dbtest = dbtest
+
+ self.dbtest_names = []
+ if "dbtest_names" in dbtest.metadata:
+ self.dbtest_names = dbtest.metadata["dbtest_names"]
+
+ # Setup the description for the unit test
+ self._testMethodDoc = "DBTest %s" % (" ".join(self.dbtest_names))
+
+ self.dbtest_executable = dbtest_executable
+
+ def runTest(self):
+
+ dbtest = DBTest(executable=self.dbtest_executable,
+ dbtest_names=self.dbtest_names,
+ context=self.process_context)
+ try:
+ self.logger.info("Starting DBTest...\n%s" % dbtest)
+
+ dbtest.start()
+
+ self.logger.info("DBTest %s started with pid %s." % (" ".join(self.dbtest_names),
+ dbtest.pid()))
+
+ return_code = dbtest.wait()
+ if return_code != 0:
+ raise Exception("DBTest %s failed." % (" ".join(self.dbtest_names)))
+
+ self.logger.info("DBTest finished.")
+
+ except:
+ self.logger.error("DBTest failed.", exc_info=True)
+ raise
diff --git a/buildscripts/smoke/tests.py b/buildscripts/smoke/tests.py
new file mode 100644
index 00000000000..a2612f12c87
--- /dev/null
+++ b/buildscripts/smoke/tests.py
@@ -0,0 +1,333 @@
+"""
+Utilities for building a database of tests from a file system with JSON metadata files.
+"""
+
+import glob
+import os
+import re
+
+from json_options import json_file_load
+from json_options import json_string_load
+from json_options import json_dump
+
+JSTEST_TYPE_RE = re.compile(r"^file://.*\.js$")
+DBTEST_TYPE_RE = re.compile(r"^dbtest://.*")
+
+
+def guess_is_metadata_file(filename):
+ filebase, ext = os.path.splitext(filename)
+ return ext == ".json" or ext == ".yaml" or ext == ".yml"
+
+
+def guess_test_type(uri):
+
+ if JSTEST_TYPE_RE.match(uri):
+ return "js_test"
+ elif DBTEST_TYPE_RE.match(uri):
+ return "db_test"
+ else:
+ return None
+
+
+def file_uri(filepath):
+ return "file://" + os.path.abspath(filepath)
+
+FILE_URI_RE = re.compile(r"^file://(.*)")
+
+
+def extract_filename(uri):
+ match = FILE_URI_RE.match(uri)
+ if not match:
+ return None
+ return match.group(1)
+
+
+class Test(object):
+
+ """A test object of a particular type, at a particular URI, with metadata.
+
+ Often filenames are also set - though this is not required.
+
+ """
+
+ def __init__(self, uri=None, filename=None, test_type=None, tags=[], **metadata):
+
+ self.uri = uri
+        self.filename = os.path.abspath(filename) if filename else None
+ self.test_type = test_type
+ self.tags = tags
+ self.metadata = metadata
+
+ if not self.uri:
+ if not self.filename:
+ raise Exception("Test must have either a URI or a filename specified.")
+ else:
+ self.uri = file_uri(self.filename)
+
+ if not self.filename:
+ self.filename = extract_filename(uri)
+
+ if not self.test_type:
+ self.test_type = guess_test_type(self.uri)
+
+ if not self.test_type:
+ raise Exception("Test at %s is of unknown type." % self.uri)
+
+ self.rebuild_tags()
+
+ def strip_meta_tags(self):
+ ordinary_tags = []
+ for tag in self.tags:
+ if not tag.startswith("meta."):
+ ordinary_tags.append(tag)
+
+ return ordinary_tags
+
+ def rebuild_tags(self):
+
+ meta_tags = ["meta.uri.%s" % self.uri, "meta.test_type.%s" % self.test_type]
+ self.tags = meta_tags + self.strip_meta_tags()
+
+ def __str__(self):
+ return "Test(%s,%s,%s)" % (self.test_type, self.uri, self.tags)
+
+ def __repr__(self):
+ return self.__str__()
+
+ def __setstate__(self, state):
+ self.__init__(**state)
+
+ def __getstate__(self, metadata_filename=None):
+
+ # Inline 'metadata'
+ state = dict(self.__dict__.items())
+ del state["metadata"]
+ if len(self.metadata) > 0:
+ state.update(self.metadata.items())
+
+ # Remove "meta." tags
+ state["tags"] = self.strip_meta_tags()
+
+ # Compute relative path of filename if one exists, use instead of absolute uri
+ if self.filename and metadata_filename:
+
+ abs_filename = self.filename
+ abs_metadata_path = os.path.split(os.path.abspath(metadata_filename))[0]
+ common_prefix = os.path.commonprefix([abs_metadata_path, abs_filename])
+ state["filename"] = os.path.relpath(abs_filename, common_prefix)
+ del state["uri"]
+
+ return state
+
+
+def visit_files_matching(root,
+ file_query,
+ path_visitor,
+ file_visitor,
+ is_glob_pattern=False,
+ use_abs_paths=False):
+
+ glob_pattern = None
+ if is_glob_pattern:
+ glob_pattern = root
+ root = None
+
+ if use_abs_paths:
+ root = os.path.abspath(root)
+
+ paths_seen = set([])
+
+ def visit_file(filename):
+ if file_query and not file_query.matches(filename):
+ return
+
+ parent_path, filename_only = os.path.split(filename)
+
+ if path_visitor and not parent_path in paths_seen:
+ path_visitor(parent_path)
+ paths_seen.add(parent_path)
+
+ if file_visitor:
+ file_visitor(parent_path, filename_only)
+
+ if glob_pattern:
+ for filename in glob.iglob(glob_pattern):
+ visit_file(filename)
+ else:
+ for path, dirnames, filenames in os.walk(root):
+ for filename in filenames:
+ visit_file(os.path.join(path, filename))
+
+DEFAULT_TAG_FILENAME = "test_metadata.json"
+
+
+def build_tests(roots, file_query=None, extract_metadata=False,
+ default_metadata_filename=DEFAULT_TAG_FILENAME):
+ """Builds a database (list) of tests given a number of filesystem 'roots' and a regex query.
+
+ Searches directories recursively, and can also handle metadata files given directly as roots or
+ glob-style searches.
+
+ """
+
+ if not roots:
+ roots = ["./"]
+
+ all_tests = {}
+
+ def metadata_visitor(path, metadata_filename=None, test_filenames=None):
+
+ if not metadata_filename:
+ metadata_filename = default_metadata_filename
+
+ metadata_filepath = os.path.join(path, metadata_filename)
+
+ if not os.path.exists(metadata_filepath):
+ return []
+
+ test_metadatas = json_file_load(metadata_filepath)
+ metadata_tests = {}
+
+ if isinstance(test_metadatas, (list, tuple)):
+ for test_metadata in test_metadatas:
+
+ # The filename path is relative to the metadata file dir if not absolute
+ if "filename" in test_metadata:
+ filename = test_metadata["filename"]
+ if not os.path.isabs(filename):
+ test_metadata["filename"] = os.path.join(path, filename)
+
+ test = Test(**test_metadata)
+ if test_filenames is None or test.filename in test_filenames:
+ metadata_tests[test.uri] = test
+ all_tests[test.uri] = test
+
+ return metadata_tests.values()
+
+ def test_visitor(path, filename):
+
+ # The filename path is relative to the root we started the search from
+ test_uri = file_uri(os.path.join(path, filename))
+
+ if test_uri in all_tests:
+ test = all_tests[test_uri]
+ else:
+ test = Test(filename=os.path.join(path, filename))
+ all_tests[test.uri] = test
+
+ if extract_metadata:
+ extract_test_metadata(test)
+
+ # Gather test metadata and tests
+
+ root_metadata_files = \
+ filter(lambda root: os.path.isfile(root) and guess_is_metadata_file(root), roots)
+ root_test_files = \
+ filter(lambda root: os.path.isfile(root) and not guess_is_metadata_file(root), roots)
+ root_globs = \
+ filter(lambda root: not os.path.isfile(root), roots)
+
+ for root in root_metadata_files:
+ # Load metadata from root metadata files
+ metadata_tests = metadata_visitor(*os.path.split(root))
+ if extract_metadata:
+ # Also extract metadata from tests if we need to
+ for metadata_test in metadata_tests:
+ if metadata_test.filename:
+ test_visitor(*os.path.split(metadata_test.filename))
+
+ metadata_paths = {}
+ for root in root_test_files:
+ metadata_path = os.path.split(root)[0]
+ if metadata_path not in metadata_paths:
+ metadata_paths[metadata_path] = set([])
+
+ metadata_paths[metadata_path].add(os.path.abspath(root))
+
+ for metadata_path, test_filenames in metadata_paths.iteritems():
+ # Load metadata from test files' associated metadata files
+ metadata_visitor(metadata_path, metadata_filename=None, test_filenames=test_filenames)
+
+ for root in root_test_files:
+ # Load metadata from the test itself
+ test_visitor(*os.path.split(root))
+
+ for root in root_globs:
+ # If this is a directory or glob pattern, visit the directory or pattern
+ # and extract metadata from metadata files and potentially test files
+ is_glob_pattern = not os.path.isdir(root)
+ visit_files_matching(root,
+ file_query,
+ metadata_visitor,
+ test_visitor,
+ is_glob_pattern=is_glob_pattern)
+
+ return all_tests.values()
+
+
+#
+# Below is all utilities for "tags" extraction from jstests
+#
+
+
+JSTEST_TAGS_RE = re.compile(r".*@tags\s*:\s*(\[[^\]]*\])", re.DOTALL)
+
+
+def extract_jstest_metadata(jstest):
+
+ with open(jstest.filename) as jstest_file:
+ tags_match = JSTEST_TAGS_RE.match(jstest_file.read())
+ if tags_match:
+
+ tags = None
+ try:
+ tags = json_string_load(tags_match.group(1))
+ except Exception as ex:
+ raise Exception(
+ "Could not load tags from file %s: %s" % (jstest.filename,
+ tags_match.group(1)), ex)
+ all_tags = set(jstest.strip_meta_tags() + tags)
+ jstest.tags = [tag for tag in all_tags]
+ jstest.rebuild_tags()
+
+
+def extract_test_metadata(test):
+
+ if test.test_type == "js_test":
+ extract_jstest_metadata(test)
+
+
+def extract_metadata(tests):
+
+ for test in tests:
+ extract_test_metadata(test)
+
+
+def write_metadata(tests, filename=None,
+ default_metadata_filename=DEFAULT_TAG_FILENAME,
+ json_only=False):
+
+ metadata_file_tests = {}
+
+ for test in tests:
+
+ metadata_filename = filename
+
+ if not metadata_filename:
+ test_path, test_filename = os.path.split(test.filename)
+ metadata_filename = os.path.join(test_path, default_metadata_filename)
+
+ metadata_filename = os.path.abspath(metadata_filename)
+
+ if metadata_filename not in metadata_file_tests:
+ metadata_file_tests[metadata_filename] = []
+
+ tests_in_file = metadata_file_tests[metadata_filename]
+ tests_in_file.append(test)
+
+ for metadata_filename, tests_in_file in metadata_file_tests.iteritems():
+ with open(metadata_filename, 'w') as metadata_file:
+ test_metadata = []
+ for test in tests_in_file:
+ test_metadata.append(test.__getstate__(metadata_filename))
+ metadata_file.write(json_dump(test_metadata, json_only))
diff --git a/buildscripts/smoke_config/__init__.py b/buildscripts/smoke_config/__init__.py
new file mode 100644
index 00000000000..07736e7be49
--- /dev/null
+++ b/buildscripts/smoke_config/__init__.py
@@ -0,0 +1,26 @@
+
+import os
+
+USER_CONFIG_DIRS = (os.path.split(__file__)[0], "~/.smoke_config")
+
+
+def get_named_configs(search_paths=USER_CONFIG_DIRS):
+ """Extract named JSON configurations from specified paths."""
+ named_configs = {}
+ for search_path in search_paths:
+
+ search_path = os.path.expanduser(search_path)
+ if not os.path.isdir(search_path):
+ continue
+
+ for path, dirnames, filenames in os.walk(search_path):
+
+ for filename in filenames:
+
+ filebase, ext = os.path.splitext(filename)
+ if ext != ".json" and ext != ".yaml" and ext != ".yml":
+ continue
+
+ named_configs[filebase] = os.path.abspath(os.path.join(path, filename))
+
+ return named_configs
diff --git a/buildscripts/smoke_config/auth.yaml b/buildscripts/smoke_config/auth.yaml
new file mode 100644
index 00000000000..102c8721b3f
--- /dev/null
+++ b/buildscripts/smoke_config/auth.yaml
@@ -0,0 +1,27 @@
+# Options to set for authorization with a mongod host
+suite:
+ exclude_tags:
+ - ^auth_internal$
+
+executor:
+ fixtures:
+ mongodb_server:
+ mongod_options:
+ auth: ''
+ keyFile: ./jstests/libs/authTestsKey
+ set_parameters:
+ enableLocalhostAuthBypass: false
+ shell_globals:
+ TestData:
+ auth: true
+ authMechanism: MONGODB-CR
+      keyFile: ./jstests/libs/authTestsKey
+ keyFileData: Thiskeyisonlyforrunningthesuitewithauthenticationdontuseitinanytestsdirectly
+ testers:
+ js_test:
+ shell_options:
+ authenticationDatabase: local
+ authenticationMechanism: MONGODB-CR
+ password: Thiskeyisonlyforrunningthesuitewithauthenticationdontuseitinanytestsdirectly
+ username: __system
+
diff --git a/buildscripts/smoke_config/auth_shell.yaml b/buildscripts/smoke_config/auth_shell.yaml
new file mode 100644
index 00000000000..56388ebb555
--- /dev/null
+++ b/buildscripts/smoke_config/auth_shell.yaml
@@ -0,0 +1,21 @@
+# Options to set for authorization with a mongod host
+suite:
+ exclude_tags:
+ - ^auth_internal$
+
+executor:
+ fixtures:
+ shell_globals:
+ TestData:
+ auth: true
+ authMechanism: MONGODB-CR
+      keyFile: ./jstests/libs/authTestsKey
+ keyFileData: Thiskeyisonlyforrunningthesuitewithauthenticationdontuseitinanytestsdirectly
+ testers:
+ js_test:
+ shell_options:
+ authenticationDatabase: local
+ authenticationMechanism: MONGODB-CR
+ password: Thiskeyisonlyforrunningthesuitewithauthenticationdontuseitinanytestsdirectly
+ username: __system
+
diff --git a/buildscripts/smoke_config/disk.yaml b/buildscripts/smoke_config/disk.yaml
new file mode 100644
index 00000000000..cec6897d03c
--- /dev/null
+++ b/buildscripts/smoke_config/disk.yaml
@@ -0,0 +1,11 @@
+# Disk tests
+tests:
+ roots:
+ - ./jstests/disk/*.js
+executor:
+ fixtures:
+ shell_globals:
+ fixture_class: smoke.GlobalShellFixture
+ testers:
+    js_test: {}
+
diff --git a/buildscripts/smoke_config/executor_default.yaml b/buildscripts/smoke_config/executor_default.yaml
new file mode 100644
index 00000000000..bb4b80b484e
--- /dev/null
+++ b/buildscripts/smoke_config/executor_default.yaml
@@ -0,0 +1,9 @@
+executor:
+ fixtures: {}
+
+ testers:
+ # Test runners for the suite
+
+ js_test: {}
+ db_test: {}
+ exe_test: {}
diff --git a/buildscripts/smoke_config/jscore.yaml b/buildscripts/smoke_config/jscore.yaml
new file mode 100644
index 00000000000..c34e9bc6608
--- /dev/null
+++ b/buildscripts/smoke_config/jscore.yaml
@@ -0,0 +1,21 @@
+tests:
+ # JSCore root
+ roots:
+ - ./jstests/core/*.js
+
+executor:
+
+ fixtures:
+
+ # Single MongoD running in background
+ mongodb_server:
+ fixture_class: smoke.SingleMongoDFixture
+ mongod_options:
+ nopreallocj: ''
+
+ # Global shell context
+ shell_globals:
+ fixture_class: smoke.GlobalShellFixture
+
+ testers:
+ js_test: {}
diff --git a/buildscripts/smoke_config/log_console.yaml b/buildscripts/smoke_config/log_console.yaml
new file mode 100644
index 00000000000..2f7ea9deca1
--- /dev/null
+++ b/buildscripts/smoke_config/log_console.yaml
@@ -0,0 +1,16 @@
+logging:
+ version: 1
+
+ handlers:
+ default:
+ level: INFO
+ class: logging.StreamHandler
+
+ loggers:
+ "":
+ handlers:
+ - default
+ level: INFO
+ propagate: True
+
+ \ No newline at end of file
diff --git a/buildscripts/smoke_config/log_default.yaml b/buildscripts/smoke_config/log_default.yaml
new file mode 100644
index 00000000000..4165954c075
--- /dev/null
+++ b/buildscripts/smoke_config/log_default.yaml
@@ -0,0 +1,39 @@
+suite: {}
+executor:
+ fail_fast: false
+ fixtures:
+ shell_globals:
+ fixture_class: smoke.GlobalShellFixture
+ testers:
+ db_test: {}
+ exe_test: {}
+ js_test: {}
+logging:
+ disable_existing_loggers: true
+ formatters:
+ standard:
+ format: '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
+ handlers:
+ fixtures:
+ class: logging.FileHandler
+ filename: ./fixtures.log
+ level: INFO
+ mode: w
+ testers:
+ class: logging.FileHandler
+ filename: ./tests.log
+ level: INFO
+ mode: w
+ loggers:
+ executor.fixtures:
+ handlers:
+ - fixtures
+ level: INFO
+ propagate: false
+ executor.testers:
+ handlers:
+ - testers
+ level: INFO
+ propagate: false
+ version: 1
+
diff --git a/buildscripts/smoke_config/log_file.yaml b/buildscripts/smoke_config/log_file.yaml
new file mode 100644
index 00000000000..a5e8b089596
--- /dev/null
+++ b/buildscripts/smoke_config/log_file.yaml
@@ -0,0 +1,39 @@
+suite: {}
+executor:
+ fail_fast: false
+ fixtures:
+ shell_globals:
+ fixture_class: smoke.GlobalShellFixture
+ testers:
+ db_test: {}
+ exe_test: {}
+ js_test: {}
+logging:
+ disable_existing_loggers: true
+ formatters:
+ standard:
+ format: '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
+ handlers:
+ fixtures:
+ class: logging.FileHandler
+ filename: ./fixtures.log
+ level: INFO
+ mode: w
+ testers:
+ class: logging.FileHandler
+ filename: ./testers.log
+ level: INFO
+ mode: w
+ loggers:
+ executor.fixtures:
+ handlers:
+ - fixtures
+ level: INFO
+ propagate: false
+ executor.testers:
+ handlers:
+ - testers
+ level: INFO
+ propagate: false
+ version: 1
+
diff --git a/buildscripts/smoke_config/log_suppress.yaml b/buildscripts/smoke_config/log_suppress.yaml
new file mode 100644
index 00000000000..668912bfefb
--- /dev/null
+++ b/buildscripts/smoke_config/log_suppress.yaml
@@ -0,0 +1,20 @@
+suite: {}
+executor:
+ fail_fast: false
+ fixtures:
+ shell_globals:
+ fixture_class: smoke.GlobalShellFixture
+ testers:
+ db_test: {}
+ exe_test: {}
+ js_test: {}
+logging:
+ handlers:
+ default:
+ class: logging.NullHandler
+ loggers:
+ ? ''
+ : handlers:
+ - default
+ version: 1
+
diff --git a/buildscripts/smoke_config/master_slave.yaml b/buildscripts/smoke_config/master_slave.yaml
new file mode 100644
index 00000000000..362f300ef75
--- /dev/null
+++ b/buildscripts/smoke_config/master_slave.yaml
@@ -0,0 +1,8 @@
+# Options for master/slave (small oplog) tests
+executor:
+ fixtures:
+ mongodb_server:
+ fixture_class: smoke.MasterSlaveFixture
+ master_options:
+ oplogSize: 100
+ slave_options: {}
diff --git a/buildscripts/smoke_config/no_server.yaml b/buildscripts/smoke_config/no_server.yaml
new file mode 100644
index 00000000000..3075d45a05b
--- /dev/null
+++ b/buildscripts/smoke_config/no_server.yaml
@@ -0,0 +1,9 @@
+executor:
+ fixtures:
+
+ # Global shell context
+ shell_globals:
+ fixture_class: smoke.GlobalShellFixture
+
+ testers:
+ js_test: {}
diff --git a/buildscripts/smoke_config/replicasets.yaml b/buildscripts/smoke_config/replicasets.yaml
new file mode 100644
index 00000000000..316b5e47ac3
--- /dev/null
+++ b/buildscripts/smoke_config/replicasets.yaml
@@ -0,0 +1,11 @@
+# Replica set tests
+tests:
+ roots:
+ - ./jstests/replsets/*.js
+executor:
+ fixtures:
+ shell_globals:
+ fixture_class: smoke.GlobalShellFixture
+ testers:
+    js_test: {}
+
diff --git a/buildscripts/smoke_config/sharding.yaml b/buildscripts/smoke_config/sharding.yaml
new file mode 100644
index 00000000000..1ca5f31a5e2
--- /dev/null
+++ b/buildscripts/smoke_config/sharding.yaml
@@ -0,0 +1,11 @@
+# Sharding tests
+tests:
+ roots:
+ - ./jstests/sharding/*.js
+executor:
+ fixtures:
+ shell_globals:
+ fixture_class: smoke.GlobalShellFixture
+ testers:
+    js_test: {}
+
diff --git a/buildscripts/smoke_config/unittests.yaml b/buildscripts/smoke_config/unittests.yaml
new file mode 100644
index 00000000000..b127a79bdf9
--- /dev/null
+++ b/buildscripts/smoke_config/unittests.yaml
@@ -0,0 +1,7 @@
+tests:
+ # Generated by SCons
+ roots:
+ - build/unittests.json
+executor:
+ testers:
+ exe_test: {}
diff --git a/buildscripts/smoke_config/with_server.yaml b/buildscripts/smoke_config/with_server.yaml
new file mode 100644
index 00000000000..4ea65b57f55
--- /dev/null
+++ b/buildscripts/smoke_config/with_server.yaml
@@ -0,0 +1,15 @@
+executor:
+ fixtures:
+
+ # Single MongoD running in background
+ mongodb_server:
+ fixture_class: smoke.SingleMongoDFixture
+ mongod_options:
+ nopreallocj: ''
+
+ # Global shell context
+ shell_globals:
+ fixture_class: smoke.GlobalShellFixture
+
+ testers:
+ js_test: {}
diff --git a/jstests/sharding/cleanup_orphaned_cmd_hashed.js b/jstests/sharding/cleanup_orphaned_cmd_hashed.js
index 219f23d3282..52cc1657c1d 100644
--- a/jstests/sharding/cleanup_orphaned_cmd_hashed.js
+++ b/jstests/sharding/cleanup_orphaned_cmd_hashed.js
@@ -1,5 +1,6 @@
//
// Tests cleanup of orphaned data in hashed sharded coll via the orphaned data cleanup command
+// @tags : [ hashed ]
//
var options = { separateConfig : true, shardOptions : { verbose : 2 } };
diff --git a/jstests/sharding/hash_basic.js b/jstests/sharding/hash_basic.js
index 52a10df71cc..0cd11e5c1bf 100644
--- a/jstests/sharding/hash_basic.js
+++ b/jstests/sharding/hash_basic.js
@@ -1,3 +1,6 @@
+/**
+ * @tags : [ hashed ]
+ */
var st = new ShardingTest({ shards: 2, chunkSize: 1, other: { shardOptions: { verbose: 1 }} });
st.stopBalancer();
diff --git a/jstests/sharding/hash_shard1.js b/jstests/sharding/hash_shard1.js
index 50c8e9261c8..dc164324c2a 100644
--- a/jstests/sharding/hash_shard1.js
+++ b/jstests/sharding/hash_shard1.js
@@ -1,5 +1,6 @@
// Basic test of sharding with a hashed shard key
// - Test basic migrations with moveChunk, using different chunk specification methods
+// @tags : [ hashed ]
var s = new ShardingTest( { name : jsTestName() , shards : 3 , mongos : 1, verbose : 1 } );
var dbname = "test";
diff --git a/jstests/sharding/hash_shard_non_empty.js b/jstests/sharding/hash_shard_non_empty.js
index f45e1c2113c..25335f16610 100644
--- a/jstests/sharding/hash_shard_non_empty.js
+++ b/jstests/sharding/hash_shard_non_empty.js
@@ -1,4 +1,5 @@
// Hash sharding on a non empty collection should not pre-split.
+// @tags : [ hashed ]
var s = new ShardingTest({ name : jsTestName(), shards : 3, mongos : 1, verbose : 1 });
var dbname = "test";
diff --git a/jstests/sharding/hash_shard_num_chunks.js b/jstests/sharding/hash_shard_num_chunks.js
index cf4bb481c14..ee7f752d3d4 100644
--- a/jstests/sharding/hash_shard_num_chunks.js
+++ b/jstests/sharding/hash_shard_num_chunks.js
@@ -1,4 +1,5 @@
// Hash sharding with initial chunk count set.
+// @tags : [ hashed ]
var s = new ShardingTest({ shards : 3, mongos : 1, verbose : 1 });
var dbname = "test";
diff --git a/jstests/sharding/hash_shard_unique_compound.js b/jstests/sharding/hash_shard_unique_compound.js
index 832cb93600f..c16e85bbed2 100644
--- a/jstests/sharding/hash_shard_unique_compound.js
+++ b/jstests/sharding/hash_shard_unique_compound.js
@@ -2,6 +2,7 @@
// Does 2 things and checks for consistent error:
// 1.) shard collection on hashed "a", ensure unique index {a:1, b:1}
// 2.) reverse order
+// @tags : [ hashed ]
var s = new ShardingTest( { name : jsTestName() , shards : 1 , mongos : 1, verbose : 1 } );
var dbName = "test";
diff --git a/jstests/sharding/hash_single_shard.js b/jstests/sharding/hash_single_shard.js
index 7dc9d6df078..705bbaab665 100644
--- a/jstests/sharding/hash_single_shard.js
+++ b/jstests/sharding/hash_single_shard.js
@@ -1,4 +1,5 @@
// Test hashed presplit with 1 shard.
+// @tags : [ hashed ]
var st = new ShardingTest({ shards: 1 });
var testDB = st.getDB('test');
diff --git a/jstests/sharding/hash_skey_split.js b/jstests/sharding/hash_skey_split.js
index 58ebfaa6112..817e6154c4a 100644
--- a/jstests/sharding/hash_skey_split.js
+++ b/jstests/sharding/hash_skey_split.js
@@ -1,3 +1,6 @@
+/**
+ * @tags : [ hashed ]
+ */
var st = new ShardingTest({ shards: 2, other: { shardOptions: { verbose: 1 }} });
st.stopBalancer();
diff --git a/jstests/sharding/index1.js b/jstests/sharding/index1.js
index 2767c18dab3..9c223f89651 100644
--- a/jstests/sharding/index1.js
+++ b/jstests/sharding/index1.js
@@ -1,3 +1,7 @@
+/**
+ * @tags : [ hashed ]
+ */
+
// from server 2326 - make sure that sharding only works with unique indices
s = new ShardingTest( "shard_index", 2, 0, 1 )
diff --git a/jstests/sharding/missing_key.js b/jstests/sharding/missing_key.js
index 2eebc0d0912..d83b81e5a90 100644
--- a/jstests/sharding/missing_key.js
+++ b/jstests/sharding/missing_key.js
@@ -1,5 +1,6 @@
// Test that the shardCollection command fails when a preexisting document lacks a shard key field.
// SERVER-8772
+// @tags : [ hashed ]
var st = new ShardingTest( { shards: 1 } );
st.stopBalancer();
diff --git a/jstests/sharding/regex_targeting.js b/jstests/sharding/regex_targeting.js
index 777d1c184c7..3fad66e2c51 100644
--- a/jstests/sharding/regex_targeting.js
+++ b/jstests/sharding/regex_targeting.js
@@ -1,5 +1,6 @@
//
// This checks to make sure that sharded regex queries behave the same as unsharded regex queries
+// @tags : [ hashed ]
//
var options = { mongosOptions : { binVersion : "" },
diff --git a/site_scons/site_tools/unittest.py b/site_scons/site_tools/mongo_unittest.py
index 93ce02fefac..70d66a02833 100644
--- a/site_scons/site_tools/unittest.py
+++ b/site_scons/site_tools/mongo_unittest.py
@@ -1,6 +1,11 @@
"""Pseudo-builders for building and registering unit tests.
"""
+import os
+import json
+
+from buildscripts import smoke
+
def exists(env):
return True
@@ -17,8 +22,24 @@ def unit_test_list_builder_action(env, target, source):
ofile.write('%s\n' % s)
finally:
ofile.close()
+
+ dir, filename = os.path.split(str(target[0]))
+ filebase, ext = os.path.splitext(filename)
+
+ # Generate metadata file for unit tests
+ metadata_filename = os.path.join(dir, ".".join([filebase, "json"]))
+ print "Generating metadata file %s" % metadata_filename
+
+ tests = []
+ for s in source:
+ tests.append(smoke.tests.Test(filename=str(s), test_type="exe_test"))
+ print '\t' + str(s)
+
+ # For now, write JSON to avoid YAML parsing dependency
+ smoke.tests.write_metadata(tests, metadata_filename, json_only=True)
def build_cpp_unit_test(env, target, source, **kwargs):
+
libdeps = kwargs.get('LIBDEPS', [])
libdeps.append( '$BUILD_DIR/mongo/unittest/unittest_main' )