-rwxr-xr-x  buildscripts/resmoke.py                                    15
-rw-r--r--  buildscripts/resmokelib/config.py                          27
-rw-r--r--  buildscripts/resmokelib/parser.py                          94
-rw-r--r--  buildscripts/resmokelib/testing/executor.py                21
-rw-r--r--  buildscripts/resmokelib/testing/fixtures/masterslave.py     3
-rw-r--r--  buildscripts/resmokelib/testing/fixtures/replicaset.py      3
-rw-r--r--  buildscripts/resmokelib/testing/fixtures/shardedcluster.py  3
-rw-r--r--  buildscripts/resmokelib/testing/fixtures/standalone.py      3
-rw-r--r--  buildscripts/resmokelib/testing/hook_test_archival.py     116
-rw-r--r--  buildscripts/resmokelib/testing/job.py                      46
-rw-r--r--  buildscripts/resmokelib/utils/__init__.py                    2
-rw-r--r--  buildscripts/resmokelib/utils/archival.py                   35
-rw-r--r--  etc/evergreen.yml                                           31
13 files changed, 314 insertions, 85 deletions
diff --git a/buildscripts/resmoke.py b/buildscripts/resmoke.py
index 37793800f4d..6624a881c1e 100755
--- a/buildscripts/resmoke.py
+++ b/buildscripts/resmoke.py
@@ -65,8 +65,18 @@ def _execute_suite(suite):
suite.return_code = 0
return False
+ archive = None
+ if resmokelib.config.ARCHIVE_FILE:
+ archive = resmokelib.utils.archival.Archival(
+ archival_json_file=resmokelib.config.ARCHIVE_FILE,
+ execution=resmokelib.config.EVERGREEN_EXECUTION,
+ limit_size_mb=resmokelib.config.ARCHIVE_LIMIT_MB,
+ limit_files=resmokelib.config.ARCHIVE_LIMIT_TESTS,
+ logger=logger)
+
executor_config = suite.get_executor_config()
- executor = resmokelib.testing.executor.TestSuiteExecutor(logger, suite, **executor_config)
+ executor = resmokelib.testing.executor.TestSuiteExecutor(
+ logger, suite, archive_instance=archive, **executor_config)
try:
executor.run()
@@ -83,6 +93,9 @@ def _execute_suite(suite):
suite.test_kind, suite.get_display_name())
suite.return_code = 2
return False
+ finally:
+ if archive:
+ archive.exit()
def _log_summary(logger, suites, time_taken):
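
Note: the hunk above creates a single Archival instance per resmoke invocation (only when --archiveFile is given) and shuts it down in a finally block, so queued uploads are not lost when a suite errors out. A minimal stand-alone sketch of that lifecycle, with literal values standing in for the resmokelib.config settings and a plain logging.Logger in place of the resmoke logger:

    # Sketch only: mirrors the pattern added to _execute_suite() above.
    import logging

    from buildscripts.resmokelib.utils import archival

    logger = logging.getLogger("executor")
    archive = archival.Archival(
        archival_json_file="archive.json",  # config.ARCHIVE_FILE (--archiveFile)
        execution=0,                        # config.EVERGREEN_EXECUTION (--executionNumber)
        limit_size_mb=5000,                 # config.ARCHIVE_LIMIT_MB (default)
        limit_files=10,                     # config.ARCHIVE_LIMIT_TESTS (default)
        logger=logger)
    try:
        pass  # executor.run() would go here; failing tests queue their data files for upload
    finally:
        archive.exit()  # always shut archival down cleanly, even if the suite raised
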
diff --git a/buildscripts/resmokelib/config.py b/buildscripts/resmokelib/config.py
index 1dcd7d77932..9a68c88ce01 100644
--- a/buildscripts/resmokelib/config.py
+++ b/buildscripts/resmokelib/config.py
@@ -35,6 +35,9 @@ MONGO_RUNNER_SUBDIR = "mongorunner"
# Names below correspond to how they are specified via the command line or in the options YAML file.
DEFAULTS = {
+ "archiveFile": None,
+ "archiveLimitMb": 5000,
+ "archiveLimitTests": 10,
"basePort": 20000,
"buildloggerUrl": "https://logkeeper.mongodb.org",
"continueOnFailure": False,
@@ -43,6 +46,8 @@ DEFAULTS = {
"distroId": None,
"dryRun": None,
"excludeWithAnyTags": None,
+ "executionNumber": 0,
+ "gitRevision": None,
"includeWithAnyTags": None,
"jobs": 1,
"mongo": None,
@@ -55,6 +60,7 @@ DEFAULTS = {
"shellPort": None,
"shellConnString": None,
"patchBuild": False,
+ "projectName": "mongodb-mongo-master",
"repeat": 1,
"reportFailureStatus": "fail",
"reportFile": None,
@@ -165,6 +171,15 @@ SuiteOptions.ALL_INHERITED = SuiteOptions(**dict(zip(SuiteOptions._fields,
# Variables that are set by the user at the command line or with --options.
##
+# The name of the archive JSON file used to associate S3 archives with an Evergreen task.
+ARCHIVE_FILE = None
+
+# The size limit (in MB) of all archived files for an Evergreen task.
+ARCHIVE_LIMIT_MB = None
+
+# The maximum number of tests to archive for an Evergreen task.
+ARCHIVE_LIMIT_TESTS = None
+
# The starting port number to use for mongod and mongos processes spawned by resmoke.py and the
# mongo shell.
BASE_PORT = None
@@ -186,9 +201,18 @@ DRY_RUN = None
# The identifier for the Evergreen distro that resmoke.py is being run on.
EVERGREEN_DISTRO_ID = None
+# The number of the Evergreen execution that resmoke.py is being run on.
+EVERGREEN_EXECUTION = None
+
# If true, then resmoke.py is being run as part of a patch build in Evergreen.
EVERGREEN_PATCH_BUILD = None
+# The name of the Evergreen project that resmoke.py is being run on.
+EVERGREEN_PROJECT_NAME = None
+
+# The git revision of the Evergreen task that resmoke.py is being run on.
+EVERGREEN_REVISION = None
+
# The identifier for the Evergreen task that resmoke.py is being run under. If set, then the
# Evergreen task id value will be transmitted to logkeeper when creating builds and tests.
EVERGREEN_TASK_ID = None
@@ -301,6 +325,9 @@ WT_INDEX_CONFIG = None
# Internally used configuration options that aren't exposed to the user
##
+# S3 Bucket to upload archive files.
+ARCHIVE_BUCKET = "mongodatafiles"
+
# Default sort order for test execution. Will only be changed if --suites wasn't specified.
ORDER_TESTS_BY_NAME = True
diff --git a/buildscripts/resmokelib/parser.py b/buildscripts/resmokelib/parser.py
index 5faf5058358..33f9de51d24 100644
--- a/buildscripts/resmokelib/parser.py
+++ b/buildscripts/resmokelib/parser.py
@@ -19,6 +19,9 @@ from .. import resmokeconfig
# Mapping of the attribute of the parsed arguments (dest) to its key as it appears in the options
# YAML configuration file. Most should only be converting from snake_case to camelCase.
DEST_TO_CONFIG = {
+ "archive_file": "archiveFile",
+ "archive_limit_mb": "archiveLimitMb",
+ "archive_limit_tests": "archiveLimitTests",
"base_port": "basePort",
"buildlogger_url": "buildloggerUrl",
"continue_on_failure": "continueOnFailure",
@@ -27,6 +30,8 @@ DEST_TO_CONFIG = {
"distro_id": "distroId",
"dry_run": "dryRun",
"exclude_with_any_tags": "excludeWithAnyTags",
+ "execution_number": "executionNumber",
+ "git_revision": "gitRevision",
"include_with_any_tags": "includeWithAnyTags",
"jobs": "jobs",
"mongo_executable": "mongo",
@@ -38,6 +43,7 @@ DEST_TO_CONFIG = {
"num_clients_per_fixture": "numClientsPerFixture",
"patch_build": "patchBuild",
"prealloc_journal": "preallocJournal",
+ "project_name": "projectName",
"repeat": "repeat",
"report_failure_status": "reportFailureStatus",
"report_file": "reportFile",
@@ -85,6 +91,23 @@ def parse_command_line():
parser.add_option("--options", dest="options_file", metavar="OPTIONS",
help="A YAML file that specifies global options to resmoke.py.")
+ parser.add_option("--archiveFile", dest="archive_file", metavar="ARCHIVE_FILE",
+ help=("Sets the archive file name for the Evergreen task running the tests."
+ " The archive file is JSON format containing a list of tests that were"
+ " successfully archived to S3. If unspecified, no data files from tests"
+ " will be archived in S3. Tests can be designated for archival in the"
+ " task suite configuration file."))
+
+ parser.add_option("--archiveLimitMb", type="int", dest="archive_limit_mb",
+ metavar="ARCHIVE_LIMIT_MB",
+ help=("Sets the limit (in MB) for archived files to S3. A value of 0"
+ " indicates there is no limit."))
+
+ parser.add_option("--archiveLimitTests", type="int", dest="archive_limit_tests",
+ metavar="ARCHIVE_LIMIT_TESTS",
+ help=("Sets the maximum number of tests to archive to S3. A value"
+ " of 0 indicates there is no limit."))
+
parser.add_option("--basePort", dest="base_port", metavar="PORT",
help=("The starting port number to use for mongod and mongos processes"
" spawned by resmoke.py or the tests themselves. Each fixture and Job"
@@ -109,7 +132,7 @@ def parse_command_line():
" specified tags will be excluded from any suites that are run."))
parser.add_option("-f", "--findSuites", action="store_true", dest="find_suites",
- help="List the names of the suites that will execute the specified tests.")
+ help="Lists the names of the suites that will execute the specified tests.")
parser.add_option("--includeWithAnyTags", action="append", dest="include_with_any_tags",
metavar="TAG1,TAG2",
@@ -118,20 +141,20 @@ def parse_command_line():
" run."))
parser.add_option("-n", action="store_const", const="tests", dest="dry_run",
- help=("Output the tests that would be run."))
+ help=("Outputs the tests that would be run."))
# TODO: add support for --dryRun=commands
parser.add_option("--dryRun", type="choice", action="store", dest="dry_run",
choices=("off", "tests"), metavar="MODE",
- help=("Instead of running the tests, output the tests that would be run"
+ help=("Instead of running the tests, outputs the tests that would be run"
" (if MODE=tests). Defaults to MODE=%default."))
parser.add_option("-j", "--jobs", type="int", dest="jobs", metavar="JOBS",
- help=("The number of Job instances to use. Each instance will receive its own"
- " MongoDB deployment to dispatch tests to."))
+ help=("The number of Job instances to use. Each instance will receive its"
+ " own MongoDB deployment to dispatch tests to."))
parser.add_option("-l", "--listSuites", action="store_true", dest="list_suites",
- help="List the names of the suites available to execute.")
+ help="Lists the names of the suites available to execute.")
parser.add_option("--mongo", dest="mongo_executable", metavar="PATH",
help="The path to the mongo shell executable for resmoke.py to use.")
@@ -141,7 +164,7 @@ def parse_command_line():
parser.add_option("--mongodSetParameters", dest="mongod_parameters",
metavar="{key1: value1, key2: value2, ..., keyN: valueN}",
- help=("Pass one or more --setParameter options to all mongod processes"
+ help=("Passes one or more --setParameter options to all mongod processes"
" started by resmoke.py. The argument is specified as bracketed YAML -"
" i.e. JSON with support for single quoted and unquoted keys."))
@@ -150,27 +173,27 @@ def parse_command_line():
parser.add_option("--mongosSetParameters", dest="mongos_parameters",
metavar="{key1: value1, key2: value2, ..., keyN: valueN}",
- help=("Pass one or more --setParameter options to all mongos processes"
+ help=("Passes one or more --setParameter options to all mongos processes"
" started by resmoke.py. The argument is specified as bracketed YAML -"
" i.e. JSON with support for single quoted and unquoted keys."))
parser.add_option("--nojournal", action="store_true", dest="no_journal",
- help="Disable journaling for all mongod's.")
+ help="Disables journaling for all mongod's.")
parser.add_option("--nopreallocj", action="store_const", const="off", dest="prealloc_journal",
- help="Disable preallocation of journal files for all mongod processes.")
+ help="Disables preallocation of journal files for all mongod processes.")
parser.add_option("--numClientsPerFixture", type="int", dest="num_clients_per_fixture",
help="Number of clients running tests per fixture")
parser.add_option("--preallocJournal", type="choice", action="store", dest="prealloc_journal",
choices=("on", "off"), metavar="ON|OFF",
- help=("Enable or disable preallocation of journal files for all mongod"
+ help=("Enables or disables preallocation of journal files for all mongod"
" processes. Defaults to %default."))
parser.add_option("--shellConnString", dest="shell_conn_string",
metavar="CONN_STRING",
- help="Override the default fixture and connect to an existing MongoDB"
+ help="Overrides the default fixture and connect to an existing MongoDB"
" cluster instead. This is useful for connecting to a MongoDB"
" deployment started outside of resmoke.py including one running in a"
" debugger.")
@@ -181,7 +204,7 @@ def parse_command_line():
" This is useful for connecting to a server running in a debugger.")
parser.add_option("--repeat", type="int", dest="repeat", metavar="N",
- help="Repeat the given suite(s) N times, or until one fails.")
+ help="Repeats the given suite(s) N times, or until one fails.")
parser.add_option("--reportFailureStatus", type="choice", action="store",
dest="report_failure_status", choices=("fail", "silentfail"),
@@ -191,7 +214,7 @@ def parse_command_line():
" never be silently ignored. Defaults to STATUS=%default.")
parser.add_option("--reportFile", dest="report_file", metavar="REPORT",
- help="Write a JSON file with test status and timing information.")
+ help="Writes a JSON file with test status and timing information.")
parser.add_option("--seed", type="int", dest="seed", metavar="SEED",
help=("Seed for the random number generator. Useful in combination with the"
@@ -212,38 +235,38 @@ def parse_command_line():
help="The write mode used by the mongo shell.")
parser.add_option("--shuffle", action="store_const", const="on", dest="shuffle",
- help=("Randomize the order in which tests are executed. This is equivalent"
+ help=("Randomizes the order in which tests are executed. This is equivalent"
" to specifying --shuffleMode=on."))
parser.add_option("--shuffleMode", type="choice", action="store", dest="shuffle",
choices=("on", "off", "auto"), metavar="ON|OFF|AUTO",
- help=("Control whether to randomize the order in which tests are executed."
+ help=("Controls whether to randomize the order in which tests are executed."
" Defaults to auto when not supplied. auto enables randomization in"
" all cases except when the number of jobs requested is 1."))
parser.add_option("--staggerJobs", type="choice", action="store", dest="stagger_jobs",
choices=("on", "off"), metavar="ON|OFF",
- help=("Enable or disable the stagger of launching resmoke jobs."
+ help=("Enables or disables the stagger of launching resmoke jobs."
" Defaults to %default."))
parser.add_option("--storageEngine", dest="storage_engine", metavar="ENGINE",
help="The storage engine used by dbtests and jstests.")
parser.add_option("--storageEngineCacheSizeGB", dest="storage_engine_cache_size",
- metavar="CONFIG", help="Set the storage engine cache size configuration"
+ metavar="CONFIG", help="Sets the storage engine cache size configuration"
" setting for all mongod's.")
parser.add_option("--tagFile", dest="tag_file", metavar="OPTIONS",
help="A YAML file that associates tests and tags.")
parser.add_option("--wiredTigerCollectionConfigString", dest="wt_coll_config", metavar="CONFIG",
- help="Set the WiredTiger collection configuration setting for all mongod's.")
+ help="Sets the WiredTiger collection configuration setting for all mongod's.")
parser.add_option("--wiredTigerEngineConfigString", dest="wt_engine_config", metavar="CONFIG",
- help="Set the WiredTiger engine configuration setting for all mongod's.")
+ help="Sets the WiredTiger engine configuration setting for all mongod's.")
parser.add_option("--wiredTigerIndexConfigString", dest="wt_index_config", metavar="CONFIG",
- help="Set the WiredTiger index configuration setting for all mongod's.")
+ help="Sets the WiredTiger index configuration setting for all mongod's.")
parser.add_option("--executor", dest="executor_file",
help="OBSOLETE: Superceded by --suites; specify --suites=SUITE path/to/test"
@@ -257,21 +280,34 @@ def parse_command_line():
parser.add_option_group(evergreen_options)
evergreen_options.add_option("--distroId", dest="distro_id", metavar="DISTRO_ID",
- help=("Set the identifier for the Evergreen distro running the"
+ help=("Sets the identifier for the Evergreen distro running the"
+ " tests."))
+
+ evergreen_options.add_option("--executionNumber", type="int", dest="execution_number",
+ metavar="EXECUTION_NUMBER",
+ help=("Sets the number for the Evergreen execution running the"
+ " tests."))
+
+ evergreen_options.add_option("--gitRevision", dest="git_revision", metavar="GIT_REVISION",
+ help=("Sets the git revision for the Evergreen task running the"
" tests."))
evergreen_options.add_option("--patchBuild", action="store_true", dest="patch_build",
- help=("Indicate that the Evergreen task running the tests is a"
+ help=("Indicates that the Evergreen task running the tests is a"
" patch build."))
+ evergreen_options.add_option("--projectName", dest="project_name", metavar="PROJECT_NAME",
+ help=("Sets the name of the Evergreen project running the tests."
+ ))
+
evergreen_options.add_option("--taskName", dest="task_name", metavar="TASK_NAME",
- help="Set the name of the Evergreen task running the tests.")
+ help="Sets the name of the Evergreen task running the tests.")
evergreen_options.add_option("--taskId", dest="task_id", metavar="TASK_ID",
- help="Set the Id of the Evergreen task running the tests.")
+ help="Sets the Id of the Evergreen task running the tests.")
evergreen_options.add_option("--variantName", dest="variant_name", metavar="VARIANT_NAME",
- help=("Set the name of the Evergreen build variant running the"
+ help=("Sets the name of the Evergreen build variant running the"
" tests."))
parser.set_defaults(logger_file="console",
@@ -322,13 +358,19 @@ def update_config_vars(values):
if values[dest] is not None:
config[config_var] = values[dest]
+ _config.ARCHIVE_FILE = config.pop("archiveFile")
+ _config.ARCHIVE_LIMIT_MB = config.pop("archiveLimitMb")
+ _config.ARCHIVE_LIMIT_TESTS = config.pop("archiveLimitTests")
_config.BASE_PORT = int(config.pop("basePort"))
_config.BUILDLOGGER_URL = config.pop("buildloggerUrl")
_config.DBPATH_PREFIX = _expand_user(config.pop("dbpathPrefix"))
_config.DBTEST_EXECUTABLE = _expand_user(config.pop("dbtest"))
_config.DRY_RUN = config.pop("dryRun")
_config.EVERGREEN_DISTRO_ID = config.pop("distroId")
+ _config.EVERGREEN_EXECUTION = config.pop("executionNumber")
_config.EVERGREEN_PATCH_BUILD = config.pop("patchBuild")
+ _config.EVERGREEN_PROJECT_NAME = config.pop("projectName")
+ _config.EVERGREEN_REVISION = config.pop("gitRevision")
_config.EVERGREEN_TASK_ID = config.pop("taskId")
_config.EVERGREEN_TASK_NAME = config.pop("taskName")
_config.EVERGREEN_VARIANT_NAME = config.pop("variantName")
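
Note: together with the DEFAULTS added to config.py, archival stays opt-in: nothing is uploaded unless --archiveFile is passed. An Evergreen-style invocation then looks roughly like "resmoke.py --suites=SUITE --archiveFile=archive.json --taskId=TASK_ID --executionNumber=EXECUTION --projectName=PROJECT --gitRevision=REVISION --variantName=VARIANT", with --archiveLimitMb and --archiveLimitTests falling back to 5000 MB and 10 tests when omitted (the real invocation is in the etc/evergreen.yml hunk below).
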
diff --git a/buildscripts/resmokelib/testing/executor.py b/buildscripts/resmokelib/testing/executor.py
index 95741119045..49df64ae2ef 100644
--- a/buildscripts/resmokelib/testing/executor.py
+++ b/buildscripts/resmokelib/testing/executor.py
@@ -8,6 +8,7 @@ import threading
import time
from . import fixtures
+from . import hook_test_archival as archival
from . import hooks as _hooks
from . import job as _job
from . import report as _report
@@ -34,7 +35,9 @@ class TestSuiteExecutor(object):
suite,
config=None,
fixture=None,
- hooks=None):
+ hooks=None,
+ archive_instance=None,
+ archive=None):
"""
Initializes the TestSuiteExecutor with the test suite to run.
"""
@@ -50,10 +53,15 @@ class TestSuiteExecutor(object):
self.hooks_config = utils.default_if_none(hooks, [])
self.test_config = utils.default_if_none(config, {})
+ self.archival = None
+ if archive_instance:
+ self.archival = archival.HookTestArchival(
+ suite, self.hooks_config, archive_instance, archive)
+
self._suite = suite
- # Only start as many jobs as we need. Note this means that the number of jobs we run may not
- # actually be _config.JOBS or self._suite.options.num_jobs.
+ # Only start as many jobs as we need. Note this means that the number of jobs we run may
+ # not actually be _config.JOBS or self._suite.options.num_jobs.
jobs_to_start = self._suite.options.num_jobs
num_tests = len(suite.tests)
@@ -269,7 +277,12 @@ class TestSuiteExecutor(object):
report = _report.TestReport(job_logger, self._suite.options)
- return _job.Job(job_logger, fixture, hooks, report, self._suite.options)
+ return _job.Job(job_logger,
+ fixture,
+ hooks,
+ report,
+ self.archival,
+ self._suite.options)
def _make_test_queue(self):
"""
diff --git a/buildscripts/resmokelib/testing/fixtures/masterslave.py b/buildscripts/resmokelib/testing/fixtures/masterslave.py
index 96b4ec5e96b..2b9c48de3e7 100644
--- a/buildscripts/resmokelib/testing/fixtures/masterslave.py
+++ b/buildscripts/resmokelib/testing/fixtures/masterslave.py
@@ -120,6 +120,9 @@ class MasterSlaveFixture(interface.ReplFixture):
def get_secondaries(self):
return [self.slave]
+ def get_dbpath(self):
+ return self._dbpath_prefix
+
def _new_mongod(self, mongod_logger, mongod_options):
"""
Returns a standalone.MongoDFixture with the specified logger and
diff --git a/buildscripts/resmokelib/testing/fixtures/replicaset.py b/buildscripts/resmokelib/testing/fixtures/replicaset.py
index 4677805151c..67d3e18f3aa 100644
--- a/buildscripts/resmokelib/testing/fixtures/replicaset.py
+++ b/buildscripts/resmokelib/testing/fixtures/replicaset.py
@@ -310,6 +310,9 @@ class ReplicaSetFixture(interface.ReplFixture):
def get_initial_sync_node(self):
return self.initial_sync_node
+ def get_dbpath(self):
+ return self._dbpath_prefix
+
def _new_mongod(self, index, replset_name):
"""
Returns a standalone.MongoDFixture configured to be used as a
diff --git a/buildscripts/resmokelib/testing/fixtures/shardedcluster.py b/buildscripts/resmokelib/testing/fixtures/shardedcluster.py
index 3c185c2126a..a4c053b77a2 100644
--- a/buildscripts/resmokelib/testing/fixtures/shardedcluster.py
+++ b/buildscripts/resmokelib/testing/fixtures/shardedcluster.py
@@ -102,6 +102,9 @@ class ShardedClusterFixture(interface.Fixture):
for shard in self.shards:
shard.setup()
+ def get_dbpath(self):
+ return self._dbpath_prefix
+
def await_ready(self):
# Wait for the config server
if self.configsvr is not None:
diff --git a/buildscripts/resmokelib/testing/fixtures/standalone.py b/buildscripts/resmokelib/testing/fixtures/standalone.py
index 1fd8a80c7e1..4b99381d6ca 100644
--- a/buildscripts/resmokelib/testing/fixtures/standalone.py
+++ b/buildscripts/resmokelib/testing/fixtures/standalone.py
@@ -87,6 +87,9 @@ class MongoDFixture(interface.Fixture):
self.mongod = mongod
+ def get_dbpath(self):
+ return self._dbpath
+
def await_ready(self):
deadline = time.time() + MongoDFixture.AWAIT_READY_TIMEOUT_SECS
diff --git a/buildscripts/resmokelib/testing/hook_test_archival.py b/buildscripts/resmokelib/testing/hook_test_archival.py
new file mode 100644
index 00000000000..0503d7be50d
--- /dev/null
+++ b/buildscripts/resmokelib/testing/hook_test_archival.py
@@ -0,0 +1,116 @@
+"""
+Enables support for archiving tests or hooks.
+"""
+
+from __future__ import absolute_import
+
+import os
+import threading
+
+from .. import config
+from .. import utils
+from ..utils import globstar
+
+
+class HookTestArchival(object):
+ """
+ Archives hooks and tests to S3.
+ """
+
+ def __init__(self, suite, hooks, archive_instance, archive_config):
+ self.archive_instance = archive_instance
+ archive_config = utils.default_if_none(archive_config, {})
+
+ self.on_success = archive_config.get("on_success", False)
+
+ self.tests = []
+ if "tests" in archive_config:
+ # 'tests' is either a list of tests to archive or a bool (archive all if True).
+ if not isinstance(archive_config["tests"], bool):
+ for test in archive_config["tests"]:
+ self.tests += globstar.glob(test)
+ elif archive_config["tests"]:
+ self.tests = suite.tests
+
+ self.hooks = []
+ if "hooks" in archive_config:
+ # 'hooks' is either a list of hooks to archive or a bool (archive all if True).
+ if not isinstance(archive_config["hooks"], bool):
+ self.hooks = archive_config["hooks"]
+ elif archive_config["hooks"]:
+ for hook in hooks:
+ self.hooks.append(hook["class"])
+
+ self._tests_repeat = {}
+ self._lock = threading.Lock()
+
+ def _should_archive(self, success):
+ """ Return True if failed test or 'on_success' is True. """
+ return not success or self.on_success
+
+ def _archive_hook(self, logger, hook, test, success):
+ """ Helper to archive hooks. """
+ hook_match = hook.REGISTERED_NAME in self.hooks
+ if not hook_match or not self._should_archive(success):
+ return
+
+ test_name = "{}:{}".format(test.short_name(), hook.REGISTERED_NAME)
+ self._archive_hook_or_test(logger, test_name, test)
+
+ def _archive_test(self, logger, test, success):
+ """ Helper to archive tests. """
+ test_name = test.test_name
+ test_match = False
+ for arch_test in self.tests:
+ # Ensure that the test_name is in the same format as the arch_test.
+ if os.path.normpath(test_name) == os.path.normpath(arch_test):
+ test_match = True
+ break
+ if not test_match or not self._should_archive(success):
+ return
+
+ self._archive_hook_or_test(logger, test_name, test)
+
+ def archive(self, logger, test, success, hook=None):
+ """ Archives data files for hooks or tests. """
+ if not config.ARCHIVE_FILE or not self.archive_instance:
+ return
+ if hook:
+ self._archive_hook(logger, hook, test, success)
+ else:
+ self._archive_test(logger, test, success)
+
+ def _archive_hook_or_test(self, logger, test_name, test):
+ """ Trigger archive of data files for a test or hook. """
+
+ with self._lock:
+ # Test repeat number is how many times the particular test has been archived.
+ if test_name not in self._tests_repeat:
+ self._tests_repeat[test_name] = 0
+ else:
+ self._tests_repeat[test_name] += 1
+ logger.info("Archiving data files for test %s", test_name)
+ # Normalize test path from a test or hook name.
+ test_path = \
+ test_name.replace("/", "_").replace("\\", "_").replace(".", "_").replace(":", "_")
+ file_name = "mongo-data-{}-{}-{}-{}.tgz".format(
+ config.EVERGREEN_TASK_ID,
+ test_path,
+ config.EVERGREEN_EXECUTION,
+ self._tests_repeat[test_name])
+ # Retrieve root directory for all dbPaths from fixture.
+ input_files = test.fixture.get_dbpath()
+ s3_bucket = config.ARCHIVE_BUCKET
+ s3_path = "{}/{}/{}/datafiles/{}".format(
+ config.EVERGREEN_PROJECT_NAME,
+ config.EVERGREEN_VARIANT_NAME,
+ config.EVERGREEN_REVISION,
+ file_name)
+ display_name = "Data files {} - Execution {} Repetition {}".format(
+ test_name,
+ config.EVERGREEN_EXECUTION,
+ self._tests_repeat[test_name])
+ status, message = self.archive_instance.archive_files_to_s3(
+ display_name, input_files, s3_bucket, s3_path)
+ if status:
+ logger.warning("Archive failed for %s: %s", test_name, message)
diff --git a/buildscripts/resmokelib/testing/job.py b/buildscripts/resmokelib/testing/job.py
index 9841c071ce7..a684ff24dc0 100644
--- a/buildscripts/resmokelib/testing/job.py
+++ b/buildscripts/resmokelib/testing/job.py
@@ -17,7 +17,7 @@ class Job(object):
Runs tests from a queue.
"""
- def __init__(self, logger, fixture, hooks, report, suite_options):
+ def __init__(self, logger, fixture, hooks, report, archival, suite_options):
"""
Initializes the job with the specified fixture and custom
behaviors.
@@ -27,6 +27,7 @@ class Job(object):
self.fixture = fixture
self.hooks = hooks
self.report = report
+ self.archival = archival
self.suite_options = suite_options
def __call__(self, queue, interrupt_flag, teardown_flag=None):
@@ -98,20 +99,36 @@ class Job(object):
self._run_hooks_before_tests(test)
test(self.report)
- if self.suite_options.fail_fast and not self.report.wasSuccessful():
- self.logger.info("%s failed, so stopping..." % (test.shortDescription()))
- raise errors.StopExecution("%s failed" % (test.shortDescription()))
-
- if not self.fixture.is_running():
- self.logger.error("%s marked as a failure because the fixture crashed during the test.",
- test.shortDescription())
- self.report.setFailure(test, return_code=2)
- # Always fail fast if the fixture fails.
- raise errors.StopExecution("%s not running after %s" %
- (self.fixture, test.shortDescription()))
+ try:
+ if self.suite_options.fail_fast and not self.report.wasSuccessful():
+ self.logger.info("%s failed, so stopping..." % (test.shortDescription()))
+ raise errors.StopExecution("%s failed" % (test.shortDescription()))
+
+ if not self.fixture.is_running():
+ self.logger.error(
+ "%s marked as a failure because the fixture crashed during the test.",
+ test.shortDescription())
+ self.report.setFailure(test, return_code=2)
+ # Always fail fast if the fixture fails.
+ raise errors.StopExecution("%s not running after %s" %
+ (self.fixture, test.shortDescription()))
+ finally:
+ success = self.report._find_test_info(test).status == "pass"
+ if self.archival:
+ self.archival.archive(self.logger, test, success)
self._run_hooks_after_tests(test)
+ def _run_hook(self, hook, hook_function, test):
+ """ Helper to run hook and archival. """
+ try:
+ success = False
+ hook_function(test, self.report)
+ success = True
+ finally:
+ if self.archival:
+ self.archival.archive(self.logger, test, success, hook=hook)
+
def _run_hooks_before_tests(self, test):
"""
Runs the before_test method on each of the hooks.
@@ -119,10 +136,9 @@ class Job(object):
Swallows any TestFailure exceptions if set to continue on
failure, and reraises any other exceptions.
"""
-
try:
for hook in self.hooks:
- hook.before_test(test, self.report)
+ self._run_hook(hook, hook.before_test, test)
except errors.StopExecution:
raise
@@ -156,7 +172,7 @@ class Job(object):
"""
try:
for hook in self.hooks:
- hook.after_test(test, self.report)
+ self._run_hook(hook, hook.after_test, test)
except errors.StopExecution:
raise
diff --git a/buildscripts/resmokelib/utils/__init__.py b/buildscripts/resmokelib/utils/__init__.py
index fa782f34301..57dc8705319 100644
--- a/buildscripts/resmokelib/utils/__init__.py
+++ b/buildscripts/resmokelib/utils/__init__.py
@@ -11,6 +11,8 @@ import sys
import yaml
+from . import archival
+
@contextlib.contextmanager
def open_or_use_stdout(filename):
diff --git a/buildscripts/resmokelib/utils/archival.py b/buildscripts/resmokelib/utils/archival.py
index baafe90778c..9d31e053846 100644
--- a/buildscripts/resmokelib/utils/archival.py
+++ b/buildscripts/resmokelib/utils/archival.py
@@ -9,11 +9,14 @@ import collections
import json
import math
import os
+import sys
import tarfile
import tempfile
import threading
import time
+_IS_WINDOWS = sys.platform == "win32" or sys.platform == "cygwin"
+
UploadArgs = collections.namedtuple(
"UploadArgs",
["archival_file",
@@ -128,6 +131,10 @@ class Archival(object):
Returns status and message, where message contains information if status is non-0.
"""
+ # TODO: Support archival on Windows (SERVER-33144).
+ if _IS_WINDOWS:
+ return 1, "Archival not supported on Windows"
+
start_time = time.time()
with self._lock:
if not input_files:
@@ -135,10 +142,10 @@ class Archival(object):
message = "No input_files specified"
elif self.limit_size_mb and self.size_mb >= self.limit_size_mb:
status = 1
- message = "Files not archived, limit size {}MB reached".format(self.limit_size_mb)
+ message = "Files not archived, {}MB size limit reached".format(self.limit_size_mb)
elif self.limit_files and self.num_files >= self.limit_files:
status = 1
- message = "Files not archived, limit files {} reached".format(self.limit_files)
+ message = "Files not archived, {} file limit reached".format(self.limit_files)
else:
status, message, file_size_mb = self._archive_files(
display_name,
@@ -203,7 +210,10 @@ class Archival(object):
logger.exception("Upload to S3 error %s", err)
if upload_args.delete_file:
- os.remove(upload_args.local_file)
+ try:
+ os.remove(upload_args.local_file)
+ except Exception as err:
+ logger.exception("Upload to S3 file removal error %s", err)
remote_file = "https://s3.amazonaws.com/{}/{}".format(
upload_args.s3_bucket, upload_args.s3_path)
@@ -232,28 +242,27 @@ class Archival(object):
size_mb = 0
# Tar/gzip to a temporary file.
- temp_file = tempfile.NamedTemporaryFile(suffix=".tgz", delete=False)
- local_file = temp_file.name
+ _, temp_file = tempfile.mkstemp(suffix=".tgz")
# Check if there is sufficient space for the temporary tgz file.
- if file_list_size(input_files) > free_space(local_file):
- os.remove(local_file)
+ if file_list_size(input_files) > free_space(temp_file):
+ os.remove(temp_file)
return 1, "Insufficient space for {}".format(message), 0
try:
- with tarfile.open(local_file, "w:gz") as tar_handle:
+ with tarfile.open(temp_file, "w:gz") as tar_handle:
for input_file in input_files:
tar_handle.add(input_file)
except (IOError, tarfile.TarError) as err:
- message = str(err)
- status = 1
+ os.remove(temp_file)
+ return 1, str(err), 0
- # Round up the size of archive.
- size_mb = int(math.ceil(float(file_list_size(local_file)) / (1024 * 1024)))
+ # Round up the size of the archive.
+ size_mb = int(math.ceil(float(file_list_size(temp_file)) / (1024 * 1024)))
self._upload_queue.put(UploadArgs(
self.archival_json_file,
display_name,
- local_file,
+ temp_file,
"application/x-gzip",
s3_bucket,
s3_path,
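
Note: a minimal stand-alone sketch of the Archival API exercised by hook_test_archival.py; the bucket, S3 path, and dbpath below are illustrative, boto3 must be installed (see the evergreen.yml hunk that follows), and AWS credentials are assumed to be available in the environment:

    # Sketch only. The argument order for archive_files_to_s3 follows the call site in
    # hook_test_archival.py: (display_name, input_files, s3_bucket, s3_path).
    import logging

    from buildscripts.resmokelib.utils import archival

    logger = logging.getLogger("archival_example")
    arch = archival.Archival(
        archival_json_file="archive.json", execution=0,
        limit_size_mb=5000, limit_files=10, logger=logger)

    status, message = arch.archive_files_to_s3(
        "Data files jsCore_example - Execution 0 Repetition 0",  # human-readable display name
        "/data/db/job0",     # directory to tar/gzip, e.g. the value of fixture.get_dbpath()
        "mongodatafiles",    # config.ARCHIVE_BUCKET
        "my-project/my-variant/abcdef0/datafiles/mongo-data-example.tgz")
    if status:
        logger.warning("Not archived: %s", message)  # non-zero status means nothing was uploaded
    arch.exit()  # shut down the background uploader before exiting
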
diff --git a/etc/evergreen.yml b/etc/evergreen.yml
index ea4d31e91f5..8fbca956428 100644
--- a/etc/evergreen.yml
+++ b/etc/evergreen.yml
@@ -583,6 +583,7 @@ functions:
# activate the virtualenv if it has been set up
${activate_virtualenv}
+ pip install boto3
# Set the TMPDIR environment variable to be a directory in the task's working
# directory so that temporary files created by processes spawned by resmoke.py get
@@ -687,8 +688,12 @@ functions:
--staggerJobs=on \
--taskId=${task_id} \
--taskName=${task_name} \
+ --executionNumber=${execution} \
+ --projectName=${project} \
--variantName=${build_variant} \
--distroId=${distro_id} \
+ --gitRevision=${revision} \
+ --archiveFile=archive.json \
--reportFile=report.json
resmoke_exit_code=$?
set -o errexit
@@ -1783,32 +1788,6 @@ post:
rm -rf /data/charybdefs
fi
- # Gather and archive FTDC data.
- - command: shell.exec
- params:
- working_dir: src
- script: |
- # Using shell and tar to recurse properly to all possible diagnostic.data subdirectories.
- # The archive.targz_pack command is not being used here because the command's glob support
- # did not allow us to gather all directories.
- if [ -d /data/db ]; then
- file_list=$(cd /data/db && find . -type d -name diagnostic.data)
- if [ -n "$file_list" ]; then
- ${tar|tar} cvzf diagnostic-data.tgz -C /data/db $file_list
- fi
- fi
- - command: s3.put
- params:
- aws_key: ${aws_key}
- aws_secret: ${aws_secret}
- local_file: src/diagnostic-data.tgz
- remote_file: ${project}/${build_variant}/${revision}/ftdc/mongo-diagnostic-data-${task_id}-${execution}.tgz
- bucket: mciuploads
- permissions: public-read
- content_type: ${content_type|application/x-gzip}
- display_name: FTDC Diagnostic Data - Execution ${execution}
- optional: true
-
# Archive remote EC2 monitor files.
- command: s3.put
params: