Diffstat (limited to 'buildscripts/resmokelib')
64 files changed, 947 insertions, 1335 deletions
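Most of the changes below apply one mechanical pattern: multi-line docstrings are collapsed into a PEP 257-style summary — a single imperative line on the same line as the opening quotes, with any remaining detail moved into a body paragraph after a blank line — and inline pydocstyle/pylint suppressions (`# noqa: D2xx`, `# pylint: disable=...`, `# type: ignore`) are added where the tools would otherwise complain. The sketch below illustrates that before/after shape; the names (PortPool, next_port) are hypothetical and are not code from this commit:

    """Hand out ports from a fixed per-job range (hypothetical sketch, not from the commit)."""

    import collections
    import threading


    class PortPool(object):
        """Hand out ports from a fixed per-job range."""

        # Old style, removed throughout the diff:
        #
        #     """
        #     This class is responsible for handing out ports.
        #     """
        #
        # New style, added throughout the diff: one imperative summary line on
        # the same line as the opening quotes, ending with a period. The inline
        # suppression below mirrors the ones the diff adds.

        _NUM_USED_PORTS = collections.defaultdict(int)  # type: ignore
        _LOCK = threading.Lock()

        _PORTS_PER_JOB = 10
        _MIN_PORT = 20000

        @classmethod
        def next_port(cls, job_num):
            """Return the next unused port for 'job_num'.

            Raises a ValueError if the job has exhausted its reserved range.
            """
            with cls._LOCK:
                used = cls._NUM_USED_PORTS[job_num]
                if used >= cls._PORTS_PER_JOB:
                    raise ValueError("job %d has no ports left" % job_num)
                cls._NUM_USED_PORTS[job_num] += 1
                return cls._MIN_PORT + job_num * cls._PORTS_PER_JOB + used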
diff --git a/buildscripts/resmokelib/config.py b/buildscripts/resmokelib/config.py index 4c8f0a4e720..66753c389db 100644 --- a/buildscripts/resmokelib/config.py +++ b/buildscripts/resmokelib/config.py @@ -1,6 +1,4 @@ -""" -Configuration options for resmoke.py. -""" +"""Configuration options for resmoke.py.""" from __future__ import absolute_import @@ -113,19 +111,16 @@ _SuiteOptions = collections.namedtuple("_SuiteOptions", [ class SuiteOptions(_SuiteOptions): - """ - A class for representing top-level options to resmoke.py that can also be set at the - suite-level. - """ + """Represent top-level options to resmoke.py that can also be set at the suite-level.""" INHERIT = object() ALL_INHERITED = None @classmethod def combine(cls, *suite_options_list): - """ - Returns a SuiteOptions instance representing the combination of all SuiteOptions in - 'suite_options_list'. + """Return SuiteOptions instance. + + This object represents the combination of all SuiteOptions in 'suite_options_list'. """ combined_options = cls.ALL_INHERITED._asdict() @@ -158,8 +153,9 @@ class SuiteOptions(_SuiteOptions): return cls(**combined_options) def resolve(self): - """ - Returns a SuiteOptions instance representing the options overridden at the suite-level and + """Return a SuiteOptions instance. + + This represents the options overridden at the suite-level and the inherited options from the top-level. """ @@ -183,8 +179,8 @@ class SuiteOptions(_SuiteOptions): return SuiteOptions(**options) -SuiteOptions.ALL_INHERITED = SuiteOptions(**dict( - zip(SuiteOptions._fields, itertools.repeat(SuiteOptions.INHERIT)))) +SuiteOptions.ALL_INHERITED = SuiteOptions( # type: ignore + **dict(zip(SuiteOptions._fields, itertools.repeat(SuiteOptions.INHERIT)))) ## # Variables that are set by the user at the command line or with --options. diff --git a/buildscripts/resmokelib/core/__init__.py b/buildscripts/resmokelib/core/__init__.py index 29a19a52500..78a8b4924b8 100644 --- a/buildscripts/resmokelib/core/__init__.py +++ b/buildscripts/resmokelib/core/__init__.py @@ -1,3 +1,4 @@ +"""Resmokelib core module.""" from __future__ import absolute_import from . import process diff --git a/buildscripts/resmokelib/core/network.py b/buildscripts/resmokelib/core/network.py index eda2c95417e..f42e6a86d85 100644 --- a/buildscripts/resmokelib/core/network.py +++ b/buildscripts/resmokelib/core/network.py @@ -1,7 +1,4 @@ -""" -Class used to allocate ports for use by various mongod and mongos -processes involved in running the tests. -""" +"""Class used to allocate ports for mongod and mongos processes involved in running the tests.""" from __future__ import absolute_import @@ -14,16 +11,14 @@ from .. import errors def _check_port(func): - """ - A decorator that verifies the port returned by the wrapped function - is in the valid range. + """Provide decorator that verifies the port returned by the wrapped function is in range. - Returns the port if it is valid, and raises a PortAllocationError - otherwise. + Returns the port if it is valid, and raises a PortAllocationError otherwise. """ @functools.wraps(func) def wrapper(*args, **kwargs): + """Provide wrapper function.""" port = func(*args, **kwargs) if port < 0: @@ -39,8 +34,7 @@ def _check_port(func): class PortAllocator(object): - """ - This class is responsible for allocating ranges of ports. + """Class responsible for allocating ranges of ports. 
It reserves a range of ports for each job with the first part of that range used for the fixture started by that job, and the second @@ -62,13 +56,12 @@ class PortAllocator(object): _NUM_USED_PORTS_LOCK = threading.Lock() # Used to keep track of how many ports a fixture has allocated. - _NUM_USED_PORTS = collections.defaultdict(int) + _NUM_USED_PORTS = collections.defaultdict(int) # type: ignore @classmethod @_check_port def next_fixture_port(cls, job_num): - """ - Returns the next port for a fixture to use. + """Return the next port for a fixture to use. Raises a PortAllocationError if the fixture has requested more ports than are reserved per job, or if the next port is not a @@ -91,9 +84,7 @@ class PortAllocator(object): @classmethod @_check_port def min_test_port(cls, job_num): - """ - For the given job, returns the lowest port that is reserved for - use by tests. + """Return the lowest port that is reserved for use by tests, for specified job. Raises a PortAllocationError if that port is higher than the maximum port. @@ -103,9 +94,7 @@ class PortAllocator(object): @classmethod @_check_port def max_test_port(cls, job_num): - """ - For the given job, returns the highest port that is reserved - for use by tests. + """Return the highest port that is reserved for use by tests, for specified job. Raises a PortAllocationError if that port is higher than the maximum port. @@ -115,8 +104,7 @@ class PortAllocator(object): @classmethod def reset(cls): - """ - Resets the internal state of the PortAllocator. + """Reset the internal state of the PortAllocator. This method is intended to be called each time resmoke.py starts a new test suite. diff --git a/buildscripts/resmokelib/core/pipe.py b/buildscripts/resmokelib/core/pipe.py index bb080721b2d..5aba2ed9a81 100644 --- a/buildscripts/resmokelib/core/pipe.py +++ b/buildscripts/resmokelib/core/pipe.py @@ -1,6 +1,7 @@ """ -Helper class to read output of a subprocess. Used to avoid deadlocks -from the pipe buffer filling up and blocking the subprocess while it's +Helper class to read output of a subprocess. + +Used to avoid deadlocks from the pipe buffer filling up and blocking the subprocess while it's being waited on. """ @@ -9,11 +10,8 @@ from __future__ import absolute_import import threading -class LoggerPipe(threading.Thread): - """ - Asynchronously reads the output of a subprocess and sends it to a - logger. - """ +class LoggerPipe(threading.Thread): # pylint: disable=too-many-instance-attributes + """Asynchronously reads the output of a subprocess and sends it to a logger.""" # The start() and join() methods are not intended to be called directly on the LoggerPipe # instance. Since we override them for that effect, the super's version are preserved here. @@ -21,10 +19,7 @@ class LoggerPipe(threading.Thread): __join = threading.Thread.join def __init__(self, logger, level, pipe_out): - """ - Initializes the LoggerPipe with the specified logger, logging - level to use, and pipe to read from. - """ + """Initialize the LoggerPipe with the specified arguments.""" threading.Thread.__init__(self) # Main thread should not call join() when exiting @@ -43,12 +38,11 @@ class LoggerPipe(threading.Thread): LoggerPipe.__start(self) def start(self): + """Start not implemented.""" raise NotImplementedError("start should not be called directly") def run(self): - """ - Reads the output from 'pipe_out' and logs each line to 'logger'. 
- """ + """Read the output from 'pipe_out' and logs each line to 'logger'.""" with self.__lock: self.__started = True @@ -70,14 +64,17 @@ class LoggerPipe(threading.Thread): self.__condition.notify_all() def join(self, timeout=None): + """Join not implemented.""" raise NotImplementedError("join should not be called directly") def wait_until_started(self): + """Wait until started.""" with self.__lock: while not self.__started: self.__condition.wait() def wait_until_finished(self): + """Wait until finished.""" with self.__lock: while not self.__finished: self.__condition.wait() diff --git a/buildscripts/resmokelib/core/process.py b/buildscripts/resmokelib/core/process.py index 42f9454bd91..4b94b2dd016 100644 --- a/buildscripts/resmokelib/core/process.py +++ b/buildscripts/resmokelib/core/process.py @@ -1,5 +1,4 @@ -""" -A more reliable way to create and destroy processes. +"""A more reliable way to create and destroy processes. Uses job objects when running on Windows to ensure that all created processes are terminated. @@ -30,12 +29,12 @@ if os.name == "posix" and sys.version_info[0] == 2: warnings.warn(("Falling back to using the subprocess module because subprocess32 isn't" " available. When using the subprocess module, a child process may trigger" " an invalid free(). See SERVER-22219 for more details."), RuntimeWarning) - import subprocess + import subprocess # type: ignore else: import subprocess -from . import pipe -from .. import utils +from . import pipe # pylint: disable=wrong-import-position +from .. import utils # pylint: disable=wrong-import-position # Attempt to avoid race conditions (e.g. hangs caused by a file descriptor being left open) when # starting subprocesses concurrently from multiple threads by guarding calls to subprocess.Popen() @@ -87,15 +86,12 @@ if sys.platform == "win32": class Process(object): - """ - Wrapper around subprocess.Popen class. - """ + """Wrapper around subprocess.Popen class.""" + + # pylint: disable=protected-access def __init__(self, logger, args, env=None, env_vars=None): - """ - Initializes the process with the specified logger, arguments, - and environment. - """ + """Initialize the process with the specified logger, arguments, and environment.""" # Ensure that executable files that don't already have an # extension on Windows have a ".exe" extension. @@ -115,10 +111,7 @@ class Process(object): self._stderr_pipe = None def start(self): - """ - Starts the process and the logger pipes for its stdout and - stderr. - """ + """Start the process and the logger pipes for its stdout and stderr.""" creation_flags = 0 if sys.platform == "win32" and _JOB_OBJECT is not None: @@ -158,12 +151,12 @@ class Process(object): if return_code == win32con.STILL_ACTIVE: raise - def stop(self, kill=False): + def stop(self, kill=False): # pylint: disable=too-many-branches """Terminate the process.""" if sys.platform == "win32": # Attempt to cleanly shutdown mongod. - if not kill and len(self.args) > 0 and self.args[0].find("mongod") != -1: + if not kill and self.args and self.args[0].find("mongod") != -1: mongo_signal_handle = None try: mongo_signal_handle = win32event.OpenEvent( @@ -218,13 +211,11 @@ class Process(object): raise def poll(self): + """Poll.""" return self._process.poll() def wait(self): - """ - Waits until the process has terminated and all output has been - consumed by the logger pipes. 
- """ + """Wait until process has terminated and all output has been consumed by the logger pipes.""" return_code = self._process.wait() @@ -236,9 +227,7 @@ class Process(object): return return_code def as_command(self): - """ - Returns an equivalent command line invocation of the process. - """ + """Return an equivalent command line invocation of the process.""" default_env = os.environ env_diff = self.env.copy() diff --git a/buildscripts/resmokelib/core/programs.py b/buildscripts/resmokelib/core/programs.py index b868aa8a1ba..db7a98a52ce 100644 --- a/buildscripts/resmokelib/core/programs.py +++ b/buildscripts/resmokelib/core/programs.py @@ -1,5 +1,4 @@ -""" -Utility functions to create MongoDB processes. +"""Utility functions to create MongoDB processes. Handles all the nitty-gritty parameter conversion. """ @@ -16,11 +15,9 @@ from .. import config from .. import utils -def mongod_program(logger, executable=None, process_kwargs=None, **kwargs): - """ - Returns a Process instance that starts a mongod executable with - arguments constructed from 'kwargs'. - """ +def mongod_program( # pylint: disable=too-many-branches + logger, executable=None, process_kwargs=None, **kwargs): + """Return a Process instance that starts mongod arguments constructed from 'kwargs'.""" executable = utils.default_if_none(executable, config.DEFAULT_MONGOD_EXECUTABLE) args = [executable] @@ -117,10 +114,7 @@ def mongod_program(logger, executable=None, process_kwargs=None, **kwargs): def mongos_program(logger, executable=None, process_kwargs=None, **kwargs): - """ - Returns a Process instance that starts a mongos executable with - arguments constructed from 'kwargs'. - """ + """Return a Process instance that starts a mongos with arguments constructed from 'kwargs'.""" executable = utils.default_if_none(executable, config.DEFAULT_MONGOS_EXECUTABLE) args = [executable] @@ -143,11 +137,12 @@ def mongos_program(logger, executable=None, process_kwargs=None, **kwargs): return _process.Process(logger, args, **process_kwargs) -def mongo_shell_program(logger, executable=None, connection_string=None, filename=None, - process_kwargs=None, **kwargs): - """ - Returns a Process instance that starts a mongo shell with the given connection string and - arguments constructed from 'kwargs'. +def mongo_shell_program( # pylint: disable=too-many-branches,too-many-locals,too-many-statements + logger, executable=None, connection_string=None, filename=None, process_kwargs=None, + **kwargs): + """Return a Process instance that starts a mongo shell. + + The shell is started with the given connection string and arguments constructed from 'kwargs'. """ connection_string = utils.default_if_none(config.SHELL_CONN_STRING, connection_string) @@ -256,8 +251,7 @@ def mongo_shell_program(logger, executable=None, connection_string=None, filenam def _format_shell_vars(sb, path, value): - """ - Formats 'value' in a way that can be passed to --eval. + """Format 'value' in a way that can be passed to --eval. If 'value' is a dictionary, then it is unrolled into the creation of a new JSON object with properties assigned for each key of the @@ -277,10 +271,7 @@ def _format_shell_vars(sb, path, value): def dbtest_program(logger, executable=None, suites=None, process_kwargs=None, **kwargs): - """ - Returns a Process instance that starts a dbtest executable with - arguments constructed from 'kwargs'. 
- """ + """Return a Process instance that starts a dbtest with arguments constructed from 'kwargs'.""" executable = utils.default_if_none(executable, config.DEFAULT_DBTEST_EXECUTABLE) args = [executable] @@ -295,10 +286,11 @@ def dbtest_program(logger, executable=None, suites=None, process_kwargs=None, ** def generic_program(logger, args, process_kwargs=None, **kwargs): - """ - Returns a Process instance that starts an arbitrary executable with - arguments constructed from 'kwargs'. The args parameter is an array - of strings containing the command to execute. + """Return a Process instance that starts an arbitrary executable. + + The executable arguments are constructed from 'kwargs'. + + The args parameter is an array of strings containing the command to execute. """ if not utils.is_string_list(args): @@ -311,9 +303,9 @@ def generic_program(logger, args, process_kwargs=None, **kwargs): def _format_test_data_set_parameters(set_parameters): - """ - Converts key-value pairs from 'set_parameters' into the comma - delimited list format expected by the parser in servers.js. + """Convert key-value pairs from 'set_parameters' into a comma delimited list format. + + The format is used by the parser in servers.js. WARNING: the parsing logic in servers.js is very primitive. Non-scalar options such as logComponentVerbosity will not work @@ -332,9 +324,9 @@ def _format_test_data_set_parameters(set_parameters): def _apply_set_parameters(args, set_parameter): - """ - Converts key-value pairs from 'kwargs' into --setParameter key=value - arguments to an executable and appends them to 'args'. + """Convert key-value pairs from 'kwargs' into --setParameter key=value arguments. + + This result is appended to 'args'. """ for param_name in set_parameter: @@ -347,10 +339,9 @@ def _apply_set_parameters(args, set_parameter): def _apply_kwargs(args, kwargs): - """ - Converts key-value pairs from 'kwargs' into --key value arguments - to an executable and appends them to 'args'. + """Convert key-value pairs from 'kwargs' into --key value arguments. + This result is appended to 'args'. A --flag without a value is represented with the empty string. """ @@ -363,9 +354,7 @@ def _apply_kwargs(args, kwargs): def _set_keyfile_permissions(opts): - """ - Change the permissions of keyfiles in 'opts' to 600, i.e. only the - user can read and write the file. + """Change the permissions of keyfiles in 'opts' to 600, (only user can read and write the file). This necessary to avoid having the mongod/mongos fail to start up because "permissions on the keyfiles are too open". diff --git a/buildscripts/resmokelib/errors.py b/buildscripts/resmokelib/errors.py index 6ec329c14b5..8f49a567a79 100644 --- a/buildscripts/resmokelib/errors.py +++ b/buildscripts/resmokelib/errors.py @@ -1,59 +1,47 @@ -""" -Exceptions raised by resmoke.py. -""" +"""Exceptions raised by resmoke.py.""" -class ResmokeError(Exception): - """ - Base class for all resmoke.py exceptions. - """ +class ResmokeError(Exception): # noqa: D204 + """Base class for all resmoke.py exceptions.""" pass -class SuiteNotFound(ResmokeError): - """ - A suite that isn't recognized was specified. - """ +class SuiteNotFound(ResmokeError): # noqa: D204 + """A suite that isn't recognized was specified.""" pass -class StopExecution(ResmokeError): - """ - Exception that is raised when resmoke.py should stop executing tests - if failing fast is enabled. 
- """ +class StopExecution(ResmokeError): # noqa: D204 + """Exception raised when resmoke.py should stop executing tests if failing fast is enabled.""" pass -class UserInterrupt(StopExecution): - """ - Exception that is raised when a user signals resmoke.py to - unconditionally stop executing tests. - """ +class UserInterrupt(StopExecution): # noqa: D204 + """Exception raised when a user signals resmoke.py to unconditionally stop executing tests.""" pass -class TestFailure(ResmokeError): - """ - Exception that is raised by a hook in the after_test method if it - determines the the previous test should be marked as a failure. +class TestFailure(ResmokeError): # noqa: D204 + """Exception raised by a hook in the after_test method. + + Raised if it determines the the previous test should be marked as a failure. """ pass -class ServerFailure(TestFailure): - """ - Exception that is raised by a hook in the after_test method if it - detects that the fixture did not exit cleanly and should be marked +class ServerFailure(TestFailure): # noqa: D204 + """Exception raised by a hook in the after_test method. + + Raised if it detects that the fixture did not exit cleanly and should be marked as a failure. """ pass -class PortAllocationError(ResmokeError): - """ - Exception that is raised by the PortAllocator if a port is requested - outside of the range of valid ports, or if a fixture requests more - ports than were reserved for that job. +class PortAllocationError(ResmokeError): # noqa: D204 + """Exception that is raised by the PortAllocator. + + Raised if a port is requested outside of the range of valid ports, or if a + fixture requests more ports than were reserved for that job. """ pass diff --git a/buildscripts/resmokelib/logging/__init__.py b/buildscripts/resmokelib/logging/__init__.py index 816a62004d8..d0b4a48ac57 100644 --- a/buildscripts/resmokelib/logging/__init__.py +++ b/buildscripts/resmokelib/logging/__init__.py @@ -1,6 +1,4 @@ -""" -Extension to the logging package to support buildlogger. -""" +"""Extension to the logging package to support buildlogger.""" from __future__ import absolute_import diff --git a/buildscripts/resmokelib/logging/buildlogger.py b/buildscripts/resmokelib/logging/buildlogger.py index 56a5defc5a4..5fa52c85c69 100644 --- a/buildscripts/resmokelib/logging/buildlogger.py +++ b/buildscripts/resmokelib/logging/buildlogger.py @@ -1,6 +1,4 @@ -""" -Defines handlers for communicating with a buildlogger server. -""" +"""Define handlers for communicating with a buildlogger server.""" from __future__ import absolute_import @@ -27,19 +25,17 @@ BUILDLOGGER_FALLBACK = None def _log_on_error(func): - """ - A decorator that causes any exceptions to be logged by the - "buildlogger" Logger instance. + """Provide decorator that causes exceptions to be logged by the "buildlogger" Logger instance. - Returns the wrapped function's return value, or None if an error - was encountered. + Return the wrapped function's return value, or None if an error was encountered. """ @functools.wraps(func) def wrapper(*args, **kwargs): + """Provide wrapper function.""" try: return func(*args, **kwargs) - except: + except: # pylint: disable=bare-except BUILDLOGGER_FALLBACK.exception("Encountered an error.") return None @@ -50,9 +46,8 @@ class _LogsSplitter(object): """Class with static methods used to split list of log lines into smaller batches.""" @staticmethod - def split_logs(log_lines, max_size): - """ - Splits the log lines into batches of size less than or equal to max_size. 
+ def split_logs(log_lines, max_size): # noqa: D406,D407,D411,D413 + """Split the log lines into batches of size less than or equal to max_size. Args: log_lines: A list of log lines. @@ -65,8 +60,8 @@ class _LogsSplitter(object): return [log_lines] def line_size(line): - """ - Computes the encoded JSON size of a log line as part of an array. + """Compute the encoded JSON size of a log line as part of an array. + 2 is added to each string size to account for the array representation of the logs, as each line is preceded by a '[' or a space and followed by a ',' or a ']'. """ @@ -88,17 +83,11 @@ class _LogsSplitter(object): class _BaseBuildloggerHandler(handlers.BufferedHandler): - """ - Base class of the buildlogger handler for the global logs and the - handler for the test logs. - """ + """Base class of the buildlogger handler for global logs and handler for test logs.""" def __init__(self, build_config, endpoint, capacity=_SEND_AFTER_LINES, interval_secs=_SEND_AFTER_SECS): - """ - Initializes the buildlogger handler with the build id and - credentials. - """ + """Initialize the buildlogger handler with the build id and credentials.""" handlers.BufferedHandler.__init__(self, capacity, interval_secs) @@ -111,9 +100,9 @@ class _BaseBuildloggerHandler(handlers.BufferedHandler): self.max_size = None def process_record(self, record): - """ - Returns a tuple of the time the log record was created, and the - message because the buildlogger expects the log messages + """Return a tuple of the time the log record was created, and the message. + + This is necessary because the buildlogger expects the log messages to be formatted in JSON as: [ [ <log-time-1>, <log-message-1> ], @@ -124,14 +113,11 @@ class _BaseBuildloggerHandler(handlers.BufferedHandler): return (record.created, msg) def post(self, *args, **kwargs): - """ - Convenience method for subclasses to use when making POST requests. - """ + """Provide convenience method for subclasses to use when making POST requests.""" return self.http_handler.post(*args, **kwargs) - def _append_logs(self, log_lines): - """ - Sends a POST request to the handlers endpoint with the logs that have been captured. + def _append_logs(self, log_lines): # noqa: D406,D407,D413 + """Send a POST request to the handlers endpoint with the logs that have been captured. Returns: The number of log lines that have been successfully sent. @@ -145,10 +131,8 @@ class _BaseBuildloggerHandler(handlers.BufferedHandler): break return lines_sent - def __append_logs_chunk(self, log_lines_chunk): - """ - Sends a log lines chunk, handles 413 Request Entity Too Large errors and retries - if necessary. + def __append_logs_chunk(self, log_lines_chunk): # noqa: D406,D407,D413 + """Send log lines chunk, handle 413 Request Entity Too Large errors & retry, if necessary. Returns: The number of log lines that have been successfully sent. @@ -173,14 +157,12 @@ class _BaseBuildloggerHandler(handlers.BufferedHandler): return self._append_logs(log_lines_chunk) BUILDLOGGER_FALLBACK.exception("Encountered an error.") return 0 - except: + except: # pylint: disable=bare-except BUILDLOGGER_FALLBACK.exception("Encountered an error.") return 0 def _flush_buffer_with_lock(self, buf, close_called): - """ - Ensures all logging output has been flushed to the buildlogger - server. + """Ensure all logging output has been flushed to the buildlogger server. 
If _append_logs() returns false, then the log messages are added to a separate buffer and retried the next time flush() is @@ -205,13 +187,12 @@ class _BaseBuildloggerHandler(handlers.BufferedHandler): class BuildloggerTestHandler(_BaseBuildloggerHandler): - """ - Buildlogger handler for the test logs. - """ + """Buildlogger handler for the test logs.""" - def __init__(self, build_config, build_id, test_id, capacity=_SEND_AFTER_LINES, - interval_secs=_SEND_AFTER_SECS): - """Initializes the buildlogger handler with the credentials, build id, and test id.""" + def __init__( # pylint: disable=too-many-arguments + self, build_config, build_id, test_id, capacity=_SEND_AFTER_LINES, + interval_secs=_SEND_AFTER_SECS): + """Initialize the buildlogger handler with the credentials, build id, and test id.""" endpoint = APPEND_TEST_LOGS_ENDPOINT % { "build_id": build_id, "test_id": test_id, @@ -220,19 +201,14 @@ class BuildloggerTestHandler(_BaseBuildloggerHandler): @_log_on_error def _finish_test(self, failed=False): - """ - Sends a POST request to the APPEND_TEST_LOGS_ENDPOINT with the - test status. - """ + """Send a POST request to the APPEND_TEST_LOGS_ENDPOINT with the test status.""" self.post(self.endpoint, headers={ "X-Sendlogs-Test-Done": "true", "X-Sendlogs-Test-Failed": "true" if failed else "false", }) def close(self): - """ - Closes the buildlogger handler. - """ + """Close the buildlogger handler.""" _BaseBuildloggerHandler.close(self) @@ -241,13 +217,11 @@ class BuildloggerTestHandler(_BaseBuildloggerHandler): class BuildloggerGlobalHandler(_BaseBuildloggerHandler): - """ - Buildlogger handler for the global logs. - """ + """Buildlogger handler for the global logs.""" def __init__(self, build_config, build_id, capacity=_SEND_AFTER_LINES, interval_secs=_SEND_AFTER_SECS): - """Initializes the buildlogger handler with the credentials and build id.""" + """Initialize the buildlogger handler with the credentials and build id.""" endpoint = APPEND_GLOBAL_LOGS_ENDPOINT % {"build_id": build_id} _BaseBuildloggerHandler.__init__(self, build_config, endpoint, capacity, interval_secs) @@ -261,6 +235,7 @@ class BuildloggerServer(object): @_log_on_error def __init__(self): + """Initialize BuildloggerServer.""" tmp_globals = {} self.config = {} execfile(_BUILDLOGGER_CONFIG, tmp_globals, self.config) @@ -277,9 +252,7 @@ class BuildloggerServer(object): @_log_on_error def new_build_id(self, suffix): - """ - Returns a new build id for sending global logs to. - """ + """Return a new build id for sending global logs to.""" username = self.config["username"] password = self.config["password"] builder = "%s_%s" % (self.config["builder"], suffix) @@ -298,9 +271,7 @@ class BuildloggerServer(object): @_log_on_error def new_test_id(self, build_id, test_filename, test_command): - """ - Returns a new test id for sending test logs to. 
- """ + """Return a new test id for sending test logs to.""" handler = handlers.HTTPHandler(url_root=_config.BUILDLOGGER_URL, username=self.config["username"], password=self.config["password"]) @@ -317,19 +288,23 @@ class BuildloggerServer(object): return response["id"] def get_global_handler(self, build_id, handler_info): + """Return the global handler.""" return BuildloggerGlobalHandler(self.config, build_id, **handler_info) def get_test_handler(self, build_id, test_id, handler_info): + """Return the test handler.""" return BuildloggerTestHandler(self.config, build_id, test_id, **handler_info) @staticmethod def get_build_log_url(build_id): + """Return the build log URL.""" base_url = _config.BUILDLOGGER_URL.rstrip("/") endpoint = APPEND_GLOBAL_LOGS_ENDPOINT % {"build_id": build_id} return "%s/%s" % (base_url, endpoint.strip("/")) @staticmethod def get_test_log_url(build_id, test_id): + """Return the test log URL.""" base_url = _config.BUILDLOGGER_URL.rstrip("/") endpoint = APPEND_TEST_LOGS_ENDPOINT % {"build_id": build_id, "test_id": test_id} return "%s/%s" % (base_url, endpoint.strip("/")) diff --git a/buildscripts/resmokelib/logging/flush.py b/buildscripts/resmokelib/logging/flush.py index 5b2b488e51a..f5c2b356468 100644 --- a/buildscripts/resmokelib/logging/flush.py +++ b/buildscripts/resmokelib/logging/flush.py @@ -1,6 +1,6 @@ -""" -Manages a thread responsible for periodically calling flush() on -logging.Handler instances used to send logs to buildlogger. +"""Manage a thread responsible for periodically calling flush() on logging.Handler instances. + +These instances are used to send logs to buildlogger. """ from __future__ import absolute_import @@ -16,11 +16,9 @@ _FLUSH_THREAD = None def start_thread(): - """ - Starts the flush thread. - """ + """Start the flush thread.""" - global _FLUSH_THREAD + global _FLUSH_THREAD # pylint: disable=global-statement with _FLUSH_THREAD_LOCK: if _FLUSH_THREAD is not None: raise ValueError("FlushThread has already been started") @@ -30,9 +28,7 @@ def start_thread(): def stop_thread(): - """ - Signals the flush thread to stop and waits until it does. - """ + """Signal the flush thread to stop and wait until it does.""" with _FLUSH_THREAD_LOCK: if _FLUSH_THREAD is None: @@ -44,12 +40,9 @@ def stop_thread(): def flush_after(handler, delay): - """ - Adds 'handler' to the queue so that it is flushed after 'delay' - seconds by the flush thread. + """Add 'handler' to the queue so that it is flushed after 'delay' seconds by the flush thread. - Returns the scheduled event which may be used for later cancellation - (see cancel()). + Return the scheduled event which may be used for later cancellation (see cancel()). """ if not isinstance(handler, logging.Handler): @@ -59,12 +52,9 @@ def flush_after(handler, delay): def close_later(handler): - """ - Adds 'handler' to the queue so that it is closed later by the flush - thread. + """Add 'handler' to the queue so that it is closed later by the flush thread. - Returns the scheduled event which may be used for later cancelation - (see cancel()). + Return the scheduled event which may be used for later cancelation (see cancel()). """ if not isinstance(handler, logging.Handler): @@ -78,36 +68,27 @@ def close_later(handler): def cancel(event): - """ - Attempts to cancel the specified event. + """Attempt to cancel the specified event. - Returns true if the event was successfully canceled, and returns - false otherwise. + Returns true if the event was successfully canceled, and returns false otherwise. 
""" return _FLUSH_THREAD.cancel_event(event) class _FlushThread(threading.Thread): - """ - Asynchronously flushes and closes logging handlers. - """ + """Asynchronously flush and close logging handlers.""" _TIMEOUT = 24 * 60 * 60 # =1 day (a long time to have tests run) def __init__(self): - """ - Initializes the flush thread. - """ + """Initialize the flush thread.""" threading.Thread.__init__(self, name="FlushThread") # Do not wait to flush the logs if interrupted by the user. self.daemon = True def interruptible_sleep(secs): - """ - Waits up to 'secs' seconds or for the - 'self.__schedule_updated' event to be set. - """ + """Wait up to 'secs' seconds or for the 'self.__schedule_updated' event to be set.""" # Setting 'self.__schedule_updated' in submit() will cause the scheduler to return early # from its 'delayfunc'. This makes it so that if a new event is scheduled with @@ -121,9 +102,7 @@ class _FlushThread(threading.Thread): self.__terminated = threading.Event() def run(self): - """ - Continuously flushes and closes logging handlers. - """ + """Continuously flush and close logging handlers.""" try: while not (self.__should_stop.is_set() and self.__scheduler.empty()): @@ -146,9 +125,9 @@ class _FlushThread(threading.Thread): self.__terminated.set() def signal_shutdown(self): - """ - Indicates to the flush thread that it should exit once its - current queue of logging handlers are flushed and closed. + """Indicate to the flush thread that it should exit. + + This will happen once its current queue of logging handlers are flushed and closed. """ self.__should_stop.set() @@ -158,21 +137,16 @@ class _FlushThread(threading.Thread): self.__schedule_updated.set() def await_shutdown(self): - """ - Waits for the flush thread to finish processing its current - queue of logging handlers. - """ + """Wait for the flush thread to finish processing its current queue of logging handlers.""" while not self.__terminated.is_set(): # Need to pass a timeout to wait() so that KeyboardInterrupt exceptions are propagated. self.__terminated.wait(_FlushThread._TIMEOUT) def submit(self, action, delay): - """ - Schedules 'action' for 'delay' seconds from now. + """Schedule 'action' for 'delay' seconds from now. - Returns the scheduled event which may be used for later - cancelation (see cancel_event()). + Return the scheduled event which may be used for later cancelation (see cancel_event()). """ event = self.__scheduler.enter(delay, 0, action, ()) @@ -180,11 +154,9 @@ class _FlushThread(threading.Thread): return event def cancel_event(self, event): - """ - Attempts to cancel the specified event. + """Attempt to cancel the specified event. - Returns true if the event was successfully canceled, and returns - false otherwise. + Return true if the event was successfully canceled, and returns false otherwise. """ try: diff --git a/buildscripts/resmokelib/logging/formatters.py b/buildscripts/resmokelib/logging/formatters.py index 058c6d512c8..450d5d29cd8 100644 --- a/buildscripts/resmokelib/logging/formatters.py +++ b/buildscripts/resmokelib/logging/formatters.py @@ -1,22 +1,20 @@ -""" -Custom formatters for the logging handlers. -""" +"""Custom formatters for the logging handlers.""" from __future__ import absolute_import import logging -import sys import time class ISO8601Formatter(logging.Formatter): - """ - An ISO 8601 compliant formatter for log messages. It formats the - timezone as an hour/minute offset and uses a period as the + """An ISO 8601 compliant formatter for log messages. 
+ + It formats the timezone as an hour/minute offset and uses a period as the millisecond separator in order to match the log messages of MongoDB. """ def formatTime(self, record, datefmt=None): + """Return formatted time.""" converted_time = self.converter(record.created) if datefmt is not None: @@ -28,10 +26,7 @@ class ISO8601Formatter(logging.Formatter): @staticmethod def _format_timezone_offset(converted_time): - """ - Returns the timezone as an hour/minute offset in the form - "+HHMM" or "-HHMM". - """ + """Return the timezone as an hour/minute offset in the form "+HHMM" or "-HHMM".""" # Windows treats %z in the format string as %Z, so we compute the hour/minute offset # manually. diff --git a/buildscripts/resmokelib/logging/handlers.py b/buildscripts/resmokelib/logging/handlers.py index 28e35d25a09..982a2f38b6e 100644 --- a/buildscripts/resmokelib/logging/handlers.py +++ b/buildscripts/resmokelib/logging/handlers.py @@ -1,7 +1,4 @@ -""" -Additional handlers that are used as the base classes of the buildlogger -handler. -""" +"""Additional handlers that are used as the base classes of the buildlogger handler.""" from __future__ import absolute_import @@ -27,17 +24,16 @@ _TIMEOUT_SECS = 10 class BufferedHandler(logging.Handler): - """ - A handler class that buffers logging records in memory. Whenever - each record is added to the buffer, a check is made to see if the - buffer should be flushed. If it should, then flush() is expected to - do what's needed. + """A handler class that buffers logging records in memory. + + Whenever each record is added to the buffer, a check is made to see if the buffer + should be flushed. If it should, then flush() is expected to do what's needed. """ def __init__(self, capacity, interval_secs): - """ - Initializes the handler with the buffer size and timeout after - which the buffer is flushed regardless. + """Initialize the handler with the buffer size and timeout. + + These values determine when the buffer is flushed regardless. """ logging.Handler.__init__(self) @@ -68,18 +64,19 @@ class BufferedHandler(logging.Handler): # close() serialize accesses to 'self.__emit_buffer' in a more granular way via # 'self.__emit_lock'. def createLock(self): + """Create lock.""" pass def acquire(self): + """Acquire.""" pass def release(self): + """Release.""" pass - def process_record(self, record): - """ - Applies a transformation to the record before it gets added to - the buffer. + def process_record(self, record): # pylint: disable=no-self-use + """Apply a transformation to the record before it gets added to the buffer. The default implementation returns 'record' unmodified. """ @@ -87,8 +84,7 @@ class BufferedHandler(logging.Handler): return record def emit(self, record): - """ - Emits a record. + """Emit a record. Append the record to the buffer after it has been transformed by process_record(). If the length of the buffer is greater than or @@ -117,9 +113,7 @@ class BufferedHandler(logging.Handler): self.__flush_scheduled_by_emit = True def flush(self): - """ - Ensures all logging output has been flushed. - """ + """Ensure all logging output has been flushed.""" self.__flush(close_called=False) @@ -132,9 +126,7 @@ class BufferedHandler(logging.Handler): self.__flush_scheduled_by_emit = False def __flush(self, close_called): - """ - Ensures all logging output has been flushed. 
- """ + """Ensure all logging output has been flushed.""" with self.__emit_lock: buf = self.__emit_buffer @@ -147,18 +139,13 @@ class BufferedHandler(logging.Handler): self._flush_buffer_with_lock(buf, close_called) def _flush_buffer_with_lock(self, buf, close_called): - """ - Ensures all logging output has been flushed. - """ + """Ensure all logging output has been flushed.""" raise NotImplementedError("_flush_buffer_with_lock must be implemented by BufferedHandler" " subclasses") def close(self): - """ - Flushes the buffer and tidies up any resources used by this - handler. - """ + """Flush the buffer and tidies up any resources used by this handler.""" with self.__emit_lock: if self.__flush_event is not None: @@ -170,15 +157,10 @@ class BufferedHandler(logging.Handler): class HTTPHandler(object): - """ - A class which sends data to a web server using POST requests. - """ + """A class which sends data to a web server using POST requests.""" def __init__(self, url_root, username, password): - """ - Initializes the handler with the necessary authentication - credentials. - """ + """Initialize the handler with the necessary authentication credentials.""" self.auth_handler = requests.auth.HTTPBasicAuth(username, password) @@ -188,11 +170,9 @@ class HTTPHandler(object): return "%s/%s/" % (self.url_root.rstrip("/"), endpoint.strip("/")) def post(self, endpoint, data=None, headers=None, timeout_secs=_TIMEOUT_SECS): - """ - Sends a POST request to the specified endpoint with the supplied - data. + """Send a POST request to the specified endpoint with the supplied data. - Returns the response, either as a string or a JSON object based + Return the response, either as a string or a JSON object based on the content type. """ diff --git a/buildscripts/resmokelib/logging/loggers.py b/buildscripts/resmokelib/logging/loggers.py index a53186b7aa3..d319928eb5f 100644 --- a/buildscripts/resmokelib/logging/loggers.py +++ b/buildscripts/resmokelib/logging/loggers.py @@ -1,6 +1,4 @@ -""" -Module to hold the logger instances themselves. -""" +"""Module to hold the logger instances themselves.""" from __future__ import absolute_import @@ -20,17 +18,21 @@ EXECUTOR_LOGGER = None def _build_logger_server(logging_config): - """Create and return a new BuildloggerServer if "buildlogger" is configured as - one of the handler class in the configuration, return None otherwise. + """Create and return a new BuildloggerServer. + + This occurs if "buildlogger" is configured as one of the handler class in the configuration, + return None otherwise. """ for logger_name in (FIXTURE_LOGGER_NAME, TESTS_LOGGER_NAME): logger_info = logging_config[logger_name] for handler_info in logger_info["handlers"]: if handler_info["class"] == "buildlogger": return buildlogger.BuildloggerServer() + return None def configure_loggers(logging_config): + """Configure the loggers.""" buildlogger.BUILDLOGGER_FALLBACK = BaseLogger("buildlogger") # The 'buildlogger' prefix is not added to the fallback logger since the prefix of the original # logger will be there as part of the logged message. 
@@ -39,7 +41,7 @@ def configure_loggers(logging_config): build_logger_server = _build_logger_server(logging_config) fixture_logger = FixtureRootLogger(logging_config, build_logger_server) tests_logger = TestsRootLogger(logging_config, build_logger_server) - global EXECUTOR_LOGGER + global EXECUTOR_LOGGER # pylint: disable=global-statement EXECUTOR_LOGGER = ExecutorRootLogger(logging_config, build_logger_server, fixture_logger, tests_logger) @@ -68,7 +70,7 @@ class BaseLogger(logging.Logger): @property def build_logger_server(self): - """The configured BuildloggerServer instance, or None.""" + """Get the configured BuildloggerServer instance, or None.""" if self._build_logger_server: return self._build_logger_server elif self.parent: @@ -78,7 +80,7 @@ class BaseLogger(logging.Logger): @property def logging_config(self): - """The logging configuration.""" + """Get the logging configuration.""" if self._logging_config: return self._logging_config elif self.parent: @@ -88,6 +90,7 @@ class BaseLogger(logging.Logger): @staticmethod def get_formatter(logger_info): + """Return formatter.""" log_format = logger_info.get("format", _DEFAULT_FORMAT) return formatters.ISO8601Formatter(fmt=log_format) @@ -107,7 +110,7 @@ class RootLogger(BaseLogger): def _configure(self): if self.name not in self.logging_config: - raise ValueError("Logging configuration should contain the %s component", self.name) + raise ValueError("Logging configuration should contain the %s component" % self.name) logger_info = self.logging_config[self.name] formatter = self.get_formatter(logger_info) @@ -158,6 +161,8 @@ class ExecutorRootLogger(RootLogger): class JobLogger(BaseLogger): + """JobLogger class.""" + def __init__(self, test_kind, job_num, parent, fixture_root_logger): """Initialize a JobLogger. @@ -200,7 +205,10 @@ class JobLogger(BaseLogger): class TestLogger(BaseLogger): - def __init__(self, test_name, parent, build_id=None, test_id=None, url=None): + """TestLogger class.""" + + def __init__( # pylint: disable=too-many-arguments + self, test_name, parent, build_id=None, test_id=None, url=None): """Initialize a TestLogger. :param test_name: the test name. @@ -245,6 +253,8 @@ class FixtureRootLogger(RootLogger): class FixtureLogger(BaseLogger): + """FixtureLogger class.""" + def __init__(self, fixture_class, job_num, build_id, fixture_root_logger): """Initialize a FixtureLogger. @@ -277,6 +287,8 @@ class FixtureLogger(BaseLogger): class FixtureNodeLogger(BaseLogger): + """FixtureNodeLogger class.""" + def __init__(self, fixture_class, job_num, node_name, fixture_logger): """Initialize a FixtureNodeLogger. @@ -310,6 +322,8 @@ class TestsRootLogger(RootLogger): class TestQueueLogger(BaseLogger): + """TestQueueLogger class.""" + def __init__(self, test_kind, tests_root_logger): """Initialize a TestQueueLogger. @@ -320,6 +334,8 @@ class TestQueueLogger(BaseLogger): class HookLogger(BaseLogger): + """HookLogger class.""" + def __init__(self, hook_class, fixture_logger, tests_root_logger): """Initialize a HookLogger. @@ -337,9 +353,7 @@ class HookLogger(BaseLogger): def _fallback_buildlogger_handler(include_logger_name=True): - """ - Returns a handler that writes to stderr. - """ + """Return a handler that writes to stderr.""" if include_logger_name: log_format = "[fallback] [%(name)s] %(message)s" else: @@ -353,10 +367,7 @@ def _fallback_buildlogger_handler(include_logger_name=True): def _get_buildlogger_handler_info(logger_info): - """ - Returns the buildlogger handler information if it exists, and None - otherwise. 
- """ + """Return the buildlogger handler information if it exists, and None otherwise.""" for handler_info in logger_info["handlers"]: handler_info = handler_info.copy() if handler_info.pop("class") == "buildlogger": diff --git a/buildscripts/resmokelib/parser.py b/buildscripts/resmokelib/parser.py index 0aeb969688c..d9f40da3e90 100644 --- a/buildscripts/resmokelib/parser.py +++ b/buildscripts/resmokelib/parser.py @@ -1,6 +1,4 @@ -""" -Parser for command line arguments. -""" +"""Parser for command line arguments.""" from __future__ import absolute_import @@ -15,10 +13,8 @@ from . import utils from .. import resmokeconfig -def parse_command_line(): - """ - Parses the command line arguments passed to resmoke.py. - """ +def parse_command_line(): # pylint: disable=too-many-statements + """Parse the command line arguments passed to resmoke.py.""" parser = optparse.OptionParser() @@ -304,9 +300,7 @@ def parse_command_line(): def validate_options(parser, options, args): - """ - Do preliminary validation on the options and error on any invalid options. - """ + """Do preliminary validation on the options and error on any invalid options.""" if options.shell_port is not None and options.shell_conn_string is not None: parser.error("Cannot specify both `shellPort` and `shellConnString`") @@ -318,9 +312,7 @@ def validate_options(parser, options, args): def validate_benchmark_options(): - """ - Some options are incompatible with benchmark test suites, we error out early if any of - these options are specified. + """Error out early if any options are incompatible with benchmark test suites. :return: None """ @@ -338,10 +330,12 @@ def validate_benchmark_options(): def get_logging_config(values): + """Return logging config values.""" return _get_logging_config(values.logger_file) -def update_config_vars(values): +def update_config_vars(values): # pylint: disable=too-many-statements + """Update config vars.""" config = _config.DEFAULTS.copy() # Override `config` with values from command line arguments. @@ -436,10 +430,7 @@ def update_config_vars(values): def _get_logging_config(pathname): - """ - Attempts to read a YAML configuration from 'pathname' that describes - how resmoke.py should log the tests and fixtures. - """ + """Read YAML configuration from 'pathname' how to log tests and fixtures.""" # Named loggers are specified as the basename of the file, without the .yml extension. if not utils.is_yaml_file(pathname) and not os.path.dirname(pathname): @@ -454,17 +445,14 @@ def _get_logging_config(pathname): def _expand_user(pathname): - """ - Wrapper around os.path.expanduser() to do nothing when given None. - """ + """Provide wrapper around os.path.expanduser() to do nothing when given None.""" if pathname is None: return None return os.path.expanduser(pathname) def _tags_from_list(tags_list): - """ - Returns the list of tags from a list of tag parameter values. + """Return the list of tags from a list of tag parameter values. Each parameter value in the list may be a list of comma separated tags, with empty strings ignored. diff --git a/buildscripts/resmokelib/reportfile.py b/buildscripts/resmokelib/reportfile.py index 7dcf5623a6d..00841de2bc9 100644 --- a/buildscripts/resmokelib/reportfile.py +++ b/buildscripts/resmokelib/reportfile.py @@ -1,6 +1,4 @@ -""" -Manages interactions with the report.json file. 
-""" +"""Manage interactions with the report.json file.""" from __future__ import absolute_import @@ -11,10 +9,7 @@ from .testing import report as _report def write(suites): - """ - Writes the combined report of all executions if --reportFile was - specified on the command line. - """ + """Write the combined report of all executions if --reportFile was specified.""" if config.REPORT_FILE is None: return diff --git a/buildscripts/resmokelib/selector.py b/buildscripts/resmokelib/selector.py index d83cddd9142..3014012511d 100644 --- a/buildscripts/resmokelib/selector.py +++ b/buildscripts/resmokelib/selector.py @@ -1,5 +1,4 @@ -""" -Test selection utility. +"""Test selection utility. Defines filtering rules for what tests to include in a suite depending on whether they apply to C++ unit tests, dbtests, or JS tests. @@ -32,15 +31,17 @@ class TestFileExplorer(object): The file related code has been confined to this class for testability. """ - def is_glob_pattern(self, path): - """Indicates if the provided path is a glob pattern. + @staticmethod + def is_glob_pattern(path): + """Indicate if the provided path is a glob pattern. See buildscripts.resmokelib.utils.globstar.is_glob_pattern(). """ return globstar.is_glob_pattern(path) - def iglob(self, pattern): - """Expands the given glob pattern with regard to the current working directory. + @staticmethod + def iglob(pattern): # noqa: D406,D407,D411,D413 + """Expand the given glob pattern with regard to the current working directory. See buildscripts.resmokelib.utils.globstar.iglob(). Returns: @@ -48,8 +49,9 @@ class TestFileExplorer(object): """ return globstar.iglob(pattern) - def jstest_tags(self, file_path): - """Extracts the tags from a JavaScript test file. + @staticmethod + def jstest_tags(file_path): # noqa: D406,D407,D411,D413 + """Extract the tags from a JavaScript test file. See buildscripts.resmokelib.utils.jscomment.get_tags(). Returns: @@ -57,8 +59,9 @@ class TestFileExplorer(object): """ return jscomment.get_tags(file_path) - def read_root_file(self, root_file_path): - """Reads a file containing the list of root test files. + @staticmethod + def read_root_file(root_file_path): # noqa: D406,D407,D411,D413 + """Read a file containing the list of root test files. Args: root_file_path: the path to a file containing the path of each test on a separate line. @@ -72,19 +75,21 @@ class TestFileExplorer(object): tests.append(test_path) return tests - def fnmatchcase(self, name, pattern): - """Indicates if the given name matches the given pattern. + @staticmethod + def fnmatchcase(name, pattern): + """Indicate if the given name matches the given pattern. See buildscripts.resmokelib.utils.fnmatch.fnmatchcase(). """ return fnmatch.fnmatchcase(name, pattern) - def isfile(self, path): - """Indicates if the given path corresponds to an existing file.""" + @staticmethod + def isfile(path): + """Indicate if the given path corresponds to an existing file.""" return os.path.isfile(path) def list_dbtests(self, dbtest_binary): - """Lists the available dbtests suites.""" + """List the available dbtests suites.""" returncode, stdout = self._run_program(dbtest_binary, ["--list"]) if returncode != 0: @@ -92,8 +97,9 @@ class TestFileExplorer(object): return stdout.splitlines() - def _run_program(self, binary, args): - """Runs a program. + @staticmethod + def _run_program(binary, args): # noqa: D406,D407,D411,D413 + """Run a program. Args: binary: the binary to run. 
@@ -108,9 +114,11 @@ class TestFileExplorer(object): return program.returncode, stdout - def parse_tag_file(self, test_kind): - """ - Parses the tag file and return a dict of tagged tests, with the key the filename and the + @staticmethod + def parse_tag_file(test_kind): + """Parse the tag file and return a dict of tagged tests. + + The resulting dict will have as a key the filename and the value a list of tags, i.e., {'file1.js': ['tag1', 'tag2'], 'file2.js': ['tag2', 'tag3']}. """ tagged_tests = collections.defaultdict(list) @@ -141,7 +149,7 @@ class _TestList(object): """ def __init__(self, test_file_explorer, roots, tests_are_files=True): - """Initializes the _TestList with a TestFileExplorer component and a list of root tests.""" + """Initialize the _TestList with a TestFileExplorer component and a list of root tests.""" self._test_file_explorer = test_file_explorer self._tests_are_files = tests_are_files self._roots = self._expand_files(roots) if tests_are_files else roots @@ -159,12 +167,12 @@ class _TestList(object): return expanded_tests def include_files(self, include_files, force=False): - """Filters the test list so that it only includes files matching 'include_files'. + """Filter the test list so that it only includes files matching 'include_files'. - Args: - include_files: a list of paths or glob patterns that match the files to include. - force: if True include the matching files that were previously excluded, otherwise - only include files that match and were not previously excluded from this _TestList. + Args: + include_files: a list of paths or glob patterns that match the files to include. + force: if True include the matching files that were previously excluded, otherwise only + include files that match and were not previously excluded from this _TestList. """ if not self._tests_are_files: raise TypeError("_TestList does not contain files.") @@ -178,8 +186,8 @@ class _TestList(object): if force: self._filtered |= set(self._roots) & expanded_include_files - def exclude_files(self, exclude_files): - """Excludes from the test list the files that match elements from 'exclude_files'. + def exclude_files(self, exclude_files): # noqa: D406,D407,D411,D413 + """Exclude from the test list the files that match elements from 'exclude_files'. Args: exclude_files: a list of paths or glob patterns that match the files to exclude. @@ -201,7 +209,7 @@ class _TestList(object): self._filtered.discard(path) def match_tag_expression(self, tag_expression, get_tags): - """Filters the test list to only include tests that match the tag expression. + """Filter the test list to only include tests that match the tag expression. Args: tag_expression: a callable object that takes a list of tags and indicate if the required @@ -212,11 +220,10 @@ class _TestList(object): self._filtered = {test for test in self._filtered if tag_expression(get_tags(test))} def include_any_pattern(self, patterns): - """ - Filters the test list to only include tests that match any of the given glob patterns. - """ + """Filter the test list to only include tests that match any provided glob patterns.""" def match(test): + """Return True if 'test' matches a pattern.""" for pattern in patterns: if test == pattern or fnmatch.fnmatchcase(test, pattern): return True @@ -225,8 +232,7 @@ class _TestList(object): self._filtered = {test for test in self._filtered if match(test)} def get_tests(self): - """ - Returns the test list as a list(str). + """Return the test list as a list(str). 
The tests are returned in the same order as they are found in the root tests. """ @@ -287,7 +293,7 @@ class _MatchExpression(object): def make_expression(conf): - """Creates a tag matching expression from an expression configuration. + """Create a tag matching expression from an expression configuration. The syntax for the expression configuration is: - expr: str_expr | dict_expr @@ -325,11 +331,10 @@ def _make_expression_list(configs): class _SelectorConfig(object): """Base object to represent the configuration for test selection.""" - def __init__(self, root=None, roots=None, include_files=None, exclude_files=None, - include_tags=None, exclude_tags=None, include_with_any_tags=None, - exclude_with_any_tags=None): - """ - Initializes the _SelectorConfig from the configuration elements. + def __init__( # pylint: disable=too-many-arguments + self, root=None, roots=None, include_files=None, exclude_files=None, include_tags=None, + exclude_tags=None, include_with_any_tags=None, exclude_with_any_tags=None): + """Initialize the _SelectorConfig from the configuration elements. Args: root: the path to a file containing the list of root tests. Incompatible with 'roots'. @@ -367,10 +372,8 @@ class _SelectorConfig(object): return set(list_b) elif list_b is None: return set(list_a) - else: - return set(list_a) | set(list_b) - else: - return None + return set(list_a) | set(list_b) + return None @staticmethod def __make_tags_expression(include_tags, exclude_tags, include_with_any_tags, @@ -389,16 +392,14 @@ class _SelectorConfig(object): if expressions: return _AllOfExpression(expressions) - else: - return None + return None class _Selector(object): """Selection algorithm to select tests matching a selector configuration.""" def __init__(self, test_file_explorer, tests_are_files=True): - """ - Initializes the _Selector. + """Initialize the _Selector. Args: test_file_explorer: a TestFileExplorer instance. @@ -406,7 +407,7 @@ class _Selector(object): self._test_file_explorer = test_file_explorer self._tests_are_files = tests_are_files - def select(self, selector_config): + def select(self, selector_config): # noqa: D406,D407,D411,D413 """Select the test files that match the given configuration. 
Args: @@ -434,17 +435,18 @@ class _Selector(object): test_list.include_files(selector_config.include_files, force=True) return test_list.get_tests() - def get_tags(self, test_file): - """Retrieves the tags associated with the give test file.""" + @staticmethod + def get_tags(test_file): # pylint: disable=unused-argument + """Retrieve the tags associated with the give test file.""" return [] class _JSTestSelectorConfig(_SelectorConfig): """_SelectorConfig subclass for js_test tests.""" - def __init__(self, roots=None, include_files=None, exclude_files=None, - include_with_any_tags=None, exclude_with_any_tags=None, include_tags=None, - exclude_tags=None): + def __init__( # pylint: disable=too-many-arguments + self, roots=None, include_files=None, exclude_files=None, include_with_any_tags=None, + exclude_with_any_tags=None, include_tags=None, exclude_tags=None): _SelectorConfig.__init__(self, roots=roots, include_files=include_files, exclude_files=exclude_files, include_with_any_tags=include_with_any_tags, @@ -460,6 +462,7 @@ class _JSTestSelector(_Selector): self._tags = self._test_file_explorer.parse_tag_file("js_test") def get_tags(self, test_file): + """Return tags from test_file.""" file_tags = self._test_file_explorer.jstest_tags(test_file) if test_file in self._tags: return list(set(file_tags) | set(self._tags[test_file])) @@ -471,6 +474,7 @@ class _CppTestSelectorConfig(_SelectorConfig): def __init__(self, root=config.DEFAULT_INTEGRATION_TEST_LIST, roots=None, include_files=None, exclude_files=None): + """Initialize _CppTestSelectorConfig.""" if roots: # The 'roots' argument is only present when tests are specified on the command line # and in that case they take precedence over the tests in the root file. @@ -485,9 +489,11 @@ class _CppTestSelector(_Selector): """_Selector subclass for cpp_integration_test and cpp_unit_test tests.""" def __init__(self, test_file_explorer): + """Initialize _CppTestSelector.""" _Selector.__init__(self, test_file_explorer) def select(self, selector_config): + """Return selected tests.""" if selector_config.roots: # Tests have been specified on the command line. We use them without additional # filtering. 
@@ -500,6 +506,7 @@ class _DbTestSelectorConfig(_SelectorConfig): """_Selector config subclass for db_test tests.""" def __init__(self, binary=None, roots=None, include_suites=None): + """Initialize _DbTestSelectorConfig.""" _SelectorConfig.__init__(self, roots=roots) self.include_suites = utils.default_if_none(include_suites, []) @@ -517,9 +524,11 @@ class _DbTestSelector(_Selector): """_Selector subclass for db_test tests.""" def __init__(self, test_file_explorer): + """Initialize _DbTestSelector.""" _Selector.__init__(self, test_file_explorer, tests_are_files=False) def select(self, selector_config): + """Return selected tests.""" if selector_config.roots: roots = selector_config.roots else: @@ -550,6 +559,7 @@ class _JsonSchemaTestSelectorConfig(_SelectorConfig): """_SelectorConfig subclass for json_schema_test tests.""" def __init__(self, roots, include_files=None, exclude_files=None): + """Initialize _JsonSchemaTestSelectorConfig.""" _SelectorConfig.__init__(self, roots=roots, include_files=include_files, exclude_files=exclude_files) @@ -558,6 +568,7 @@ class _SleepTestCaseSelectorConfig(_SelectorConfig): """_SelectorConfig subclass for sleep_test tests.""" def __init__(self, roots): + """Initialize _SleepTestCaseSelectorConfig.""" _SelectorConfig.__init__(self, roots=roots) @@ -565,6 +576,7 @@ class _SleepTestCaseSelector(_Selector): """_Selector subclass for sleep_test tests.""" def __init__(self, test_file_explorer): + """Initialize _SleepTestCaseSelector.""" _Selector.__init__(self, test_file_explorer, tests_are_files=False) @@ -596,7 +608,7 @@ _SELECTOR_REGISTRY = { def filter_tests(test_kind, selector_config, test_file_explorer=_DEFAULT_TEST_FILE_EXPLORER): - """Filters the tests according to a specified configuration. + """Filter the tests according to a specified configuration. Args: test_kind: the test kind, one of 'cpp_integration_test', 'cpp_unit_test', 'db_test', diff --git a/buildscripts/resmokelib/sighandler.py b/buildscripts/resmokelib/sighandler.py index 5da9ae52ca1..c67d44eb759 100644 --- a/buildscripts/resmokelib/sighandler.py +++ b/buildscripts/resmokelib/sighandler.py @@ -1,6 +1,4 @@ -""" -Utility to support asynchronously signaling the current process. -""" +"""Utility to support asynchronously signaling the current process.""" from __future__ import absolute_import @@ -12,25 +10,23 @@ import threading import time import traceback -_is_windows = (sys.platform == "win32") -if _is_windows: +_IS_WINDOWS = (sys.platform == "win32") +if _IS_WINDOWS: import win32api import win32event -from . import reportfile -from . import testing +from . import reportfile # pylint: disable=wrong-import-position +from . import testing # pylint: disable=wrong-import-position def register(logger, suites, start_time): - """ - On Windows, set up an event object to wait for signal, otherwise, register a signal handler - for the SIGUSR1 signal. - """ + """Register an event object to wait for signal, or a signal handler for SIGUSR1.""" - def _handle_sigusr1(signum, frame): - """ - Signal handler that will dump the stacks of all threads and - then write out the report file and log suite summaries. + def _handle_sigusr1(signum, frame): # pylint: disable=unused-argument + """Signal handler for SIGUSR1. + + The handler will dump the stacks of all threads and write out the report file and + log suite summaries. 
""" header_msg = "Dumping stacks due to SIGUSR1 signal" @@ -38,9 +34,10 @@ def register(logger, suites, start_time): _dump_and_log(header_msg) def _handle_set_event(event_handle): - """ - Windows event object handler that will dump the stacks of all threads and then write out - the report file and log suite summaries. + """Event object handler for Windows. + + The handler will dump the stacks of all threads and write out the report file and + log suite summaries. """ while True: @@ -58,9 +55,7 @@ def register(logger, suites, start_time): _dump_and_log(header_msg) def _dump_and_log(header_msg): - """ - Dumps the stacks of all threads, writes the report file, and logs the suite summaries. - """ + """Dump the stacks of all threads, write report file, and log suite summaries.""" _dump_stacks(logger, header_msg) reportfile.write(suites) @@ -68,7 +63,7 @@ def register(logger, suites, start_time): # On Windows spawn a thread to wait on an event object for signal to dump stacks. For Cygwin # platforms, we use a signal handler since it supports POSIX signals. - if _is_windows: + if _IS_WINDOWS: # Create unique event_name. event_name = "Global\\Mongo_Python_" + str(os.getpid()) @@ -97,14 +92,12 @@ def register(logger, suites, start_time): def _dump_stacks(logger, header_msg): - """ - Signal handler that will dump the stacks of all threads. - """ + """Signal handler that will dump the stacks of all threads.""" sb = [] sb.append(header_msg) - frames = sys._current_frames() + frames = sys._current_frames() # pylint: disable=protected-access sb.append("Total threads: %d" % (len(frames))) sb.append("") diff --git a/buildscripts/resmokelib/suitesconfig.py b/buildscripts/resmokelib/suitesconfig.py index 18c52683661..87bc1e1e9f5 100644 --- a/buildscripts/resmokelib/suitesconfig.py +++ b/buildscripts/resmokelib/suitesconfig.py @@ -14,9 +14,7 @@ from .. import resmokeconfig def get_named_suites(): - """ - Returns the list of suites available to execute. - """ + """Return the list of suites available to execute.""" # Skip "with_*server" and "no_server" because they do not define any test files to run. executor_only = {"with_server", "with_external_server", "no_server"} @@ -26,8 +24,7 @@ def get_named_suites(): def create_test_membership_map(fail_on_missing_selector=False, test_kind=None): - """ - Returns a dict keyed by test name containing all of the suites that will run that test. + """Return a dict keyed by test name containing all of the suites that will run that test. If 'test_kind' is specified then only the mappings for that kind are returned. Since this iterates through every available suite, it should only be run once. @@ -59,7 +56,7 @@ def create_test_membership_map(fail_on_missing_selector=False, test_kind=None): def get_suites(suite_files, test_files): - """Retrieves the Suite instances based on suite configuration files and override parameters. + """Retrieve the Suite instances based on suite configuration files and override parameters. Args: suite_files: A list of file paths pointing to suite YAML configuration files. For the suites @@ -93,10 +90,7 @@ def _make_suite_roots(files): def _get_suite_config(pathname): - """ - Attempts to read a YAML configuration from 'pathname' that describes - what tests to run and how to run them. 
- """ + """Attempt to read YAML configuration from 'pathname' for the suite.""" return _get_yaml_config("suite", pathname) diff --git a/buildscripts/resmokelib/testing/__init__.py b/buildscripts/resmokelib/testing/__init__.py index e4acff00521..eb58f41f7fe 100644 --- a/buildscripts/resmokelib/testing/__init__.py +++ b/buildscripts/resmokelib/testing/__init__.py @@ -1,7 +1,4 @@ -""" -Extension to the unittest package to support buildlogger and parallel -test execution. -""" +"""Extension to the unittest package to support buildlogger and parallel test execution.""" from __future__ import absolute_import diff --git a/buildscripts/resmokelib/testing/executor.py b/buildscripts/resmokelib/testing/executor.py index f66515ac8b0..3df4a0a5059 100644 --- a/buildscripts/resmokelib/testing/executor.py +++ b/buildscripts/resmokelib/testing/executor.py @@ -1,6 +1,4 @@ -""" -Driver of the test execution framework. -""" +"""Driver of the test execution framework.""" from __future__ import absolute_import @@ -21,8 +19,7 @@ from ..utils import queue as _queue class TestSuiteExecutor(object): - """ - Executes a test suite. + """Execute a test suite. Responsible for setting up and tearing down the fixtures that the tests execute against. @@ -30,11 +27,10 @@ class TestSuiteExecutor(object): _TIMEOUT = 24 * 60 * 60 # =1 day (a long time to have tests run) - def __init__(self, exec_logger, suite, config=None, fixture=None, hooks=None, - archive_instance=None, archive=None): - """ - Initializes the TestSuiteExecutor with the test suite to run. - """ + def __init__( # pylint: disable=too-many-arguments + self, exec_logger, suite, config=None, fixture=None, hooks=None, archive_instance=None, + archive=None): + """Initialize the TestSuiteExecutor with the test suite to run.""" self.logger = exec_logger if _config.SHELL_CONN_STRING is not None: @@ -69,8 +65,7 @@ class TestSuiteExecutor(object): self._jobs = [self._make_job(job_num) for job_num in xrange(jobs_to_start)] def run(self): - """ - Executes the test suite. + """Execute the test suite. Any exceptions that occur during setting up or tearing down a fixture are propagated. @@ -128,9 +123,7 @@ class TestSuiteExecutor(object): self._suite.return_code = return_code def _setup_fixtures(self): - """ - Sets up a fixture for each job. - """ + """Set up a fixture for each job.""" # We reset the internal state of the PortAllocator before calling job.fixture.setup() so # that ports used by the fixture during a test suite run earlier can be reused during this @@ -140,7 +133,7 @@ class TestSuiteExecutor(object): for job in self._jobs: try: job.fixture.setup() - except: + except: # pylint: disable=bare-except self.logger.exception("Encountered an error while setting up %s.", job.fixture) return False @@ -148,16 +141,14 @@ class TestSuiteExecutor(object): for job in self._jobs: try: job.fixture.await_ready() - except: + except: # pylint: disable=bare-except self.logger.exception("Encountered an error while waiting for %s to be ready", job.fixture) return False return True def _run_tests(self, test_queue, teardown_flag): - """ - Starts a thread for each Job instance and blocks until all of - the tests are run. + """Start a thread for each Job instance and block until all of the tests are run. Returns a (combined report, user interrupted) pair, where the report contains the status and timing information of tests run @@ -170,12 +161,12 @@ class TestSuiteExecutor(object): try: # Run each Job instance in its own thread. 
for job in self._jobs: - t = threading.Thread(target=job, args=(test_queue, interrupt_flag), - kwargs=dict(teardown_flag=teardown_flag)) + thr = threading.Thread(target=job, args=(test_queue, interrupt_flag), + kwargs=dict(teardown_flag=teardown_flag)) # Do not wait for tests to finish executing if interrupted by the user. - t.daemon = True - t.start() - threads.append(t) + thr.daemon = True + thr.start() + threads.append(thr) # SERVER-24729 Need to stagger when jobs start to reduce I/O load if there # are many of them. Both the 5 and the 10 are arbitrary. # Currently only enabled on Evergreen. @@ -192,8 +183,8 @@ class TestSuiteExecutor(object): user_interrupted = True else: # Only wait for all the Job instances if not interrupted by the user. - for t in threads: - t.join() + for thr in threads: + thr.join() reports = [job.report for job in self._jobs] combined_report = _report.TestReport.combine(*reports) @@ -204,8 +195,7 @@ class TestSuiteExecutor(object): return (combined_report, user_interrupted) def _teardown_fixtures(self): - """ - Tears down all of the fixtures. + """Tear down all of the fixtures. Returns true if all fixtures were torn down successfully, and false otherwise. @@ -217,15 +207,13 @@ class TestSuiteExecutor(object): except errors.ServerFailure as err: self.logger.warn("Teardown of %s was not successful: %s", job.fixture, err) success = False - except: + except: # pylint: disable=bare-except self.logger.exception("Encountered an error while tearing down %s.", job.fixture) success = False return success def _make_fixture(self, job_num, job_logger): - """ - Creates a fixture for a job. - """ + """Create a fixture for a job.""" fixture_config = {} fixture_class = fixtures.NOOP_FIXTURE_CLASS @@ -238,10 +226,8 @@ class TestSuiteExecutor(object): return fixtures.make_fixture(fixture_class, fixture_logger, job_num, **fixture_config) - def _make_hooks(self, job_num, fixture): - """ - Creates the hooks for the job's fixture. - """ + def _make_hooks(self, fixture): + """Create the hooks for the job's fixture.""" hooks = [] @@ -256,22 +242,18 @@ class TestSuiteExecutor(object): return hooks def _make_job(self, job_num): - """ - Returns a Job instance with its own fixture, hooks, and test - report. - """ + """Return a Job instance with its own fixture, hooks, and test report.""" job_logger = self.logger.new_job_logger(self._suite.test_kind, job_num) fixture = self._make_fixture(job_num, job_logger) - hooks = self._make_hooks(job_num, fixture) + hooks = self._make_hooks(fixture) report = _report.TestReport(job_logger, self._suite.options) return _job.Job(job_logger, fixture, hooks, report, self.archival, self._suite.options) def _make_test_queue(self): - """ - Returns a queue of TestCase instances. + """Return a queue of TestCase instances. Use a multi-consumer queue instead of a unittest.TestSuite so that the test cases can be dispatched to multiple threads. diff --git a/buildscripts/resmokelib/testing/fixtures/__init__.py b/buildscripts/resmokelib/testing/fixtures/__init__.py index e59a05c9754..87662625f7f 100644 --- a/buildscripts/resmokelib/testing/fixtures/__init__.py +++ b/buildscripts/resmokelib/testing/fixtures/__init__.py @@ -1,6 +1,4 @@ -""" -Fixtures for executing JSTests against. 
-""" +"""Fixture for executing JSTests against.""" from __future__ import absolute_import @@ -12,4 +10,4 @@ NOOP_FIXTURE_CLASS = _NoOpFixture.REGISTERED_NAME # We dynamically load all modules in the fixtures/ package so that any Fixture classes declared # within them are automatically registered. -_autoloader.load_all_modules(name=__name__, path=__path__) +_autoloader.load_all_modules(name=__name__, path=__path__) # type: ignore diff --git a/buildscripts/resmokelib/testing/fixtures/interface.py b/buildscripts/resmokelib/testing/fixtures/interface.py index 0d80907e680..9b4e69c1128 100644 --- a/buildscripts/resmokelib/testing/fixtures/interface.py +++ b/buildscripts/resmokelib/testing/fixtures/interface.py @@ -1,6 +1,4 @@ -""" -Interface of the different fixtures for executing JSTests against. -""" +"""Interface of the different fixtures for executing JSTests against.""" from __future__ import absolute_import @@ -16,13 +14,11 @@ from ... import logging from ... import utils from ...utils import registry -_FIXTURES = {} +_FIXTURES = {} # type: ignore def make_fixture(class_name, *args, **kwargs): - """ - Factory function for creating Fixture instances. - """ + """Provide factory function for creating Fixture instances.""" if class_name not in _FIXTURES: raise ValueError("Unknown fixture class '%s'" % class_name) @@ -30,20 +26,16 @@ def make_fixture(class_name, *args, **kwargs): class Fixture(object): - """ - Base class for all fixtures. - """ + """Base class for all fixtures.""" - __metaclass__ = registry.make_registry_metaclass(_FIXTURES) + __metaclass__ = registry.make_registry_metaclass(_FIXTURES) # type: ignore # We explicitly set the 'REGISTERED_NAME' attribute so that PyLint realizes that the attribute # is defined for all subclasses of Fixture. REGISTERED_NAME = "Fixture" def __init__(self, logger, job_num, dbpath_prefix=None): - """ - Initializes the fixture with a logger instance. - """ + """Initialize the fixture with a logger instance.""" if not isinstance(logger, logging.Logger): raise TypeError("logger must be a Logger instance") @@ -61,20 +53,15 @@ class Fixture(object): self._dbpath_prefix = os.path.join(dbpath_prefix, "job{}".format(self.job_num)) def setup(self): - """ - Creates the fixture. - """ + """Create the fixture.""" pass def await_ready(self): - """ - Blocks until the fixture can be used for testing. - """ + """Block until the fixture can be used for testing.""" pass - def teardown(self, finished=False): - """ - Destroys the fixture. + def teardown(self, finished=False): # noqa + """Destroy the fixture. The fixture's logging handlers are closed if 'finished' is true, which should happen when setup() won't be called again. @@ -92,9 +79,8 @@ class Fixture(object): # want the logs to eventually get flushed. logging.flush.close_later(handler) - def _do_teardown(self): - """ - Destroys the fixture. + def _do_teardown(self): # noqa + """Destroy the fixture. This method must be implemented by subclasses. @@ -103,36 +89,32 @@ class Fixture(object): """ pass - def is_running(self): - """ - Returns true if the fixture is still operating and more tests - can be run, and false otherwise. - """ + def is_running(self): # pylint: disable=no-self-use + """Return true if the fixture is still operating and more tests and can be run.""" return True def get_dbpath_prefix(self): + """Return dbpath prefix.""" return self._dbpath_prefix def get_internal_connection_string(self): - """ - Returns the connection string for this fixture. 
-        driver connection string, but a connection string of the format
+        """Return the connection string for this fixture.
+
+        This is NOT a driver connection string, but a connection string of the format
         expected by the mongo::ConnectionString class.
         """
         raise NotImplementedError("get_connection_string must be implemented by Fixture subclasses")

     def get_driver_connection_url(self):
-        """
-        Return the mongodb connection string as defined here:
+        """Return the mongodb connection string as defined below.
+
         https://docs.mongodb.com/manual/reference/connection-string/
         """
         raise NotImplementedError(
             "get_driver_connection_url must be implemented by Fixture subclasses")

     def mongo_client(self, read_preference=pymongo.ReadPreference.PRIMARY, timeout_millis=30000):
-        """
-        Returns a pymongo.MongoClient connecting to this fixture with a read
-        preference of 'read_preference'.
+        """Return a pymongo.MongoClient connecting to this fixture with specified 'read_preference'.

         The PyMongo driver will wait up to 'timeout_millis' milliseconds
         before concluding that the server is unavailable.
@@ -154,30 +136,23 @@ class Fixture(object):


 class ReplFixture(Fixture):
-    """
-    Base class for all fixtures that support replication.
-    """
+    """Base class for all fixtures that support replication."""

-    REGISTERED_NAME = registry.LEAVE_UNREGISTERED
+    REGISTERED_NAME = registry.LEAVE_UNREGISTERED  # type: ignore

     AWAIT_REPL_TIMEOUT_MINS = 5

     def get_primary(self):
-        """
-        Returns the primary of a replica set, or the master of a
-        master-slave deployment.
-        """
+        """Return the primary of a replica set."""
         raise NotImplementedError("get_primary must be implemented by ReplFixture subclasses")

     def get_secondaries(self):
-        """
-        Returns a list containing the secondaries of a replica set, or
-        the slave of a master-slave deployment.
-        """
+        """Return a list containing the secondaries of a replica set."""
         raise NotImplementedError("get_secondaries must be implemented by ReplFixture subclasses")

     def retry_until_wtimeout(self, insert_fn):
-        """
+        """Retry until wtimeout is reached.
+
         Given a callback function representing an insert operation on
         the primary, handle any connection failures, and keep retrying
         the operation for up to 'AWAIT_REPL_TIMEOUT_MINS' minutes.
@@ -221,9 +196,11 @@ class NoOpFixture(Fixture):

     REGISTERED_NAME = "NoOpFixture"

     def get_internal_connection_string(self):
+        """Return the internal connection string."""
         return None

     def get_driver_connection_url(self):
+        """Return the driver connection URL."""
         return None


@@ -231,7 +208,7 @@ class FixtureTeardownHandler(object):
     """A helper class used to teardown nodes inside a cluster and keep track of errors."""

     def __init__(self, logger):
-        """Initializes a FixtureTeardownHandler.
+        """Initialize a FixtureTeardownHandler.

         Args:
             logger: A logger to use to log teardown activity.
@@ -241,19 +218,18 @@ class FixtureTeardownHandler(object):
         self._message = None

     def was_successful(self):
-        """Indicates whether the teardowns performed by this instance were all successful."""
+        """Indicate whether the teardowns performed by this instance were all successful."""
         return self._success

     def get_error_message(self):
-        """
-        Retrieves the combined error message for all the teardown failures or None if all the
-        teardowns were successful.
+        """Retrieve the combined error message for all the teardown failures.
+
+        Return None if all the teardowns were successful.
""" return self._message - def teardown(self, fixture, name): - """ - Tears down the given fixture and logs errors instead of raising a ServerFailure exception. + def teardown(self, fixture, name): # noqa: D406,D407,D411,D413 + """Tear down the given fixture and log errors instead of raising a ServerFailure exception. Args: fixture: The fixture to tear down. diff --git a/buildscripts/resmokelib/testing/fixtures/replicaset.py b/buildscripts/resmokelib/testing/fixtures/replicaset.py index a554c6a7044..95f3b067cd2 100644 --- a/buildscripts/resmokelib/testing/fixtures/replicaset.py +++ b/buildscripts/resmokelib/testing/fixtures/replicaset.py @@ -1,6 +1,4 @@ -""" -Replica set fixture for executing JSTests against. -""" +"""Replica set fixture for executing JSTests against.""" from __future__ import absolute_import @@ -17,19 +15,19 @@ from ... import errors from ... import utils -class ReplicaSetFixture(interface.ReplFixture): - """ - Fixture which provides JSTests with a replica set to run against. - """ +class ReplicaSetFixture(interface.ReplFixture): # pylint: disable=too-many-instance-attributes + """Fixture which provides JSTests with a replica set to run against.""" # Error response codes copied from mongo/base/error_codes.err. _NODE_NOT_FOUND = 74 - def __init__(self, logger, job_num, mongod_executable=None, mongod_options=None, - dbpath_prefix=None, preserve_dbpath=False, num_nodes=2, - start_initial_sync_node=False, write_concern_majority_journal_default=None, - auth_options=None, replset_config_options=None, voting_secondaries=None, - all_nodes_electable=False, use_replica_set_connection_string=None): + def __init__( # pylint: disable=too-many-arguments + self, logger, job_num, mongod_executable=None, mongod_options=None, dbpath_prefix=None, + preserve_dbpath=False, num_nodes=2, start_initial_sync_node=False, + write_concern_majority_journal_default=None, auth_options=None, + replset_config_options=None, voting_secondaries=None, all_nodes_electable=False, + use_replica_set_connection_string=None): + """Initialize ReplicaSetFixture.""" interface.ReplFixture.__init__(self, logger, job_num, dbpath_prefix=dbpath_prefix) @@ -71,7 +69,8 @@ class ReplicaSetFixture(interface.ReplFixture): self.initial_sync_node = None self.initial_sync_node_idx = -1 - def setup(self): + def setup(self): # pylint: disable=too-many-branches,too-many-statements + """Set up the replica set.""" self.replset_name = self.mongod_options.get("replSet", "rs") if not self.nodes: @@ -113,7 +112,7 @@ class ReplicaSetFixture(interface.ReplFixture): "hidden": 1, "votes": 0 }) - config = {"_id": self.replset_name} + repl_config = {"_id": self.replset_name} client = self.nodes[0].mongo_client() if self.auth_options is not None: @@ -127,33 +126,33 @@ class ReplicaSetFixture(interface.ReplFixture): return if self.write_concern_majority_journal_default is not None: - config[ + repl_config[ "writeConcernMajorityJournalDefault"] = self.write_concern_majority_journal_default else: server_status = client.admin.command({"serverStatus": 1}) cmd_line_opts = client.admin.command({"getCmdLineOpts": 1}) if not (server_status["storageEngine"]["persistent"] and cmd_line_opts["parsed"].get( "storage", {}).get("journal", {}).get("enabled", True)): - config["writeConcernMajorityJournalDefault"] = False + repl_config["writeConcernMajorityJournalDefault"] = False if self.replset_config_options.get("configsvr", False): - config["configsvr"] = True + repl_config["configsvr"] = True if self.replset_config_options.get("settings"): replset_settings = 
self.replset_config_options["settings"]
-            config["settings"] = replset_settings
+            repl_config["settings"] = replset_settings

         # If secondaries vote, all nodes are not electable, and no election timeout was specified,
         # increase the election timeout to 24 hours to prevent elections.
         if self.voting_secondaries and not self.all_nodes_electable:
-            config.setdefault("settings", {})
-            if "electionTimeoutMillis" not in config["settings"]:
-                config["settings"]["electionTimeoutMillis"] = 24 * 60 * 60 * 1000
+            repl_config.setdefault("settings", {})
+            if "electionTimeoutMillis" not in repl_config["settings"]:
+                repl_config["settings"]["electionTimeoutMillis"] = 24 * 60 * 60 * 1000

         # Start up a single node replica set then reconfigure to the correct size (if the config
         # contains more than 1 node), so the primary is elected more quickly.
-        config["members"] = [members[0]]
-        self.logger.info("Issuing replSetInitiate command: %s", config)
-        self._configure_repl_set(client, {"replSetInitiate": config})
+        repl_config["members"] = [members[0]]
+        self.logger.info("Issuing replSetInitiate command: %s", repl_config)
+        self._configure_repl_set(client, {"replSetInitiate": repl_config})
         self._await_primary()

         if self.nodes[1:]:
@@ -161,10 +160,10 @@
             # command.
             for node in self.nodes[1:]:
                 node.await_ready()
-            config["version"] = 2
-            config["members"] = members
-            self.logger.info("Issuing replSetReconfig command: %s", config)
-            self._configure_repl_set(client, {"replSetReconfig": config})
+            repl_config["version"] = 2
+            repl_config["members"] = members
+            self.logger.info("Issuing replSetReconfig command: %s", repl_config)
+            self._configure_repl_set(client, {"replSetReconfig": repl_config})
             self._await_secondaries()

     def _configure_repl_set(self, client, cmd_obj):
@@ -194,6 +193,7 @@
         time.sleep(5)  # Wait a little bit before trying again.

     def await_ready(self):
+        """Wait for replica set to be ready."""
         self._await_primary()
         self._await_secondaries()
@@ -254,6 +254,7 @@
         raise errors.ServerFailure(teardown_handler.get_error_message())

     def is_running(self):
+        """Return True if all nodes in the replica set are running."""
         running = all(node.is_running() for node in self.nodes)

         if self.initial_sync_node:
@@ -261,7 +262,8 @@
         return running

-    def get_primary(self, timeout_secs=30):
+    def get_primary(self, timeout_secs=30):  # pylint: disable=arguments-differ
+        """Return the primary from a replica set."""
         if not self.all_nodes_electable:
             # The primary is always the first element of the 'nodes' list because all other members
             # of the replica set are configured with priority=0.
@@ -299,17 +301,16 @@
         raise errors.ServerFailure(msg)

     def get_secondaries(self):
+        """Return a list of secondaries from the replica set."""
         primary = self.get_primary()
         return [node for node in self.nodes if node.port != primary.port]

     def get_initial_sync_node(self):
+        """Return initial sync node from the replica set."""
         return self.initial_sync_node

     def _new_mongod(self, index, replset_name):
-        """
-        Returns a standalone.MongoDFixture configured to be used as a
-        replica-set member of 'replset_name'.
- """ + """Return a standalone.MongoDFixture configured to be used as replica-set member.""" mongod_logger = self._get_logger_for_mongod(index) mongod_options = self.mongod_options.copy() @@ -321,9 +322,9 @@ class ReplicaSetFixture(interface.ReplFixture): mongod_options=mongod_options, preserve_dbpath=self.preserve_dbpath) def _get_logger_for_mongod(self, index): - """ - Returns a new logging.Logger instance for use as the primary, secondary, or initial - sync member of a replica-set. + """Return a new logging.Logger instance. + + The instance is used as the primary, secondary, or initial sync member of a replica-set. """ if index == self.initial_sync_node_idx: @@ -339,6 +340,7 @@ class ReplicaSetFixture(interface.ReplFixture): return self.logger.new_fixture_node_logger(node_name) def get_internal_connection_string(self): + """Return the internal connection string.""" if self.replset_name is None: raise ValueError("Must call setup() before calling get_internal_connection_string()") @@ -348,6 +350,7 @@ class ReplicaSetFixture(interface.ReplFixture): return self.replset_name + "/" + ",".join(conn_strs) def get_driver_connection_url(self): + """Return the driver connection URL.""" if self.replset_name is None: raise ValueError("Must call setup() before calling get_driver_connection_url()") diff --git a/buildscripts/resmokelib/testing/fixtures/shardedcluster.py b/buildscripts/resmokelib/testing/fixtures/shardedcluster.py index 5e94b133708..5e5662fbd7f 100644 --- a/buildscripts/resmokelib/testing/fixtures/shardedcluster.py +++ b/buildscripts/resmokelib/testing/fixtures/shardedcluster.py @@ -1,6 +1,4 @@ -""" -Sharded cluster fixture for executing JSTests against. -""" +"""Sharded cluster fixture for executing JSTests against.""" from __future__ import absolute_import @@ -20,24 +18,19 @@ from ... import utils from ...utils import registry -class ShardedClusterFixture(interface.Fixture): - """ - Fixture which provides JSTests with a sharded cluster to run - against. - """ +class ShardedClusterFixture(interface.Fixture): # pylint: disable=too-many-instance-attributes + """Fixture which provides JSTests with a sharded cluster to run against.""" _CONFIGSVR_REPLSET_NAME = "config-rs" _SHARD_REPLSET_NAME_PREFIX = "shard-rs" - def __init__(self, logger, job_num, mongos_executable=None, mongos_options=None, - mongod_executable=None, mongod_options=None, dbpath_prefix=None, - preserve_dbpath=False, num_shards=1, num_rs_nodes_per_shard=None, - separate_configsvr=True, enable_sharding=None, enable_balancer=True, - auth_options=None, configsvr_options=None, shard_options=None): - """ - Initializes ShardedClusterFixture with the different options to - the mongod and mongos processes. 
- """ + def __init__( # pylint: disable=too-many-arguments,too-many-locals + self, logger, job_num, mongos_executable=None, mongos_options=None, + mongod_executable=None, mongod_options=None, dbpath_prefix=None, preserve_dbpath=False, + num_shards=1, num_rs_nodes_per_shard=None, separate_configsvr=True, + enable_sharding=None, enable_balancer=True, auth_options=None, configsvr_options=None, + shard_options=None): + """Initialize ShardedClusterFixture with different options for the cluster processes.""" interface.Fixture.__init__(self, logger, job_num, dbpath_prefix=dbpath_prefix) @@ -65,6 +58,7 @@ class ShardedClusterFixture(interface.Fixture): self.shards = [] def setup(self): + """Set up the sharded cluster.""" if self.separate_configsvr: if self.configsvr is None: self.configsvr = self._new_configsvr() @@ -87,6 +81,7 @@ class ShardedClusterFixture(interface.Fixture): shard.setup() def await_ready(self): + """Block until the fixture can be used for testing.""" # Wait for the config server if self.configsvr is not None: self.configsvr.await_ready() @@ -130,9 +125,7 @@ class ShardedClusterFixture(interface.Fixture): primary.admin.command({"refreshLogicalSessionCacheNow": 1}) def _do_teardown(self): - """ - Shuts down the sharded cluster. - """ + """Shut down the sharded cluster.""" self.logger.info("Stopping all members of the sharded cluster...") running_at_start = self.is_running() @@ -158,28 +151,24 @@ class ShardedClusterFixture(interface.Fixture): raise errors.ServerFailure(teardown_handler.get_error_message()) def is_running(self): - """ - Returns true if the config server, all shards, and the mongos - are all still operating, and false otherwise. - """ + """Return true if the all nodes in the cluster are all still operating.""" return (self.configsvr is not None and self.configsvr.is_running() and all(shard.is_running() for shard in self.shards) and self.mongos is not None and self.mongos.is_running()) def get_internal_connection_string(self): + """Return the internal connection string.""" if self.mongos is None: raise ValueError("Must call setup() before calling get_internal_connection_string()") return self.mongos.get_internal_connection_string() def get_driver_connection_url(self): + """Return the driver connection URL.""" return "mongodb://" + self.get_internal_connection_string() def _new_configsvr(self): - """ - Returns a replicaset.ReplicaSetFixture configured to be used as - the config server of a sharded cluster. - """ + """Return a replicaset.ReplicaSetFixture configured as the config server.""" mongod_logger = self.logger.new_fixture_node_logger("configsvr") @@ -207,10 +196,7 @@ class ShardedClusterFixture(interface.Fixture): **configsvr_options) def _new_rs_shard(self, index, num_rs_nodes_per_shard): - """ - Returns a replicaset.ReplicaSetFixture configured to be used as a - shard in a sharded cluster. - """ + """Return a replicaset.ReplicaSetFixture configured as a shard in a sharded cluster.""" mongod_logger = self.logger.new_fixture_node_logger("shard{}".format(index)) @@ -236,10 +222,7 @@ class ShardedClusterFixture(interface.Fixture): replset_config_options=replset_config_options, **shard_options) def _new_standalone_shard(self, index): - """ - Returns a standalone.MongoDFixture configured to be used as a - shard in a sharded cluster. 
- """ + """Return a standalone.MongoDFixture configured as a shard in a sharded cluster.""" mongod_logger = self.logger.new_fixture_node_logger("shard{}".format(index)) @@ -258,10 +241,7 @@ class ShardedClusterFixture(interface.Fixture): mongod_options=mongod_options, preserve_dbpath=preserve_dbpath, **shard_options) def _new_mongos(self): - """ - Returns a _MongoSFixture configured to be used as the mongos for - a sharded cluster. - """ + """Return a _MongoSFixture configured to be used as the mongos for a sharded cluster.""" mongos_logger = self.logger.new_fixture_node_logger("mongos") @@ -277,11 +257,9 @@ class ShardedClusterFixture(interface.Fixture): def _add_shard(self, client, shard): """ - Add the specified program as a shard by executing the addShard - command. + Add the specified program as a shard by executing the addShard command. - See https://docs.mongodb.org/manual/reference/command/addShard - for more details. + See https://docs.mongodb.org/manual/reference/command/addShard for more details. """ connection_string = shard.get_internal_connection_string() @@ -290,13 +268,12 @@ class ShardedClusterFixture(interface.Fixture): class _MongoSFixture(interface.Fixture): - """ - Fixture which provides JSTests with a mongos to connect to. - """ + """Fixture which provides JSTests with a mongos to connect to.""" - REGISTERED_NAME = registry.LEAVE_UNREGISTERED + REGISTERED_NAME = registry.LEAVE_UNREGISTERED # type: ignore def __init__(self, logger, job_num, mongos_executable=None, mongos_options=None): + """Initialize _MongoSFixture.""" interface.Fixture.__init__(self, logger, job_num) @@ -309,6 +286,7 @@ class _MongoSFixture(interface.Fixture): self.port = None def setup(self): + """Set up the sharded cluster.""" if "port" not in self.mongos_options: self.mongos_options["port"] = core.network.PortAllocator.next_fixture_port(self.job_num) self.port = self.mongos_options["port"] @@ -327,6 +305,7 @@ class _MongoSFixture(interface.Fixture): self.mongos = mongos def await_ready(self): + """Block until the fixture can be used for testing.""" deadline = time.time() + standalone.MongoDFixture.AWAIT_READY_TIMEOUT_SECS # Wait until the mongos is accepting connections. The retry logic is necessary to support @@ -383,13 +362,16 @@ class _MongoSFixture(interface.Fixture): self.port, self.mongos.pid, exit_code)) def is_running(self): + """Return true if the cluster is still operating.""" return self.mongos is not None and self.mongos.poll() is None def get_internal_connection_string(self): + """Return the internal connection string.""" if self.mongos is None: raise ValueError("Must call setup() before calling get_internal_connection_string()") return "localhost:%d" % self.port def get_driver_connection_url(self): + """Return the driver connection URL.""" return "mongodb://" + self.get_internal_connection_string() diff --git a/buildscripts/resmokelib/testing/fixtures/standalone.py b/buildscripts/resmokelib/testing/fixtures/standalone.py index 0d761478cd8..3cae2e7884f 100644 --- a/buildscripts/resmokelib/testing/fixtures/standalone.py +++ b/buildscripts/resmokelib/testing/fixtures/standalone.py @@ -1,6 +1,4 @@ -""" -Standalone mongod fixture for executing JSTests against. -""" +"""Standalone mongod fixture for executing JSTests against.""" from __future__ import absolute_import @@ -20,15 +18,14 @@ from ... import utils class MongoDFixture(interface.Fixture): - """ - Fixture which provides JSTests with a standalone mongod to run - against. 
- """ + """Fixture which provides JSTests with a standalone mongod to run against.""" AWAIT_READY_TIMEOUT_SECS = 300 - def __init__(self, logger, job_num, mongod_executable=None, mongod_options=None, - dbpath_prefix=None, preserve_dbpath=False): + def __init__( # pylint: disable=too-many-arguments + self, logger, job_num, mongod_executable=None, mongod_options=None, dbpath_prefix=None, + preserve_dbpath=False): + """Initialize MongoDFixture with different options for the mongod process.""" interface.Fixture.__init__(self, logger, job_num, dbpath_prefix=dbpath_prefix) @@ -51,6 +48,7 @@ class MongoDFixture(interface.Fixture): self.port = None def setup(self): + """Set up the mongod.""" if not self.preserve_dbpath: shutil.rmtree(self._dbpath, ignore_errors=True) @@ -78,6 +76,7 @@ class MongoDFixture(interface.Fixture): self.mongod = mongod def await_ready(self): + """Block until the fixture can be used for testing.""" deadline = time.time() + MongoDFixture.AWAIT_READY_TIMEOUT_SECS # Wait until the mongod is accepting connections. The retry logic is necessary to support @@ -134,17 +133,20 @@ class MongoDFixture(interface.Fixture): self.port, self.mongod.pid, exit_code)) def is_running(self): + """Return true if the mongod is still operating.""" return self.mongod is not None and self.mongod.poll() is None def get_dbpath_prefix(self): - """ Returns the _dbpath, as this is the root of the data directory. """ + """Return the _dbpath, as this is the root of the data directory.""" return self._dbpath def get_internal_connection_string(self): + """Return the internal connection string.""" if self.mongod is None: raise ValueError("Must call setup() before calling get_internal_connection_string()") return "localhost:%d" % self.port def get_driver_connection_url(self): + """Return the driver connection URL.""" return "mongodb://" + self.get_internal_connection_string() diff --git a/buildscripts/resmokelib/testing/fixtures/yesfixture.py b/buildscripts/resmokelib/testing/fixtures/yesfixture.py index 6ba62c58e79..618ba8a48cc 100644 --- a/buildscripts/resmokelib/testing/fixtures/yesfixture.py +++ b/buildscripts/resmokelib/testing/fixtures/yesfixture.py @@ -1,6 +1,4 @@ -""" -Fixture for generating lots of log messages. -""" +"""Fixture for generating lots of log messages.""" from __future__ import absolute_import @@ -10,18 +8,18 @@ from . import interface from ...core import programs -class YesFixture(interface.Fixture): - """ - Fixture which spawns potentially several 'yes' executables to generate lots of log messages. 
- """ +class YesFixture(interface.Fixture): # pylint: disable=abstract-method + """Fixture which spawns several 'yes' executables to generate lots of log messages.""" def __init__(self, logger, job_num, num_instances=1, message_length=100): + """Initialize YesFixture.""" interface.Fixture.__init__(self, logger, job_num) self.__processes = [None] * num_instances self.__message = "y" * message_length def setup(self): + """Start the yes processes.""" for (i, process) in enumerate(self.__processes): process = self._make_process(i) @@ -65,4 +63,5 @@ class YesFixture(interface.Fixture): return success def is_running(self): + """Return true if the yes processes are running.""" return all(process is not None and process.poll() is None for process in self.__processes) diff --git a/buildscripts/resmokelib/testing/hook_test_archival.py b/buildscripts/resmokelib/testing/hook_test_archival.py index 315247261d6..4189dd0ac43 100644 --- a/buildscripts/resmokelib/testing/hook_test_archival.py +++ b/buildscripts/resmokelib/testing/hook_test_archival.py @@ -1,6 +1,4 @@ -""" -Enables supports for archiving tests or hooks. -""" +"""Enable support for archiving tests or hooks.""" from __future__ import absolute_import @@ -13,11 +11,10 @@ from ..utils import globstar class HookTestArchival(object): - """ - Archives hooks and tests to S3. - """ + """Archive hooks and tests to S3.""" def __init__(self, suite, hooks, archive_instance, archive_config): + """Initialize HookTestArchival.""" self.archive_instance = archive_instance archive_config = utils.default_if_none(archive_config, {}) @@ -45,11 +42,11 @@ class HookTestArchival(object): self._lock = threading.Lock() def _should_archive(self, success): - """ Return True if failed test or 'on_success' is True. """ + """Return True if failed test or 'on_success' is True.""" return not success or self.on_success def _archive_hook(self, logger, hook, test, success): - """ Helper to archive hooks. """ + """Provide helper to archive hooks.""" hook_match = hook.REGISTERED_NAME in self.hooks if not hook_match or not self._should_archive(success): return @@ -58,7 +55,7 @@ class HookTestArchival(object): self._archive_hook_or_test(logger, test_name, test) def _archive_test(self, logger, test, success): - """ Helper to archive tests. """ + """Provide helper to archive tests.""" test_name = test.test_name test_match = False for arch_test in self.tests: @@ -72,7 +69,7 @@ class HookTestArchival(object): self._archive_hook_or_test(logger, test_name, test) def archive(self, logger, test, success, hook=None): - """ Archives data files for hooks or tests. """ + """Archive data files for hooks or tests.""" if not config.ARCHIVE_FILE or not self.archive_instance: return if hook: @@ -81,7 +78,7 @@ class HookTestArchival(object): self._archive_test(logger, test, success) def _archive_hook_or_test(self, logger, test_name, test): - """ Trigger archive of data files for a test or hook. """ + """Trigger archive of data files for a test or hook.""" with self._lock: # Test repeat number is how many times the particular test has been archived. 
diff --git a/buildscripts/resmokelib/testing/hooks/__init__.py b/buildscripts/resmokelib/testing/hooks/__init__.py
index 87efcd1c964..82772aa25da 100644
--- a/buildscripts/resmokelib/testing/hooks/__init__.py
+++ b/buildscripts/resmokelib/testing/hooks/__init__.py
@@ -12,4 +12,4 @@ from ...utils import autoloader as _autoloader

 # We dynamically load all modules in the hooks/ package so that any Hook classes declared
 # within them are automatically registered.
-_autoloader.load_all_modules(name=__name__, path=__path__)
+_autoloader.load_all_modules(name=__name__, path=__path__)  # type: ignore
diff --git a/buildscripts/resmokelib/testing/hooks/check_primary.py b/buildscripts/resmokelib/testing/hooks/check_primary.py
index c6beabbee84..f2624496d54 100644
--- a/buildscripts/resmokelib/testing/hooks/check_primary.py
+++ b/buildscripts/resmokelib/testing/hooks/check_primary.py
@@ -1,6 +1,4 @@
-"""
-Testing hook for verifying that the primary has not stepped down or changed.
-"""
+"""Test hook for verifying that the primary has not stepped down or changed."""

 from __future__ import absolute_import

@@ -15,6 +13,7 @@ class CheckPrimary(interface.Hook):
     """Hook that checks that the primary is still primary after the test."""

     def __init__(self, hook_logger, rs_fixture):
+        """Initialize CheckPrimary."""
         description = "Verify that the primary has not stepped down or changed"
         interface.Hook.__init__(self, hook_logger, rs_fixture, description)

@@ -39,9 +38,11 @@ class CheckPrimary(interface.Hook):
             raise no_primary_err

     def before_test(self, test, test_report):
+        """Record the primary URL before the test runs."""
         self._primary_url = self._get_primary_url()

     def after_test(self, test, test_report):
+        """Check that the primary has not changed after the test."""
         new_primary_url = self._get_primary_url()

         if new_primary_url != self._primary_url:
diff --git a/buildscripts/resmokelib/testing/hooks/cleanup.py b/buildscripts/resmokelib/testing/hooks/cleanup.py
index 39011ec90fd..ebbda2f1edb 100644
--- a/buildscripts/resmokelib/testing/hooks/cleanup.py
+++ b/buildscripts/resmokelib/testing/hooks/cleanup.py
@@ -1,6 +1,4 @@
-"""
-Testing hook for cleaning up data files created by the fixture.
-"""
+"""Test hook for cleaning up data files created by the fixture."""

 from __future__ import absolute_import

@@ -10,14 +8,15 @@ from . import interface


 class CleanEveryN(interface.Hook):
-    """
-    Restarts the fixture after it has ran 'n' tests.
+    """Restart the fixture after it has run 'n' tests.
+
     On mongod-related fixtures, this will clear the dbpath.
""" DEFAULT_N = 20 def __init__(self, hook_logger, fixture, n=DEFAULT_N): + """Initialize CleanEveryN.""" description = "CleanEveryN (restarts the fixture after running `n` tests)" interface.Hook.__init__(self, hook_logger, fixture, description) @@ -27,10 +26,11 @@ class CleanEveryN(interface.Hook): " the fixture after each test instead of after every %d.", n) n = 1 - self.n = n + self.n = n # pylint: disable=invalid-name self.tests_run = 0 def after_test(self, test, test_report): + """After test cleanup.""" self.tests_run += 1 if self.tests_run < self.n: return @@ -42,7 +42,10 @@ class CleanEveryN(interface.Hook): class CleanEveryNTestCase(interface.DynamicTestCase): + """CleanEveryNTestCase class.""" + def run_test(self): + """Execute test hook.""" try: self.logger.info("%d tests have been run against the fixture, stopping it...", self._hook.tests_run) diff --git a/buildscripts/resmokelib/testing/hooks/combine_benchmark_results.py b/buildscripts/resmokelib/testing/hooks/combine_benchmark_results.py index 5df43363330..48d476ceb9a 100644 --- a/buildscripts/resmokelib/testing/hooks/combine_benchmark_results.py +++ b/buildscripts/resmokelib/testing/hooks/combine_benchmark_results.py @@ -12,7 +12,8 @@ from buildscripts.resmokelib.testing.hooks import interface class CombineBenchmarkResults(interface.Hook): - """ + """CombineBenchmarkResults class. + The CombineBenchmarkResults hook combines test results from individual benchmark files to a single file. This is useful for generating the json file to feed into the Evergreen performance @@ -22,6 +23,7 @@ class CombineBenchmarkResults(interface.Hook): DESCRIPTION = "Combine JSON results from individual benchmarks" def __init__(self, hook_logger, fixture): + """Initialize CombineBenchmarkResults.""" interface.Hook.__init__(self, hook_logger, fixture, CombineBenchmarkResults.DESCRIPTION) self.report_file = _config.PERF_REPORT_FILE @@ -35,27 +37,30 @@ class CombineBenchmarkResults(interface.Hook): def _strftime(time): return time.strftime("%Y-%m-%dT%H:%M:%SZ") - def after_test(self, test_case, test_report): + def after_test(self, test, test_report): + """Update test report.""" if self.report_file is None: return - bm_report_path = test_case.report_name() + bm_report_path = test.report_name() with open(bm_report_path, "r") as report_file: report_dict = json.load(report_file) self._parse_report(report_dict) def before_suite(self, test_report): + """Set suite start time.""" self.create_time = datetime.datetime.now() def after_suite(self, test_report): + """Update test report.""" if self.report_file is None: return self.end_time = datetime.datetime.now() report = self._generate_perf_plugin_report() - with open(self.report_file, "w") as f: - json.dump(report, f) + with open(self.report_file, "w") as fh: + json.dump(report, fh) def _generate_perf_plugin_report(self): """Format the data to look like a perf plugin report.""" @@ -68,8 +73,7 @@ class CombineBenchmarkResults(interface.Hook): for name, report in self.benchmark_reports.items(): test_report = { - "name": name, - "context": report.context._asdict(), + "name": name, "context": report.context._asdict(), "results": report.generate_perf_plugin_dict() } @@ -93,15 +97,13 @@ class CombineBenchmarkResults(interface.Hook): # Capture information from a Benchmark name in a logical format. 
-_BenchmarkName = collections.namedtuple("_BenchmarkName", [ - "base_name", - "thread_count", - "statistic_type" -]); +_BenchmarkName = collections.namedtuple("_BenchmarkName", + ["base_name", "thread_count", "statistic_type"]) class _BenchmarkThreadsReport(object): - """ + """_BenchmarkThreadsReport class. + Class representation of a report for all thread levels of a single benchmark test. Each report is designed to correspond to one graph in the Evergreen perf plugin. @@ -127,10 +129,11 @@ class _BenchmarkThreadsReport(object): ] } """ + CONTEXT_FIELDS = [ "date", "cpu_scaling_enabled", "num_cpus", "mhz_per_cpu", "library_build_type" ] - Context = collections.namedtuple("Context", CONTEXT_FIELDS) + Context = collections.namedtuple("Context", CONTEXT_FIELDS) # type: ignore def __init__(self, context_dict): self.context = self.Context(**context_dict) @@ -139,11 +142,11 @@ class _BenchmarkThreadsReport(object): self.thread_benchmark_map = collections.defaultdict(list) def add_report(self, bm_name_obj, report): + """Add to report.""" self.thread_benchmark_map[bm_name_obj.thread_count].append(report) def generate_perf_plugin_dict(self): - """ - Generate perf plugin data points of the following format: + """Generate perf plugin data points of the following format. "1": { "error_values": [ diff --git a/buildscripts/resmokelib/testing/hooks/dbhash.py b/buildscripts/resmokelib/testing/hooks/dbhash.py index fdee7bb9f63..40caa5149c6 100644 --- a/buildscripts/resmokelib/testing/hooks/dbhash.py +++ b/buildscripts/resmokelib/testing/hooks/dbhash.py @@ -1,6 +1,4 @@ -""" -Testing hook for verifying data consistency across a replica set. -""" +"""Test hook for verifying data consistency across a replica set.""" from __future__ import absolute_import @@ -10,13 +8,16 @@ from . import jsfile class CheckReplDBHash(jsfile.DataConsistencyHook): - """ - Checks that the dbhashes of all non-local databases and non-replicated system collections + """Check if the dbhashes match. + + This includes dbhashes for all non-local databases and non-replicated system collections that match on the primary and secondaries. """ - def __init__(self, hook_logger, fixture, shell_options=None): + def __init__( # pylint: disable=super-init-not-called + self, hook_logger, fixture, shell_options=None): + """Initialize CheckReplDBHash.""" description = "Check dbhashes of all replica set or master/slave members" js_filename = os.path.join("jstests", "hooks", "run_check_repl_dbhash.js") - jsfile.JSHook.__init__(self, hook_logger, fixture, js_filename, description, - shell_options=shell_options) + jsfile.JSHook.__init__( # pylint: disable=non-parent-init-called + self, hook_logger, fixture, js_filename, description, shell_options=shell_options) diff --git a/buildscripts/resmokelib/testing/hooks/initialsync.py b/buildscripts/resmokelib/testing/hooks/initialsync.py index 905d0a1e913..36ada61ab00 100644 --- a/buildscripts/resmokelib/testing/hooks/initialsync.py +++ b/buildscripts/resmokelib/testing/hooks/initialsync.py @@ -1,6 +1,4 @@ -""" -Testing hook for verifying correctness of initial sync. -""" +"""Test hook for verifying correctness of initial sync.""" from __future__ import absolute_import @@ -18,7 +16,8 @@ from ... import errors class BackgroundInitialSync(interface.Hook): - """ + """BackgroundInitialSync class. + After every test, this hook checks if a background node has finished initial sync and if so, validates it, tears it down, and restarts it. 
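To make the per-thread-level report shape described in the _BenchmarkThreadsReport docstring above concrete, here is a hedged sketch that folds (thread_count, ops_per_sec) samples into that dict; the "ops_per_sec"/"ops_per_sec_values" field names follow the Evergreen perf plugin convention and are assumptions here, with "error_values" taken from the docstring:

    import collections

    def perf_plugin_dict(samples):
        """Fold (thread_count, ops_per_sec) samples into per-thread-level results."""
        by_threads = collections.defaultdict(list)
        for thread_count, ops_per_sec in samples:
            by_threads[thread_count].append(ops_per_sec)

        results = {}
        for thread_count, values in by_threads.items():
            results[str(thread_count)] = {
                "error_values": [0 for _ in values],
                "ops_per_sec": sum(values) / len(values),
                "ops_per_sec_values": values,
            }
        return results

    print(perf_plugin_dict([(1, 9500.0), (1, 10500.0), (4, 38000.0)]))
    # {'1': {'error_values': [0, 0], 'ops_per_sec': 10000.0,
    #        'ops_per_sec_values': [9500.0, 10500.0]},
    #  '4': {'error_values': [0], 'ops_per_sec': 38000.0,
    #        'ops_per_sec_values': [38000.0]}}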
@@ -33,6 +32,7 @@ class BackgroundInitialSync(interface.Hook): DEFAULT_N = cleanup.CleanEveryN.DEFAULT_N def __init__(self, hook_logger, fixture, n=DEFAULT_N, shell_options=None): + """Initialize BackgroundInitialSync.""" if not isinstance(fixture, replicaset.ReplicaSetFixture): raise ValueError("`fixture` must be an instance of ReplicaSetFixture, not {}".format( fixture.__class__.__name__)) @@ -40,12 +40,13 @@ class BackgroundInitialSync(interface.Hook): description = "Background Initial Sync" interface.Hook.__init__(self, hook_logger, fixture, description) - self.n = n + self.n = n # pylint: disable=invalid-name self.tests_run = 0 self.random_restarts = 0 self._shell_options = shell_options def after_test(self, test, test_report): + """After test execution.""" self.tests_run += 1 hook_test_case = BackgroundInitialSyncTestCase.create_after_test( @@ -55,14 +56,18 @@ class BackgroundInitialSync(interface.Hook): class BackgroundInitialSyncTestCase(jsfile.DynamicJSTestCase): + """BackgroundInitialSyncTestCase class.""" JS_FILENAME = os.path.join("jstests", "hooks", "run_initial_sync_node_validation.js") - def __init__(self, logger, test_name, description, base_test_name, hook, shell_options=None): + def __init__( # pylint: disable=too-many-arguments + self, logger, test_name, description, base_test_name, hook, shell_options=None): + """Initialize BackgroundInitialSyncTestCase.""" jsfile.DynamicJSTestCase.__init__(self, logger, test_name, description, base_test_name, hook, self.JS_FILENAME, shell_options) def run_test(self): + """Execute test hook.""" sync_node = self.fixture.get_initial_sync_node() sync_node_conn = sync_node.mongo_client() @@ -96,7 +101,7 @@ class BackgroundInitialSyncTestCase(jsfile.DynamicJSTestCase): if self._hook.random_restarts < 1 and random.random() < 0.2: self.logger.info( "randomly restarting initial sync in the middle of initial sync") - self.__restart_init_sync(sync_node, sync_node_conn) + self.__restart_init_sync(sync_node) self._hook.random_restarts += 1 return except pymongo.errors.OperationFailure: @@ -112,10 +117,10 @@ class BackgroundInitialSyncTestCase(jsfile.DynamicJSTestCase): # Run data validation and dbhash checking. self._js_test.run_test() - self.__restart_init_sync(sync_node, sync_node_conn) + self.__restart_init_sync(sync_node) # Restarts initial sync by shutting down the node, clearing its data, and restarting it. - def __restart_init_sync(self, sync_node, sync_node_conn): + def __restart_init_sync(self, sync_node): # Tear down and restart the initial sync node to start initial sync again. sync_node.teardown() @@ -125,7 +130,8 @@ class BackgroundInitialSyncTestCase(jsfile.DynamicJSTestCase): class IntermediateInitialSync(interface.Hook): - """ + """IntermediateInitialSync class. + This hook accepts a parameter 'n' that specifies a number of tests after which it will start up a node to initial sync, wait for replication to finish, and then validate the data. 
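Both CleanEveryN and IntermediateInitialSync gate their work on a test counter and the hook parameter 'n'. One way to express that run-every-N pattern; this is an illustrative sketch, not this patch's exact bookkeeping:

    class EveryNHook(object):
        """Run expensive work only after every n-th test completes."""

        def __init__(self, n=20):
            self.n = n
            self.tests_run = 0

        def after_test(self):
            self.tests_run += 1
            if self.tests_run % self.n != 0:
                return
            self._do_expensive_work()

        def _do_expensive_work(self):
            print("running hook after %d tests" % self.tests_run)

    hook = EveryNHook(n=3)
    for _ in range(7):
        hook.after_test()  # fires after the 3rd and 6th tests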
@@ -135,6 +141,7 @@ DEFAULT_N = cleanup.CleanEveryN.DEFAULT_N

     def __init__(self, hook_logger, fixture, n=DEFAULT_N):
+        """Initialize IntermediateInitialSync."""
         if not isinstance(fixture, replicaset.ReplicaSetFixture):
             raise ValueError("`fixture` must be an instance of ReplicaSetFixture, not {}".format(
                 fixture.__class__.__name__))
@@ -142,7 +149,7 @@
         description = "Intermediate Initial Sync"
         interface.Hook.__init__(self, hook_logger, fixture, description)

-        self.n = n
+        self.n = n  # pylint: disable=invalid-name
         self.tests_run = 0

     def _should_run_after_test(self):
@@ -156,6 +163,7 @@
         return True

     def after_test(self, test, test_report):
+        """After test execution."""
         if not self._should_run_after_test():
             return

@@ -166,14 +174,18 @@


 class IntermediateInitialSyncTestCase(jsfile.DynamicJSTestCase):
+    """IntermediateInitialSyncTestCase class."""

     JS_FILENAME = os.path.join("jstests", "hooks", "run_initial_sync_node_validation.js")

-    def __init__(self, logger, test_name, description, base_test_name, hook):
+    def __init__(  # pylint: disable=too-many-arguments
+            self, logger, test_name, description, base_test_name, hook):
+        """Initialize IntermediateInitialSyncTestCase."""
         jsfile.DynamicJSTestCase.__init__(self, logger, test_name, description, base_test_name,
                                           hook, self.JS_FILENAME)

     def run_test(self):
+        """Execute test hook."""
         sync_node = self.fixture.get_initial_sync_node()
         sync_node_conn = sync_node.mongo_client()
diff --git a/buildscripts/resmokelib/testing/hooks/interface.py b/buildscripts/resmokelib/testing/hooks/interface.py
index 877b2cc565f..8e938355001 100644
--- a/buildscripts/resmokelib/testing/hooks/interface.py
+++ b/buildscripts/resmokelib/testing/hooks/interface.py
@@ -1,6 +1,4 @@
-"""
-Interface for customizing the behavior of a test fixture.
-"""
+"""Interface for customizing the behavior of a test fixture."""

 from __future__ import absolute_import

@@ -11,13 +9,11 @@ from ... import errors
 from ...logging import loggers
 from ...utils import registry

-_HOOKS = {}
+_HOOKS = {}  # type: ignore


 def make_hook(class_name, *args, **kwargs):
-    """
-    Factory function for creating Hook instances.
-    """
+    """Provide factory function for creating Hook instances."""

     if class_name not in _HOOKS:
         raise ValueError("Unknown hook class '%s'" % class_name)
@@ -26,18 +22,14 @@ def make_hook(class_name, *args, **kwargs):


 class Hook(object):
-    """
-    The common interface all Hooks will inherit from.
-    """
+    """Common interface all Hooks will inherit from."""

-    __metaclass__ = registry.make_registry_metaclass(_HOOKS)
+    __metaclass__ = registry.make_registry_metaclass(_HOOKS)  # type: ignore

     REGISTERED_NAME = registry.LEAVE_UNREGISTERED

     def __init__(self, hook_logger, fixture, description):
-        """
-        Initializes the Hook with the specified fixture.
-        """
+        """Initialize the Hook with the specified fixture."""

         if not isinstance(hook_logger, loggers.HookLogger):
             raise TypeError("logger must be a HookLogger instance")
@@ -47,42 +39,38 @@ class Hook(object):
         self.description = description

     def before_suite(self, test_report):
-        """
-        The test runner calls this exactly once before they start
-        running the suite.
-        """
+        """Test runner calls this exactly once before it starts running the suite."""
         pass

     def after_suite(self, test_report):
-        """
-        The test runner calls this exactly once after all tests have
-        finished executing. Be sure to reset the behavior back to its
Be sure to reset the behavior back to its
-        original state so that it can be run again.
+        """Test runner calls this exactly once after all tests have finished executing.
+
+        Be sure to reset the behavior back to its original state so that it can be run again.
         """
         pass

     def before_test(self, test, test_report):
-        """
-        Each test will call this before it executes.
-        """
+        """Each test will call this before it executes."""
         pass

     def after_test(self, test, test_report):
-        """
-        Each test will call this after it executes.
-        """
+        """Each test will call this after it executes."""
         pass


 class DynamicTestCase(testcase.TestCase):  # pylint: disable=abstract-method
-    def __init__(self, logger, test_name, description, base_test_name, hook):
+    """DynamicTestCase class."""
+
+    def __init__(  # pylint: disable=too-many-arguments
+            self, logger, test_name, description, base_test_name, hook):
+        """Initialize DynamicTestCase."""
         testcase.TestCase.__init__(self, logger, "Hook", test_name)
         self.description = description
         self._hook = hook
         self._base_test_name = base_test_name

     def run_dynamic_test(self, test_report):
-        """Helper method to run a dynamic test and update the test report."""
+        """Provide helper method to run a dynamic test and update the test report."""
         test_report.startTest(self, dynamic=True)
         try:
             self.run_test()
@@ -102,11 +90,12 @@ class DynamicTestCase(testcase.TestCase):  # pylint: disable=abstract-method
         test_report.stopTest(self)

     def as_command(self):
+        """Provide base method."""
         return "(dynamic test case)"

     @classmethod
     def create_before_test(cls, logger, base_test, hook, *args, **kwargs):
-        """Creates a hook dynamic test to be run before an existing test."""
+        """Create a hook dynamic test to be run before an existing test."""
         base_test_name = base_test.short_name()
         test_name = cls._make_test_name(base_test_name, hook)
         description = "{} before running '{}'".format(hook.description, base_test_name)
@@ -114,7 +103,7 @@ class DynamicTestCase(testcase.TestCase):  # pylint: disable=abstract-method
     @classmethod
     def create_after_test(cls, logger, base_test, hook, *args, **kwargs):
-        """Creates a hook dynamic test to be run after an existing test."""
+        """Create a hook dynamic test to be run after an existing test."""
         base_test_name = base_test.short_name()
         test_name = cls._make_test_name(base_test_name, hook)
         description = "{} after running '{}'".format(hook.description, base_test_name)
diff --git a/buildscripts/resmokelib/testing/hooks/jsfile.py b/buildscripts/resmokelib/testing/hooks/jsfile.py
index 76b2de31313..e95d3d6d780 100644
--- a/buildscripts/resmokelib/testing/hooks/jsfile.py
+++ b/buildscripts/resmokelib/testing/hooks/jsfile.py
@@ -1,7 +1,4 @@
-"""
-Interface for customizing the behavior of a test fixture by executing a
-JavaScript file.
-"""
+"""Interface for customizing the behavior of a test fixture by executing a JavaScript file."""

 from __future__ import absolute_import

@@ -16,19 +13,23 @@ class JSHook(interface.Hook):

     REGISTERED_NAME = registry.LEAVE_UNREGISTERED

-    def __init__(self, hook_logger, fixture, js_filename, description, shell_options=None):
+    def __init__(  # pylint: disable=too-many-arguments
+            self, hook_logger, fixture, js_filename, description, shell_options=None):
+        """Initialize JSHook."""
         interface.Hook.__init__(self, hook_logger, fixture, description)
         self._js_filename = js_filename
         self._shell_options = shell_options

     def _should_run_after_test(self):  # pylint: disable=no-self-use
-        """
+        """Provide base callback.
+        Callback that can be overridden by subclasses to indicate whether the JavaScript file
         should be executed after the current test.
         """
         return True

     def after_test(self, test, test_report):
+        """After test execution."""
         if not self._should_run_after_test():
             return
@@ -49,6 +50,7 @@ class DataConsistencyHook(JSHook):
     REGISTERED_NAME = registry.LEAVE_UNREGISTERED

     def after_test(self, test, test_report):
+        """After test execution."""
         try:
             JSHook.after_test(self, test, test_report)
         except errors.TestFailure as err:
@@ -58,23 +60,29 @@ class DynamicJSTestCase(interface.DynamicTestCase):
     """A dynamic TestCase that runs a JavaScript file."""

-    def __init__(self, logger, test_name, description, base_test_name, hook, js_filename,
-                 shell_options=None):
+    def __init__(  # pylint: disable=too-many-arguments
+            self, logger, test_name, description, base_test_name, hook, js_filename,
+            shell_options=None):
+        """Initialize DynamicJSTestCase."""
         interface.DynamicTestCase.__init__(self, logger, test_name, description, base_test_name,
                                            hook)
         self._js_test = jstest.JSTestCase(logger, js_filename, shell_options=shell_options)

     def override_logger(self, new_logger):
+        """Override logger."""
         interface.DynamicTestCase.override_logger(self, new_logger)
         self._js_test.override_logger(new_logger)

     def reset_logger(self):
+        """Reset the logger."""
         interface.DynamicTestCase.reset_logger(self)
         self._js_test.reset_logger()

     def configure(self, fixture, *args, **kwargs):  # pylint: disable=unused-argument
+        """Configure the fixture."""
         interface.DynamicTestCase.configure(self, fixture, *args, **kwargs)
         self._js_test.configure(fixture, *args, **kwargs)

     def run_test(self):
+        """Execute the test."""
         self._js_test.run_test()
diff --git a/buildscripts/resmokelib/testing/hooks/oplog.py b/buildscripts/resmokelib/testing/hooks/oplog.py
index ca9e8d58228..ceb81bb8fd6 100644
--- a/buildscripts/resmokelib/testing/hooks/oplog.py
+++ b/buildscripts/resmokelib/testing/hooks/oplog.py
@@ -1,7 +1,4 @@
-"""
-Testing hook for verifying members of a replica set have matching
-oplogs.
-"""
+"""Test hook for verifying members of a replica set have matching oplogs."""

 from __future__ import absolute_import

@@ -10,13 +7,13 @@
 import os.path

 from . import jsfile

-class CheckReplOplogs(jsfile.DataConsistencyHook):
-    """
-    Checks that local.oplog.rs matches on the primary and secondaries.
- """ +class CheckReplOplogs(jsfile.DataConsistencyHook): # pylint: disable=non-parent-init-called,super-init-not-called + """Check that local.oplog.rs matches on the primary and secondaries.""" - def __init__(self, hook_logger, fixture, shell_options=None): + def __init__( # pylint: disable=super-init-not-called + self, hook_logger, fixture, shell_options=None): + """Initialize CheckReplOplogs.""" description = "Check oplogs of all replica set members" js_filename = os.path.join("jstests", "hooks", "run_check_repl_oplogs.js") - jsfile.JSHook.__init__(self, hook_logger, fixture, js_filename, description, - shell_options=shell_options) + jsfile.JSHook.__init__( # pylint: disable=non-parent-init-called + self, hook_logger, fixture, js_filename, description, shell_options=shell_options) diff --git a/buildscripts/resmokelib/testing/hooks/periodic_kill_secondaries.py b/buildscripts/resmokelib/testing/hooks/periodic_kill_secondaries.py index a6924fe52b6..bf3ed131b52 100644 --- a/buildscripts/resmokelib/testing/hooks/periodic_kill_secondaries.py +++ b/buildscripts/resmokelib/testing/hooks/periodic_kill_secondaries.py @@ -1,7 +1,4 @@ -""" -Testing hook for verifying correctness of a secondary's behavior during -an unclean shutdown. -""" +"""Test hook for verifying correctness of secondary's behavior during an unclean shutdown.""" from __future__ import absolute_import @@ -20,15 +17,16 @@ from ... import errors class PeriodicKillSecondaries(interface.Hook): - """ - Periodically kills the secondaries in a replica set and verifies - that they can reach the SECONDARY state without having connectivity + """Periodically kills the secondaries in a replica set. + + Also verifies that the secondaries can reach the SECONDARY state without having connectivity to the primary after an unclean shutdown. """ DEFAULT_PERIOD_SECS = 30 def __init__(self, hook_logger, rs_fixture, period_secs=DEFAULT_PERIOD_SECS): + """Initialize PeriodicKillSecondaries.""" if not isinstance(rs_fixture, replicaset.ReplicaSetFixture): raise TypeError("{} either does not support replication or does not support writing to" " its oplog early".format(rs_fixture.__class__.__name__)) @@ -46,6 +44,7 @@ class PeriodicKillSecondaries(interface.Hook): self._last_test = None def after_suite(self, test_report): + """Run after suite.""" if self._start_time is not None: # Ensure that we test killing the secondary and having it reach state SECONDARY after # being restarted at least once when running the suite. @@ -54,6 +53,7 @@ class PeriodicKillSecondaries(interface.Hook): self._run(test_report) def before_test(self, test, test_report): + """Run before test.""" if self._start_time is not None: # The "rsSyncApplyStop" failpoint is already enabled. 
return @@ -66,6 +66,7 @@ class PeriodicKillSecondaries(interface.Hook): self._start_time = time.time() def after_test(self, test, test_report): + """Run after test.""" self._last_test = test # Kill the secondaries and verify that they can reach the SECONDARY state if the specified @@ -116,12 +117,17 @@ class PeriodicKillSecondaries(interface.Hook): class PeriodicKillSecondariesTestCase(interface.DynamicTestCase): - def __init__(self, logger, test_name, description, base_test_name, hook, test_report): + """PeriodicKillSecondariesTestCase class.""" + + def __init__( # pylint: disable=too-many-arguments + self, logger, test_name, description, base_test_name, hook, test_report): + """Initialize PeriodicKillSecondariesTestCase.""" interface.DynamicTestCase.__init__(self, logger, test_name, description, base_test_name, hook) self._test_report = test_report def run_test(self): + """Run the test.""" self._kill_secondaries() self._check_secondaries_and_restart_fixture() @@ -143,7 +149,7 @@ class PeriodicKillSecondariesTestCase(interface.DynamicTestCase): for secondary in self.fixture.get_secondaries(): # Disable the "rsSyncApplyStop" failpoint on the secondary to have it resume applying # oplog entries. - self._hook._disable_rssyncapplystop(secondary) + self._hook._disable_rssyncapplystop(secondary) # pylint: disable=protected-access # Wait a little bit for the secondary to start apply oplog entries so that we are more # likely to kill the mongod process while it is partway into applying a batch. @@ -229,7 +235,7 @@ class PeriodicKillSecondariesTestCase(interface.DynamicTestCase): self.fixture.setup() self.fixture.await_ready() - def _check_invariants_as_standalone(self, secondary): + def _check_invariants_as_standalone(self, secondary): # pylint: disable=too-many-branches # We remove the --replSet option in order to start the node as a standalone. replset_name = secondary.mongod_options.pop("replSet") diff --git a/buildscripts/resmokelib/testing/hooks/stepdown.py b/buildscripts/resmokelib/testing/hooks/stepdown.py index 9e6e99d6663..2521d65f1af 100644 --- a/buildscripts/resmokelib/testing/hooks/stepdown.py +++ b/buildscripts/resmokelib/testing/hooks/stepdown.py @@ -1,6 +1,4 @@ -""" -Testing hook that periodically makes the primary of a replica set step down. -""" +"""Test hook that periodically makes the primary of a replica set step down.""" from __future__ import absolute_import import collections @@ -18,15 +16,15 @@ from buildscripts.resmokelib.testing.fixtures import shardedcluster class ContinuousStepdown(interface.Hook): - """The ContinuousStepdown hook regularly connects to replica sets and sends a replSetStepDown - command. - """ + """Regularly connect to replica sets and send a replSetStepDown command.""" + DESCRIPTION = ("Continuous stepdown (steps down the primary of replica sets at regular" " intervals)") - def __init__(self, hook_logger, fixture, config_stepdown=True, shard_stepdown=True, - stepdown_duration_secs=10, stepdown_interval_ms=8000): - """Initializes the ContinuousStepdown. + def __init__( # pylint: disable=too-many-arguments + self, hook_logger, fixture, config_stepdown=True, shard_stepdown=True, + stepdown_duration_secs=10, stepdown_interval_ms=8000): + """Initialize the ContinuousStepdown. Args: hook_logger: the logger instance for this hook. 
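The __init__ arguments above configure a background _StepdownThread that the hook starts in before_suite() and then resumes and pauses around every test (next hunks). A minimal sketch of the Event-based pause/resume protocol such a worker can use; PausableWorker and _do_one_round are illustrative names, not code from this patch:

    import threading


    class PausableWorker(threading.Thread):
        """A background worker that can be paused between rounds of work (sketch only)."""

        def __init__(self):
            """Initialize the worker in the resumed, idle state."""
            threading.Thread.__init__(self, name="PausableWorker")
            self.daemon = True
            self._is_stopped_evt = threading.Event()
            self._is_resumed_evt = threading.Event()
            self._is_resumed_evt.set()
            self._is_idle_evt = threading.Event()
            self._is_idle_evt.set()

        def run(self):
            """Perform one round of work at a time, blocking while paused."""
            while not self._is_stopped_evt.is_set():
                self._is_idle_evt.set()      # safe point: pause() may return now
                self._is_resumed_evt.wait()  # blocks while paused
                self._is_idle_evt.clear()
                if self._is_stopped_evt.is_set():
                    break
                self._do_one_round()         # e.g. step down each primary

        def stop(self):
            """Stop the worker; resume it first so a paused thread can exit."""
            self._is_stopped_evt.set()
            self.resume()

        def pause(self):
            """Pause the worker, returning only once in-flight work has finished."""
            self._is_resumed_evt.clear()
            self._is_idle_evt.wait()

        def resume(self):
            """Let the worker run again."""
            self._is_resumed_evt.set()

        def _do_one_round(self):
            """Do one unit of work (stepdowns, in the real thread)."""

The property that matters to the hook is that pause() blocks until the worker reaches a safe point, so after_test() can assume no stepdown is in flight once it returns.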
@@ -48,6 +46,7 @@ class ContinuousStepdown(interface.Hook): self._stepdown_thread = None def before_suite(self, test_report): + """Before suite.""" if not self._rs_fixtures: self._add_fixture(self._fixture) self._stepdown_thread = _StepdownThread(self.logger, self._rs_fixtures, @@ -57,15 +56,18 @@ class ContinuousStepdown(interface.Hook): self._stepdown_thread.start() def after_suite(self, test_report): + """After suite.""" self.logger.info("Stopping the stepdown thread.") self._stepdown_thread.stop() def before_test(self, test, test_report): + """Before test.""" self._check_thread() self.logger.info("Resuming the stepdown thread.") self._stepdown_thread.resume() def after_test(self, test, test_report): + """After test.""" self._check_thread() self.logger.info("Pausing the stepdown thread.") self._stepdown_thread.pause() @@ -92,8 +94,11 @@ class ContinuousStepdown(interface.Hook): self._add_fixture(fixture.configsvr) -class _StepdownThread(threading.Thread): +class _StepdownThread(threading.Thread): # pylint: disable=too-many-instance-attributes + """_StepdownThread class.""" + def __init__(self, logger, rs_fixtures, stepdown_interval_secs, stepdown_duration_secs): + """Initialize _StepdownThread.""" threading.Thread.__init__(self, name="StepdownThread") self.daemon = True self.logger = logger @@ -114,6 +119,7 @@ class _StepdownThread(threading.Thread): self._step_up_stats = collections.Counter() def run(self): + """Execute the thread.""" if not self._rs_fixtures: self.logger.warning("No replica set on which to run stepdowns.") return @@ -135,7 +141,7 @@ class _StepdownThread(threading.Thread): self._wait(wait_secs) def stop(self): - """Stops the thread.""" + """Stop the thread.""" self._is_stopped_evt.set() # Unpause to allow the thread to finish. self.resume() @@ -145,7 +151,7 @@ class _StepdownThread(threading.Thread): return self._is_stopped_evt.is_set() def pause(self): - """Pauses the thread.""" + """Pause the thread.""" self._is_resumed_evt.clear() # Wait until we are no longer executing stepdowns. self._is_idle_evt.wait() @@ -153,7 +159,7 @@ class _StepdownThread(threading.Thread): self._await_primaries() def resume(self): - """Resumes the thread.""" + """Resume the thread.""" self._is_resumed_evt.set() self.logger.info( diff --git a/buildscripts/resmokelib/testing/hooks/validate.py b/buildscripts/resmokelib/testing/hooks/validate.py index 20cf99c1158..3239ddbdc06 100644 --- a/buildscripts/resmokelib/testing/hooks/validate.py +++ b/buildscripts/resmokelib/testing/hooks/validate.py @@ -1,7 +1,4 @@ -""" -Testing hook for verifying the consistency and integrity of collection -and index data. -""" +"""Test hook for verifying the consistency and integrity of collection and index data.""" from __future__ import absolute_import @@ -11,13 +8,16 @@ from . import jsfile class ValidateCollections(jsfile.DataConsistencyHook): - """ - Runs full validation on all collections in all databases on every stand-alone + """Run full validation. + + This will run on all collections in all databases on every stand-alone node, primary replica-set node, or primary shard node. 
""" - def __init__(self, hook_logger, fixture, shell_options=None): + def __init__( # pylint: disable=super-init-not-called + self, hook_logger, fixture, shell_options=None): + """Initialize ValidateCollections.""" description = "Full collection validation" js_filename = os.path.join("jstests", "hooks", "run_validate_collections.js") - jsfile.JSHook.__init__(self, hook_logger, fixture, js_filename, description, - shell_options=shell_options) + jsfile.JSHook.__init__( # pylint: disable=non-parent-init-called + self, hook_logger, fixture, js_filename, description, shell_options=shell_options) diff --git a/buildscripts/resmokelib/testing/job.py b/buildscripts/resmokelib/testing/job.py index 33831f4e84c..a890eb3a820 100644 --- a/buildscripts/resmokelib/testing/job.py +++ b/buildscripts/resmokelib/testing/job.py @@ -1,7 +1,4 @@ -""" -Enables supports for running tests simultaneously by processing them -from a multi-consumer queue. -""" +"""Enable running tests simultaneously by processing them from a multi-consumer queue.""" from __future__ import absolute_import @@ -13,14 +10,11 @@ from ..utils import queue as _queue class Job(object): - """ - Runs tests from a queue. - """ + """Run tests from a queue.""" - def __init__(self, logger, fixture, hooks, report, archival, suite_options): - """ - Initializes the job with the specified fixture and hooks. - """ + def __init__( # pylint: disable=too-many-arguments + self, logger, fixture, hooks, report, archival, suite_options): + """Initialize the job with the specified fixture and hooks.""" self.logger = logger self.fixture = fixture @@ -30,9 +24,7 @@ class Job(object): self.suite_options = suite_options def __call__(self, queue, interrupt_flag, teardown_flag=None): - """ - Continuously executes tests from 'queue' and records their - details in 'report'. + """Continuously execute tests from 'queue' and records their details in 'report'. If 'teardown_flag' is not None, then 'self.fixture.teardown()' will be called before this method returns. If an error occurs @@ -47,7 +39,7 @@ class Job(object): # Stop running tests immediately. self.logger.error("Received a StopExecution exception: %s.", err) should_stop = True - except: + except: # pylint: disable=bare-except # Unknown error, stop execution. self.logger.exception("Encountered an error during test execution.") should_stop = True @@ -64,15 +56,12 @@ class Job(object): except errors.ServerFailure as err: self.logger.warn("Teardown of %s was not successful: %s", self.fixture, err) teardown_flag.set() - except: + except: # pylint: disable=bare-except self.logger.exception("Encountered an error while tearing down %s.", self.fixture) teardown_flag.set() def _run(self, queue, interrupt_flag): - """ - Calls the before/after suite hooks and continuously executes - tests from 'queue'. - """ + """Call the before/after suite hooks and continuously execute tests from 'queue'.""" for hook in self.hooks: hook.before_suite(self.report) @@ -91,9 +80,7 @@ class Job(object): hook.after_suite(self.report) def _execute_test(self, test): - """ - Calls the before/after test hooks and executes 'test'. - """ + """Call the before/after test hooks and execute 'test'.""" test.configure(self.fixture, config.NUM_CLIENTS_PER_FIXTURE) self._run_hooks_before_tests(test) @@ -101,26 +88,26 @@ class Job(object): test(self.report) try: if self.suite_options.fail_fast and not self.report.wasSuccessful(): - self.logger.info("%s failed, so stopping..." 
% (test.shortDescription())) - raise errors.StopExecution("%s failed" % (test.shortDescription())) + self.logger.info("%s failed, so stopping..." % (test.short_description())) + raise errors.StopExecution("%s failed" % (test.short_description())) if not self.fixture.is_running(): self.logger.error( "%s marked as a failure because the fixture crashed during the test.", - test.shortDescription()) + test.short_description()) self.report.setFailure(test, return_code=2) # Always fail fast if the fixture fails. raise errors.StopExecution("%s not running after %s" % (self.fixture, - test.shortDescription())) + test.short_description())) finally: - success = self.report._find_test_info(test).status == "pass" + success = self.report.find_test_info(test).status == "pass" if self.archival: self.archival.archive(self.logger, test, success) self._run_hooks_after_tests(test) def _run_hook(self, hook, hook_function, test): - """ Helper to run hook and archival. """ + """Provide helper to run hook and archival.""" try: success = False hook_function(test, self.report) @@ -130,8 +117,7 @@ class Job(object): self.archival.archive(self.logger, test, success, hook=hook) def _run_hooks_before_tests(self, test): - """ - Runs the before_test method on each of the hooks. + """Run the before_test method on each of the hooks. Swallows any TestFailure exceptions if set to continue on failure, and reraises any other exceptions. @@ -145,13 +131,13 @@ class Job(object): except errors.ServerFailure: self.logger.exception("%s marked as a failure by a hook's before_test.", - test.shortDescription()) + test.short_description()) self._fail_test(test, sys.exc_info(), return_code=2) raise errors.StopExecution("A hook's before_test failed") except errors.TestFailure: self.logger.exception("%s marked as a failure by a hook's before_test.", - test.shortDescription()) + test.short_description()) self._fail_test(test, sys.exc_info(), return_code=1) if self.suite_options.fail_fast: raise errors.StopExecution("A hook's before_test failed") @@ -164,8 +150,7 @@ class Job(object): raise def _run_hooks_after_tests(self, test): - """ - Runs the after_test method on each of the hooks. + """Run the after_test method on each of the hooks. Swallows any TestFailure exceptions if set to continue on failure, and reraises any other exceptions. @@ -179,13 +164,13 @@ class Job(object): except errors.ServerFailure: self.logger.exception("%s marked as a failure by a hook's after_test.", - test.shortDescription()) + test.short_description()) self.report.setFailure(test, return_code=2) raise errors.StopExecution("A hook's after_test failed") except errors.TestFailure: self.logger.exception("%s marked as a failure by a hook's after_test.", - test.shortDescription()) + test.short_description()) self.report.setFailure(test, return_code=1) if self.suite_options.fail_fast: raise errors.StopExecution("A hook's after_test failed") @@ -195,9 +180,7 @@ class Job(object): raise def _fail_test(self, test, exc_info, return_code=1): - """ - Helper to record a test as a failure with the provided return - code. + """Provide helper to record a test as a failure with the provided return code. This method should not be used if 'test' has already been started, instead use TestReport.setFailure(). @@ -210,10 +193,9 @@ class Job(object): @staticmethod def _drain_queue(queue): - """ - Removes all elements from 'queue' without actually doing - anything to them. Necessary to unblock the main thread that is - waiting for 'queue' to be empty. 
+ """Remove all elements from 'queue' without actually doing anything to them. + + Necessary to unblock the main thread that is waiting for 'queue' to be empty. """ try: diff --git a/buildscripts/resmokelib/testing/report.py b/buildscripts/resmokelib/testing/report.py index f13cfdc9a84..c968449c8a2 100644 --- a/buildscripts/resmokelib/testing/report.py +++ b/buildscripts/resmokelib/testing/report.py @@ -1,6 +1,6 @@ -""" -Extension to the unittest.TestResult to support additional test status -and timing information for the report.json file. +"""Extension to the unittest.TestResult. + +This is used to support additional test status and timing information for the report.json file. """ from __future__ import absolute_import @@ -14,15 +14,12 @@ from .. import config as _config from .. import logging -class TestReport(unittest.TestResult): - """ - Records test status and timing information. - """ +# pylint: disable=attribute-defined-outside-init +class TestReport(unittest.TestResult): # pylint: disable=too-many-instance-attributes + """Record test status and timing information.""" def __init__(self, job_logger, suite_options): - """ - Initializes the TestReport with the buildlogger configuration. - """ + """Initialize the TestReport with the buildlogger configuration.""" unittest.TestResult.__init__(self) @@ -35,8 +32,7 @@ class TestReport(unittest.TestResult): @classmethod def combine(cls, *reports): - """ - Merges the results from multiple TestReport instances into one. + """Merge the results from multiple TestReport instances into one. If the same test is present in multiple reports, then one that failed or errored is more preferred over one that succeeded. @@ -54,7 +50,7 @@ class TestReport(unittest.TestResult): if not isinstance(report, TestReport): raise TypeError("reports must be a list of TestReport instances") - with report._lock: + with report._lock: # pylint: disable=protected-access for test_info in report.test_infos: # If the user triggers a KeyboardInterrupt exception while a test is running, # then it is possible for 'test_info' to be modified by a job thread later on. @@ -93,10 +89,8 @@ class TestReport(unittest.TestResult): return combined_report - def startTest(self, test, dynamic=False): - """ - Called immediately before 'test' is run. - """ + def startTest(self, test, dynamic=False): # pylint: disable=invalid-name,arguments-differ + """Call before 'test' is run.""" unittest.TestResult.startTest(self, test) @@ -119,15 +113,13 @@ class TestReport(unittest.TestResult): test.override_logger(test_logger) - def stopTest(self, test): - """ - Called immediately after 'test' has run. - """ + def stopTest(self, test): # pylint: disable=invalid-name + """Call after 'test' has run.""" unittest.TestResult.stopTest(self, test) with self._lock: - test_info = self._find_test_info(test) + test_info = self.find_test_info(test) test_info.end_time = time.time() time_taken = test_info.end_time - test_info.start_time @@ -143,11 +135,8 @@ class TestReport(unittest.TestResult): # Restore the original logger for the test. test.reset_logger() - def addError(self, test, err): - """ - Called when a non-failureException was raised during the - execution of 'test'. 
- """ + def addError(self, test, err): # pylint: disable=invalid-name + """Call when a non-failureException was raised during the execution of 'test'.""" unittest.TestResult.addError(self, test, err) @@ -155,18 +144,16 @@ class TestReport(unittest.TestResult): self.num_errored += 1 # We don't distinguish between test failures and Python errors in Evergreen. - test_info = self._find_test_info(test) + test_info = self.find_test_info(test) test_info.status = "error" test_info.evergreen_status = "fail" test_info.return_code = test.return_code - def setError(self, test): - """ - Used to change the outcome of an existing test to an error. - """ + def setError(self, test): # pylint: disable=invalid-name + """Change the outcome of an existing test to an error.""" with self._lock: - test_info = self._find_test_info(test) + test_info = self.find_test_info(test) if test_info.end_time is None: raise ValueError("stopTest was not called on %s" % (test.basename())) @@ -181,18 +168,15 @@ class TestReport(unittest.TestResult): self.num_errored = len(self.get_errored()) self.num_interrupted = len(self.get_interrupted()) - def addFailure(self, test, err): - """ - Called when a failureException was raised during the execution - of 'test'. - """ + def addFailure(self, test, err): # pylint: disable=invalid-name + """Call when a failureException was raised during the execution of 'test'.""" unittest.TestResult.addFailure(self, test, err) with self._lock: self.num_failed += 1 - test_info = self._find_test_info(test) + test_info = self.find_test_info(test) test_info.status = "fail" if test_info.dynamic: # Dynamic tests are used for data consistency checks, so the failures are never @@ -202,13 +186,11 @@ class TestReport(unittest.TestResult): test_info.evergreen_status = self.suite_options.report_failure_status test_info.return_code = test.return_code - def setFailure(self, test, return_code=1): - """ - Used to change the outcome of an existing test to a failure. - """ + def setFailure(self, test, return_code=1): # pylint: disable=invalid-name + """Change the outcome of an existing test to a failure.""" with self._lock: - test_info = self._find_test_info(test) + test_info = self.find_test_info(test) if test_info.end_time is None: raise ValueError("stopTest was not called on %s" % (test.basename())) @@ -227,68 +209,51 @@ class TestReport(unittest.TestResult): self.num_errored = len(self.get_errored()) self.num_interrupted = len(self.get_interrupted()) - def addSuccess(self, test): - """ - Called when 'test' executed successfully. - """ + def addSuccess(self, test): # pylint: disable=invalid-name + """Call when 'test' executed successfully.""" unittest.TestResult.addSuccess(self, test) with self._lock: self.num_succeeded += 1 - test_info = self._find_test_info(test) + test_info = self.find_test_info(test) test_info.status = "pass" test_info.evergreen_status = "pass" test_info.return_code = test.return_code - def wasSuccessful(self): - """ - Returns true if all tests executed successfully. - """ + def wasSuccessful(self): # pylint: disable=invalid-name + """Return true if all tests executed successfully.""" with self._lock: return self.num_failed == self.num_errored == self.num_interrupted == 0 def get_successful(self): - """ - Returns the status and timing information of the tests that - executed successfully. 
-        """
+        """Return the status and timing information of the tests that executed successfully."""

         with self._lock:
             return [test_info for test_info in self.test_infos if test_info.status == "pass"]

     def get_failed(self):
-        """
-        Returns the status and timing information of the tests that
-        raised a failureException during their execution.
-        """
+        """Return the status and timing information of tests that raised a failureException."""

         with self._lock:
             return [test_info for test_info in self.test_infos if test_info.status == "fail"]

     def get_errored(self):
-        """
-        Returns the status and timing information of the tests that
-        raised a non-failureException during their execution.
-        """
+        """Return the status and timing information of tests that raised a non-failureException."""

         with self._lock:
             return [test_info for test_info in self.test_infos if test_info.status == "error"]

     def get_interrupted(self):
-        """
-        Returns the status and timing information of the tests that had
-        their execution interrupted.
-        """
+        """Return the status and timing information of tests that had their execution interrupted."""

         with self._lock:
             return [test_info for test_info in self.test_infos if test_info.status == "timeout"]

     def as_dict(self):
-        """
-        Return the test result information as a dictionary.
+        """Return the test result information as a dictionary.

         Used to create the report.json file.
         """
@@ -318,8 +283,7 @@ class TestReport(unittest.TestResult):

     @classmethod
     def from_dict(cls, report_dict):
-        """
-        Returns the test report instance copied from a dict (generated in as_dict).
+        """Return the test report instance copied from a dict (generated in as_dict).

         Used when combining reports instances.
         """
@@ -349,9 +313,7 @@ class TestReport(unittest.TestResult):
         return report

     def reset(self):
-        """
-        Resets the test report back to its initial state.
-        """
+        """Reset the test report back to its initial state."""

         with self._lock:
             self.test_infos = []
@@ -362,11 +324,8 @@ class TestReport(unittest.TestResult):
         self.num_errored = 0
         self.num_interrupted = 0

-    def _find_test_info(self, test):
-        """
-        Returns the status and timing information associated with
-        'test'.
-        """
+    def find_test_info(self, test):
+        """Return the status and timing information associated with 'test'."""

         test_id = test.id()

@@ -379,15 +338,11 @@ class TestReport(unittest.TestResult):
         raise ValueError("Details for %s not found in the report" % (test.basename()))


-class _TestInfo(object):
-    """
-    Holder for the test status and timing information.
-    """
+class _TestInfo(object):  # pylint: disable=too-many-instance-attributes
+    """Holder for the test status and timing information."""

     def __init__(self, test_id, dynamic):
-        """
-        Initializes the _TestInfo instance.
-        """
+        """Initialize the _TestInfo instance."""

         self.test_id = test_id
         self.dynamic = dynamic
diff --git a/buildscripts/resmokelib/testing/suite.py b/buildscripts/resmokelib/testing/suite.py
index 07d72cb65b4..1a5c06b09f1 100644
--- a/buildscripts/resmokelib/testing/suite.py
+++ b/buildscripts/resmokelib/testing/suite.py
@@ -1,7 +1,4 @@
-"""
-Holder for the (test kind, list of tests) pair with additional metadata about when and how they
-execute.
-"""
+"""Holder for the (test kind, list of tests) pair with additional metadata about their execution."""

 from __future__ import absolute_import

@@ -16,9 +13,10 @@ from .. import selector as _selector

 def synchronized(method):
-    """Decorator to enfore instance lock ownership when calling the method."""
+    """Provide decorator to enforce instance lock ownership when calling the method."""

     def synced(self, *args, **kwargs):
+        """Call the method while holding the instance lock."""
         lock = getattr(self, "_lock")
         with lock:
             return method(self, *args, **kwargs)
@@ -26,15 +24,11 @@ def synchronized(method):
     return synced


-class Suite(object):
-    """
-    A suite of tests of a particular kind (e.g. C++ unit tests, dbtests, jstests).
-    """
+class Suite(object):  # pylint: disable=too-many-instance-attributes
+    """A suite of tests of a particular kind (e.g. C++ unit tests, dbtests, jstests)."""

     def __init__(self, suite_name, suite_config, suite_options=_config.SuiteOptions.ALL_INHERITED):
-        """
-        Initializes the suite with the specified name and configuration.
-        """
+        """Initialize the suite with the specified name and configuration."""

         self._lock = threading.RLock()
         self._suite_name = suite_name
@@ -58,10 +52,7 @@ class Suite(object):
         self._partial_reports = None

     def _get_tests_for_kind(self, test_kind):
-        """
-        Returns the tests to run based on the 'test_kind'-specific
-        filtering policy.
-        """
+        """Return the tests to run based on the 'test_kind'-specific filtering policy."""

         test_info = self.get_selector_config()

         # The mongos_test doesn't have to filter anything, the test_info is just the arguments to
@@ -79,15 +70,11 @@ class Suite(object):
         return tests, excluded

     def get_name(self):
-        """
-        Returns the name of the test suite.
-        """
+        """Return the name of the test suite."""

         return self._suite_name

     def get_display_name(self):
-        """
-        Returns the name of the test suite with a unique identifier for its SuiteOptions.
-        """
+        """Return the name of the test suite with a unique identifier for its SuiteOptions."""

         if self.options.description is None:
             return self.get_name()

@@ -95,9 +82,7 @@ class Suite(object):
         return "{} ({})".format(self.get_name(), self.options.description)

     def get_selector_config(self):
-        """
-        Returns the "selector" section of the YAML configuration.
-        """
+        """Return the "selector" section of the YAML configuration."""

         if "selector" not in self._suite_config:
             return {}
@@ -124,75 +109,62 @@ class Suite(object):
         return selector

     def get_executor_config(self):
-        """
-        Returns the "executor" section of the YAML configuration.
-        """
+        """Return the "executor" section of the YAML configuration."""

         return self._suite_config["executor"]

     def get_test_kind_config(self):
-        """
-        Returns the "test_kind" section of the YAML configuration.
-        """
+        """Return the "test_kind" section of the YAML configuration."""

         return self._suite_config["test_kind"]

     @property
     def options(self):
+        """Get the options."""
         return self._suite_options.resolve()

     def with_options(self, suite_options):
-        """
-        Returns a Suite instance with the specified resmokelib.config.SuiteOptions.
-        """
+        """Return a Suite instance with the specified resmokelib.config.SuiteOptions."""

         return Suite(self._suite_name, self._suite_config, suite_options)

     @synchronized
     def record_suite_start(self):
-        """
-        Records the start time of the suite.
-        """
+        """Record the start time of the suite."""

         self._suite_start_time = time.time()

     @synchronized
     def record_suite_end(self):
-        """
-        Records the end time of the suite.
-        """
+        """Record the end time of the suite."""

         self._suite_end_time = time.time()

     @synchronized
     def record_test_start(self, partial_reports):
-        """
-        Records the start time of an execution and stores the
-        TestReports for currently running jobs.
+        """Record the start time of an execution.
+
+        Also store the TestReports for the currently running jobs.
         """

         self._test_start_times.append(time.time())
         self._partial_reports = partial_reports

     @synchronized
     def record_test_end(self, report):
-        """
-        Records the end time of an execution.
-        """
+        """Record the end time of an execution."""

         self._test_end_times.append(time.time())
         self._reports.append(report)
         self._partial_reports = None

     @synchronized
     def get_active_report(self):
-        """
-        Returns the partial report of the currently running execution, if there is one.
-        """
+        """Return the partial report of the currently running execution, if there is one."""

         if not self._partial_reports:
             return None

         return _report.TestReport.combine(*self._partial_reports)

     @synchronized
     def get_reports(self):
-        """
-        Returns the list of reports. If there's an execution currently
-        in progress, then a report for the partial results is included
-        in the returned list.
+        """Return the list of reports.
+
+        If there's an execution currently in progress, then a report for the partial results
+        is included in the returned list.
         """

         if self._partial_reports is not None:
@@ -202,9 +174,7 @@ class Suite(object):

     @synchronized
     def summarize(self, sb):
-        """
-        Appends a summary of the suite onto the string builder 'sb'.
-        """
+        """Append a summary of the suite onto the string builder 'sb'."""

         if not self._reports and not self._partial_reports:
             sb.append("No tests ran.")
             summary = _summary.Summary(0, 0.0, 0, 0, 0, 0)
@@ -234,9 +204,9 @@ class Suite(object):

     @synchronized
     def summarize_latest(self, sb):
-        """
-        Returns a summary of the latest execution of the suite and appends a
-        summary of that execution onto the string builder 'sb'.
+        """Return a summary of the latest execution of the suite.
+
+        Also append a summary of that execution onto the string builder 'sb'.

         If there's an execution currently in progress, then the partial summary
         of that execution is appended to 'sb'.
@@ -251,10 +221,10 @@ class Suite(object):
         return self._summarize_report(active_report, self._test_start_times[-1], end_time, sb)

     def _summarize_repeated(self, sb):
-        """
-        Returns the summary information of all executions and appends
-        each execution's summary onto the string builder 'sb'. Also
-        appends information of how many repetitions there were.
+        """Return the summary information of all executions.
+
+        Also append each execution's summary onto the string builder 'sb' and
+        information about how many repetitions there were.
         """

         reports = self.get_reports()  # Also includes the combined partial reports.
@@ -283,21 +253,19 @@ class Suite(object):
         return combined_summary

     def _summarize_execution(self, iteration, sb):
-        """
-        Returns the summary information of the execution given by
-        'iteration' and appends a summary of that execution onto the
-        string builder 'sb'.
+        """Return the summary information of the execution given by 'iteration'.
+
+        Also append a summary of that execution onto the string builder 'sb'.
""" return self._summarize_report(self._reports[iteration], self._test_start_times[iteration], self._test_end_times[iteration], sb) def _summarize_report(self, report, start_time, end_time, sb): - """ - Returns the summary information of the execution given by - 'report' that started at 'start_time' and finished at - 'end_time', and appends a summary of that execution onto the - string builder 'sb'. + """Return the summary information of the execution. + + The summary is for 'report' that started at 'start_time' and finished at 'end_time'. + Also append a summary of that execution onto the string builder 'sb'. """ time_taken = end_time - start_time @@ -333,6 +301,7 @@ class Suite(object): @staticmethod def log_summaries(logger, suites, time_taken): + """Log summary of all suites.""" sb = [] sb.append("Summary of all suites: %d suites ran in %0.2f seconds" % (len(suites), time_taken)) diff --git a/buildscripts/resmokelib/testing/summary.py b/buildscripts/resmokelib/testing/summary.py index cf3649c3e16..dc92e0b5b34 100644 --- a/buildscripts/resmokelib/testing/summary.py +++ b/buildscripts/resmokelib/testing/summary.py @@ -1,6 +1,4 @@ -""" -Holder for summary information about a test suite. -""" +"""Holder for summary information about a test suite.""" from __future__ import absolute_import @@ -12,9 +10,7 @@ Summary = collections.namedtuple( def combine(summary1, summary2): - """ - Returns a summary representing the sum of 'summary1' and 'summary2'. - """ + """Return a summary representing the sum of 'summary1' and 'summary2'.""" args = [] for i in xrange(len(Summary._fields)): args.append(summary1[i] + summary2[i]) diff --git a/buildscripts/resmokelib/testing/testcases/__init__.py b/buildscripts/resmokelib/testing/testcases/__init__.py index a397c04fda6..52869d99de8 100644 --- a/buildscripts/resmokelib/testing/testcases/__init__.py +++ b/buildscripts/resmokelib/testing/testcases/__init__.py @@ -1,6 +1,4 @@ -""" -Package containing subclasses of unittest.TestCase. -""" +"""Package containing subclasses of unittest.TestCase.""" from __future__ import absolute_import @@ -9,4 +7,4 @@ from ...utils import autoloader as _autoloader # We dynamically load all modules in the testcases/ package so that any TestCase classes declared # within them are automatically registered. -_autoloader.load_all_modules(name=__name__, path=__path__) +_autoloader.load_all_modules(name=__name__, path=__path__) # type: ignore diff --git a/buildscripts/resmokelib/testing/testcases/benchmark_test.py b/buildscripts/resmokelib/testing/testcases/benchmark_test.py index c30fff20f55..ea506c4f7e5 100644 --- a/buildscripts/resmokelib/testing/testcases/benchmark_test.py +++ b/buildscripts/resmokelib/testing/testcases/benchmark_test.py @@ -1,6 +1,4 @@ -""" -unittest.TestCase for tests using a MongoDB vendored version of Google Benchmark. -""" +"""The unittest.TestCase for tests using a MongoDB vendored version of Google Benchmark.""" from __future__ import absolute_import @@ -12,23 +10,22 @@ from buildscripts.resmokelib.testing.testcases import interface class BenchmarkTestCase(interface.ProcessTestCase): - """ - A Benchmark test to execute. - """ + """A Benchmark test to execute.""" REGISTERED_NAME = "benchmark_test" def __init__(self, logger, program_executable, program_options=None): - """ - Initializes the BenchmarkTestCase with the executable to run. 
- """ + """Initialize the BenchmarkTestCase with the executable to run.""" + interface.ProcessTestCase.__init__(self, logger, "Benchmark test", program_executable) parser.validate_benchmark_options() self.bm_executable = program_executable self.suite_bm_options = program_options + self.bm_options = {} def configure(self, fixture, *args, **kwargs): + """Configure BenchmarkTestCase.""" interface.ProcessTestCase.configure(self, fixture, *args, **kwargs) # 1. Set the default benchmark options, including the out file path, which is based on the @@ -65,6 +62,7 @@ class BenchmarkTestCase(interface.ProcessTestCase): self.bm_options = bm_options def report_name(self): + """Return report name.""" return self.bm_executable + ".json" def _make_process(self): diff --git a/buildscripts/resmokelib/testing/testcases/cpp_integration_test.py b/buildscripts/resmokelib/testing/testcases/cpp_integration_test.py index b4170581821..df6d7c9fa41 100644 --- a/buildscripts/resmokelib/testing/testcases/cpp_integration_test.py +++ b/buildscripts/resmokelib/testing/testcases/cpp_integration_test.py @@ -1,6 +1,4 @@ -""" -unittest.TestCase for C++ integration tests. -""" +"""The unittest.TestCase for C++ integration tests.""" from __future__ import absolute_import @@ -10,16 +8,12 @@ from ... import utils class CPPIntegrationTestCase(interface.ProcessTestCase): - """ - A C++ integration test to execute. - """ + """A C++ integration test to execute.""" REGISTERED_NAME = "cpp_integration_test" def __init__(self, logger, program_executable, program_options=None): - """ - Initializes the CPPIntegrationTestCase with the executable to run. - """ + """Initialize the CPPIntegrationTestCase with the executable to run.""" interface.ProcessTestCase.__init__(self, logger, "C++ integration test", program_executable) @@ -27,6 +21,7 @@ class CPPIntegrationTestCase(interface.ProcessTestCase): self.program_options = utils.default_if_none(program_options, {}).copy() def configure(self, fixture, *args, **kwargs): + """Configure the test case.""" interface.ProcessTestCase.configure(self, fixture, *args, **kwargs) self.program_options["connectionString"] = self.fixture.get_internal_connection_string() diff --git a/buildscripts/resmokelib/testing/testcases/cpp_unittest.py b/buildscripts/resmokelib/testing/testcases/cpp_unittest.py index 96f20796911..f9512f8feb5 100644 --- a/buildscripts/resmokelib/testing/testcases/cpp_unittest.py +++ b/buildscripts/resmokelib/testing/testcases/cpp_unittest.py @@ -1,6 +1,4 @@ -""" -unittest.TestCase for C++ unit tests. -""" +"""The unittest.TestCase for C++ unit tests.""" from __future__ import absolute_import @@ -10,16 +8,12 @@ from ... import utils class CPPUnitTestCase(interface.ProcessTestCase): - """ - A C++ unit test to execute. - """ + """A C++ unit test to execute.""" REGISTERED_NAME = "cpp_unit_test" def __init__(self, logger, program_executable, program_options=None): - """ - Initializes the CPPUnitTestCase with the executable to run. - """ + """Initialize the CPPUnitTestCase with the executable to run.""" interface.ProcessTestCase.__init__(self, logger, "C++ unit test", program_executable) diff --git a/buildscripts/resmokelib/testing/testcases/dbtest.py b/buildscripts/resmokelib/testing/testcases/dbtest.py index 15316a0f197..295114d60a5 100644 --- a/buildscripts/resmokelib/testing/testcases/dbtest.py +++ b/buildscripts/resmokelib/testing/testcases/dbtest.py @@ -1,6 +1,4 @@ -""" -unittest.TestCase for dbtests. 
-""" +"""The unittest.TestCase for dbtests.""" from __future__ import absolute_import @@ -15,16 +13,12 @@ from ... import utils class DBTestCase(interface.ProcessTestCase): - """ - A dbtest to execute. - """ + """A dbtest to execute.""" REGISTERED_NAME = "db_test" def __init__(self, logger, dbtest_suite, dbtest_executable=None, dbtest_options=None): - """ - Initializes the DBTestCase with the dbtest suite to run. - """ + """Initialize the DBTestCase with the dbtest suite to run.""" interface.ProcessTestCase.__init__(self, logger, "dbtest suite", dbtest_suite) @@ -35,6 +29,7 @@ class DBTestCase(interface.ProcessTestCase): self.dbtest_options = utils.default_if_none(dbtest_options, {}).copy() def configure(self, fixture, *args, **kwargs): + """Configure DBTestCase.""" interface.ProcessTestCase.configure(self, fixture, *args, **kwargs) # If a dbpath was specified, then use it as a container for all other dbpaths. @@ -64,8 +59,7 @@ class DBTestCase(interface.ProcessTestCase): @staticmethod def _get_dbpath_prefix(): """ - Returns the prefix of the dbpath to use for the dbtest - executable. + Return the prefix of the dbpath to use for the dbtest executable. Order of preference: 1. The --dbpathPrefix specified at the command line. diff --git a/buildscripts/resmokelib/testing/testcases/fsm_workload_test.py b/buildscripts/resmokelib/testing/testcases/fsm_workload_test.py index 0d397200cfc..62efc0a5959 100644 --- a/buildscripts/resmokelib/testing/testcases/fsm_workload_test.py +++ b/buildscripts/resmokelib/testing/testcases/fsm_workload_test.py @@ -1,6 +1,4 @@ -""" -unittest.TestCase for FSM workloads. -""" +"""The unittest.TestCase for FSM workloads.""" from __future__ import absolute_import @@ -16,7 +14,7 @@ class FSMWorkloadTestCase(jsrunnerfile.JSRunnerFileTestCase): REGISTERED_NAME = "fsm_workload_test" def __init__(self, logger, fsm_workload, shell_executable=None, shell_options=None): - """Initializes the FSMWorkloadTestCase with the FSM workload file.""" + """Initialize the FSMWorkloadTestCase with the FSM workload file.""" jsrunnerfile.JSRunnerFileTestCase.__init__( self, logger, "FSM workload", fsm_workload, @@ -25,6 +23,7 @@ class FSMWorkloadTestCase(jsrunnerfile.JSRunnerFileTestCase): @property def fsm_workload(self): + """Get the test name.""" return self.test_name def _populate_test_data(self, test_data): diff --git a/buildscripts/resmokelib/testing/testcases/interface.py b/buildscripts/resmokelib/testing/testcases/interface.py index f66abef0f3b..183e69f9d36 100644 --- a/buildscripts/resmokelib/testing/testcases/interface.py +++ b/buildscripts/resmokelib/testing/testcases/interface.py @@ -1,6 +1,6 @@ -""" -Subclass of unittest.TestCase with helpers for spawning a separate -process to perform the actual test case. +"""Subclass of unittest.TestCase with helpers for spawning a separate process. + +This is used to perform the actual test case. """ from __future__ import absolute_import @@ -12,33 +12,25 @@ import unittest from ... import logging from ...utils import registry -_TEST_CASES = {} +_TEST_CASES = {} # type: ignore def make_test_case(test_kind, *args, **kwargs): - """ - Factory function for creating TestCase instances. - """ - + """Provide factory function for creating TestCase instances.""" if test_kind not in _TEST_CASES: raise ValueError("Unknown test kind '%s'" % test_kind) return _TEST_CASES[test_kind](*args, **kwargs) class TestCase(unittest.TestCase): - """ - A test case to execute. 
-    """
+    """A test case to execute."""

-    __metaclass__ = registry.make_registry_metaclass(_TEST_CASES)
+    __metaclass__ = registry.make_registry_metaclass(_TEST_CASES)  # type: ignore

     REGISTERED_NAME = registry.LEAVE_UNREGISTERED

     def __init__(self, logger, test_kind, test_name):
-        """
-        Initializes the TestCase with the name of the test.
-        """
-
+        """Initialize the TestCase with the name of the test."""
         unittest.TestCase.__init__(self, methodName="run_test")

         if not isinstance(logger, logging.Logger):
@@ -54,7 +46,7 @@ class TestCase(unittest.TestCase):
         # logger is an instance of TestQueueLogger. When the TestCase is created by a hook
         # implementation it is an instance of BaseLogger.
         self.logger = logger
-        # Used to store the logger when overridden by a test logger in Report.startTest().
+        # Used to store the logger when overridden by a test logger in TestReport.startTest().
         self._original_logger = None

         self.test_kind = test_kind
@@ -66,32 +58,27 @@ class TestCase(unittest.TestCase):
         self.is_configured = False

     def long_name(self):
-        """
-        Returns the path to the test, relative to the current working directory.
-        """
+        """Return the path to the test, relative to the current working directory."""

         return os.path.relpath(self.test_name)

     def basename(self):
-        """
-        Returns the basename of the test.
-        """
+        """Return the basename of the test."""

         return os.path.basename(self.test_name)

     def short_name(self):
-        """
-        Returns the basename of the test without the file extension.
-        """
+        """Return the basename of the test without the file extension."""

         return os.path.splitext(self.basename())[0]

     def id(self):
+        """Return the id of the test."""
         return self.test_name

-    def shortDescription(self):
+    def short_description(self):
+        """Return the short description of the test."""
         return "%s %s" % (self.test_kind, self.test_name)

     def override_logger(self, new_logger):
-        """
-        Overrides this instance's logger with a new logger.
+        """Override this instance's logger with a new logger.

         This method is used by the report to set the test logger.
         """
@@ -100,15 +87,13 @@ class TestCase(unittest.TestCase):
         self.logger = new_logger

     def reset_logger(self):
-        """Resets this instance's logger to its original value."""
+        """Reset this instance's logger to its original value."""
         assert self._original_logger, "Logger was not overridden"
         self.logger = self._original_logger
         self._original_logger = None

     def configure(self, fixture, *args, **kwargs):  # pylint: disable=unused-argument
-        """
-        Stores 'fixture' as an attribute for later use during execution.
-        """
+        """Store 'fixture' as an attribute for later use during execution."""

         if self.is_configured:
             raise RuntimeError("configure can only be called once")

         self.is_configured = True
         self.fixture = fixture

     def run_test(self):
-        """
-        Runs the specified test.
-        """
+        """Run the specified test."""

         raise NotImplementedError("run_test must be implemented by TestCase subclasses")

     def as_command(self):
-        """
-        Returns the command invocation used to run the test.
- """ + """Return the command invocation used to run the test.""" raise NotImplementedError("as_command must be implemented by TestCase subclasses") @@ -132,6 +113,7 @@ class ProcessTestCase(TestCase): # pylint: disable=abstract-method """Base class for TestCases that executes an external process.""" def run_test(self): + """Run the test.""" try: shell = self._make_process() self._execute(shell) @@ -143,29 +125,22 @@ class ProcessTestCase(TestCase): # pylint: disable=abstract-method raise def as_command(self): - """ - Returns the command invocation used to run the test. - """ + """Return the command invocation used to run the test.""" return self._make_process().as_command() def _execute(self, process): - """ - Runs the specified process. - """ - self.logger.info("Starting %s...\n%s", self.shortDescription(), process.as_command()) + """Run the specified process.""" + self.logger.info("Starting %s...\n%s", self.short_description(), process.as_command()) process.start() - self.logger.info("%s started with pid %s.", self.shortDescription(), process.pid) + self.logger.info("%s started with pid %s.", self.short_description(), process.pid) self.return_code = process.wait() if self.return_code != 0: - raise self.failureException("%s failed" % (self.shortDescription())) + raise self.failureException("%s failed" % (self.short_description())) - self.logger.info("%s finished.", self.shortDescription()) + self.logger.info("%s finished.", self.short_description()) def _make_process(self): - """ - Returns a new Process instance that could be used to run the - test or log the command. - """ + """Return a new Process instance that could be used to run the test or log the command.""" raise NotImplementedError("_make_process must be implemented by TestCase subclasses") diff --git a/buildscripts/resmokelib/testing/testcases/json_schema_test.py b/buildscripts/resmokelib/testing/testcases/json_schema_test.py index 8380b246bf6..08e5a2d71a8 100644 --- a/buildscripts/resmokelib/testing/testcases/json_schema_test.py +++ b/buildscripts/resmokelib/testing/testcases/json_schema_test.py @@ -1,6 +1,4 @@ -""" -unittest.TestCase for JSON Schema tests. -""" +"""The unittest.TestCase for JSON Schema tests.""" from __future__ import absolute_import @@ -16,7 +14,7 @@ class JSONSchemaTestCase(jsrunnerfile.JSRunnerFileTestCase): REGISTERED_NAME = "json_schema_test" def __init__(self, logger, json_filename, shell_executable=None, shell_options=None): - """Initializes the JSONSchemaTestCase with the JSON test file.""" + """Initialize the JSONSchemaTestCase with the JSON test file.""" jsrunnerfile.JSRunnerFileTestCase.__init__( self, logger, "JSON Schema test", json_filename, @@ -25,6 +23,7 @@ class JSONSchemaTestCase(jsrunnerfile.JSRunnerFileTestCase): @property def json_filename(self): + """Get the JSON filename.""" return self.test_name def _populate_test_data(self, test_data): diff --git a/buildscripts/resmokelib/testing/testcases/jsrunnerfile.py b/buildscripts/resmokelib/testing/testcases/jsrunnerfile.py index 45a9e5d4944..c2da41faf37 100644 --- a/buildscripts/resmokelib/testing/testcases/jsrunnerfile.py +++ b/buildscripts/resmokelib/testing/testcases/jsrunnerfile.py @@ -1,6 +1,4 @@ -""" -unittest.TestCase for tests with a static JavaScript runner file. 
-""" +"""The unittest.TestCase for tests with a static JavaScript runner file.""" from __future__ import absolute_import @@ -16,9 +14,10 @@ class JSRunnerFileTestCase(interface.ProcessTestCase): REGISTERED_NAME = registry.LEAVE_UNREGISTERED - def __init__(self, logger, test_kind, test_name, test_runner_file, shell_executable=None, - shell_options=None): - """Initializes the JSRunnerFileTestCase with the 'test_name' file.""" + def __init__( # pylint: disable=too-many-arguments + self, logger, test_kind, test_name, test_runner_file, shell_executable=None, + shell_options=None): + """Initialize the JSRunnerFileTestCase with the 'test_name' file.""" interface.ProcessTestCase.__init__(self, logger, test_kind, test_name) @@ -29,6 +28,7 @@ class JSRunnerFileTestCase(interface.ProcessTestCase): self.test_runner_file = test_runner_file def configure(self, fixture, *args, **kwargs): + """Configure the js runner.""" interface.ProcessTestCase.configure(self, fixture, *args, **kwargs) global_vars = self.shell_options.get("global_vars", {}).copy() @@ -40,7 +40,8 @@ class JSRunnerFileTestCase(interface.ProcessTestCase): self.shell_options["global_vars"] = global_vars def _populate_test_data(self, test_data): - """ + """Provide base method. + This method is intended to be overridden by subclasses in order to define the configuration necessary for the static JavaScript runner file. """ diff --git a/buildscripts/resmokelib/testing/testcases/jstest.py b/buildscripts/resmokelib/testing/testcases/jstest.py index 747e43fe01f..9386ddf9306 100644 --- a/buildscripts/resmokelib/testing/testcases/jstest.py +++ b/buildscripts/resmokelib/testing/testcases/jstest.py @@ -1,6 +1,4 @@ -""" -unittest.TestCase for JavaScript tests. -""" +"""The unittest.TestCase for JavaScript tests.""" from __future__ import absolute_import @@ -18,16 +16,12 @@ from ...utils import registry class _SingleJSTestCase(interface.ProcessTestCase): - """ - A jstest to execute. - """ + """A jstest to execute.""" REGISTERED_NAME = registry.LEAVE_UNREGISTERED def __init__(self, logger, js_filename, shell_executable=None, shell_options=None): - """ - Initializes the _SingleJSTestCase with the JS file to run. - """ + """Initialize the _SingleJSTestCase with the JS file to run.""" interface.ProcessTestCase.__init__(self, logger, "JSTest", js_filename) @@ -38,11 +32,11 @@ class _SingleJSTestCase(interface.ProcessTestCase): self.shell_options = utils.default_if_none(shell_options, {}).copy() def configure(self, fixture, *args, **kwargs): + """Configure the jstest.""" interface.ProcessTestCase.configure(self, fixture, *args, **kwargs) def configure_shell(self): - """ - Sets up the global variables for the shell, and data/ directory for the mongod. + """Set up the global variables for the shell, and data/ directory for the mongod. configure_shell() only needs to be called once per test. Therefore if creating multiple _SingleJSTestCase instances to be run in parallel, only call configure_shell() on one of @@ -101,11 +95,7 @@ class _SingleJSTestCase(interface.ProcessTestCase): self.shell_options["process_kwargs"] = process_kwargs def _get_data_dir(self, global_vars): - """ - Returns the value that the mongo shell should set for the - MongoRunner.dataDir property. - """ - + """Return the value that mongo shell should set for the MongoRunner.dataDir property.""" # Command line options override the YAML configuration. 
data_dir_prefix = utils.default_if_none(config.DBPATH_PREFIX, global_vars.get("MongoRunner.dataDir")) @@ -120,40 +110,38 @@ class _SingleJSTestCase(interface.ProcessTestCase): class JSTestCase(interface.ProcessTestCase): - """ - A wrapper for several copies of a SingleJSTest to execute. - """ + """A wrapper for several copies of a SingleJSTest to execute.""" REGISTERED_NAME = "js_test" class ThreadWithException(threading.Thread): - """ - A wrapper for the thread class that lets us propagate exceptions. - """ + """A wrapper for the thread class that lets us propagate exceptions.""" def __init__(self, *args, **kwargs): + """Initialize JSTestCase.""" threading.Thread.__init__(self, *args, **kwargs) self.exc_info = None def run(self): + """Run the jstest.""" try: threading.Thread.run(self) - except: + except: # pylint: disable=bare-except self.exc_info = sys.exc_info() DEFAULT_CLIENT_NUM = 1 def __init__(self, logger, js_filename, shell_executable=None, shell_options=None): - """ - Initializes the JSTestCase with the JS file to run. - """ + """Initialize the JSTestCase with the JS file to run.""" interface.ProcessTestCase.__init__(self, logger, "JSTest", js_filename) self.num_clients = JSTestCase.DEFAULT_CLIENT_NUM self.test_case_template = _SingleJSTestCase(logger, js_filename, shell_executable, shell_options) - def configure(self, fixture, num_clients=DEFAULT_CLIENT_NUM, *args, **kwargs): + def configure( # pylint: disable=arguments-differ,keyword-arg-before-vararg + self, fixture, num_clients=DEFAULT_CLIENT_NUM, *args, **kwargs): + """Configure the jstest.""" interface.ProcessTestCase.configure(self, fixture, *args, **kwargs) self.num_clients = num_clients self.test_case_template.configure(fixture, *args, **kwargs) @@ -161,12 +149,10 @@ class JSTestCase(interface.ProcessTestCase): def _make_process(self): # This function should only be called by interface.py's as_command(). - return self.test_case_template._make_process() + return self.test_case_template._make_process() # pylint: disable=protected-access def _get_shell_options_for_thread(self, thread_id): - """ - Get shell_options with an initialized TestData object for given thread. - """ + """Get shell_options with an initialized TestData object for given thread.""" # We give each _SingleJSTestCase its own copy of the shell_options. shell_options = self.test_case_template.shell_options.copy() @@ -187,9 +173,7 @@ class JSTestCase(interface.ProcessTestCase): return shell_options def _create_test_case_for_thread(self, logger, thread_id): - """ - Create and configure a _SingleJSTestCase to be run in a separate thread. - """ + """Create and configure a _SingleJSTestCase to be run in a separate thread.""" shell_options = self._get_shell_options_for_thread(thread_id) test_case = _SingleJSTestCase(logger, self.test_case_template.js_filename, @@ -244,6 +228,7 @@ class JSTestCase(interface.ProcessTestCase): raise thread.exc_info def run_test(self): + """Execute the test.""" if self.num_clients == 1: self._run_single_copy() else: diff --git a/buildscripts/resmokelib/testing/testcases/mongos_test.py b/buildscripts/resmokelib/testing/testcases/mongos_test.py index 64b39a32cd9..9914ba8677f 100644 --- a/buildscripts/resmokelib/testing/testcases/mongos_test.py +++ b/buildscripts/resmokelib/testing/testcases/mongos_test.py @@ -1,6 +1,4 @@ -""" -unittest.TestCase for mongos --test. -""" +"""The unittest.TestCase for mongos --test.""" from __future__ import absolute_import @@ -11,16 +9,12 @@ from ... 
import utils class MongosTestCase(interface.ProcessTestCase): - """ - A TestCase which runs a mongos binary with the given parameters. - """ + """A TestCase which runs a mongos binary with the given parameters.""" REGISTERED_NAME = "mongos_test" def __init__(self, logger, mongos_options): - """ - Initializes the mongos test and saves the options. - """ + """Initialize the mongos test and saves the options.""" self.mongos_executable = utils.default_if_none(config.MONGOS_EXECUTABLE, config.DEFAULT_MONGOS_EXECUTABLE) @@ -29,9 +23,7 @@ class MongosTestCase(interface.ProcessTestCase): self.options = mongos_options.copy() def configure(self, fixture, *args, **kwargs): - """ - Ensures the --test option is present in the mongos options. - """ + """Ensure the --test option is present in the mongos options.""" interface.ProcessTestCase.configure(self, fixture, *args, **kwargs) # Always specify test option to ensure the mongos will terminate. diff --git a/buildscripts/resmokelib/testing/testcases/pytest.py b/buildscripts/resmokelib/testing/testcases/pytest.py index 6c1c343f197..fe62b18e154 100644 --- a/buildscripts/resmokelib/testing/testcases/pytest.py +++ b/buildscripts/resmokelib/testing/testcases/pytest.py @@ -1,6 +1,4 @@ -""" -unittest.TestCase for Python unittests. -""" +"""The unittest.TestCase for Python unittests.""" import os import unittest @@ -8,13 +6,16 @@ from buildscripts.resmokelib.testing.testcases import interface class PyTestCase(interface.TestCase): + """A python test to execute.""" REGISTERED_NAME = "py_test" def __init__(self, logger, py_filename): + """Initialize PyTestCase.""" interface.TestCase.__init__(self, logger, "PyTest", py_filename) def run_test(self): + """Execute the test.""" suite = unittest.defaultTestLoader.loadTestsFromName(self.test_module_name) result = unittest.TextTestRunner().run(suite) if result.failures: @@ -24,6 +25,7 @@ class PyTestCase(interface.TestCase): self.return_code = 0 def as_command(self): + """Return execute command.""" return "python -m unittest {}".format(self.test_module_name) @property diff --git a/buildscripts/resmokelib/testing/testcases/sleeptest.py b/buildscripts/resmokelib/testing/testcases/sleeptest.py index f521ecea870..163100095d3 100644 --- a/buildscripts/resmokelib/testing/testcases/sleeptest.py +++ b/buildscripts/resmokelib/testing/testcases/sleeptest.py @@ -1,6 +1,4 @@ -""" -unittest.TestCase for sleeping a given amount of time. -""" +"""The unittest.TestCase for sleeping a given amount of time.""" from __future__ import absolute_import @@ -10,13 +8,12 @@ from . import interface class SleepTestCase(interface.TestCase): + """SleepTestCase class.""" REGISTERED_NAME = "sleep_test" def __init__(self, logger, sleep_duration_secs): - """ - Initializes the SleepTestCase with the duration to sleep for. - """ + """Initialize the SleepTestCase with the duration to sleep for.""" sleep_duration_secs = int(sleep_duration_secs) @@ -26,11 +23,10 @@ class SleepTestCase(interface.TestCase): self.__sleep_duration_secs = sleep_duration_secs def run_test(self): + """Execute sleep.""" time.sleep(self.__sleep_duration_secs) self.return_code = 0 def as_command(self): - """ - Returns the command invocation used to run the test. 
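
The ThreadWithException wrapper in jstest.py above exists because a plain threading.Thread silently swallows exceptions raised in run(). The pattern in isolation, with a hypothetical join_and_reraise() helper showing how the captured exc_info gets re-raised (the three-expression raise is the Python 2 form this codebase targets):

    import sys
    import threading


    class ThreadWithException(threading.Thread):
        """Capture an exception raised in run() so the caller can re-raise it."""

        def __init__(self, *args, **kwargs):
            threading.Thread.__init__(self, *args, **kwargs)
            self.exc_info = None

        def run(self):
            try:
                threading.Thread.run(self)
            except:  # pylint: disable=bare-except
                self.exc_info = sys.exc_info()


    def join_and_reraise(thread):
        """Join 'thread', then re-raise any exception captured by its run()."""
        thread.join()
        if thread.exc_info is not None:
            # Python 2 re-raise that preserves the original traceback.
            raise thread.exc_info[0], thread.exc_info[1], thread.exc_info[2]

This is how JSTestCase can propagate a failure from any one of its parallel shell clones to the overall test result.
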
- """ + """Return the command invocation used to run the test.""" return "sleep {:d}".format(self.__sleep_duration_secs) diff --git a/buildscripts/resmokelib/utils/__init__.py b/buildscripts/resmokelib/utils/__init__.py index 57dc8705319..1db36de5cb2 100644 --- a/buildscripts/resmokelib/utils/__init__.py +++ b/buildscripts/resmokelib/utils/__init__.py @@ -1,6 +1,4 @@ -""" -Helper functions. -""" +"""Helper functions.""" from __future__ import absolute_import from __future__ import print_function @@ -16,9 +14,7 @@ from . import archival @contextlib.contextmanager def open_or_use_stdout(filename): - """ - Opens the specified file for writing, or returns sys.stdout if filename is "-". - """ + """Open the specified file for writing, or returns sys.stdout if filename is "-".""" if filename == "-": yield sys.stdout @@ -38,42 +34,32 @@ def open_or_use_stdout(filename): def default_if_none(value, default): + """Set default if value is 'None'.""" return value if value is not None else default def is_string_list(lst): - """ - Returns true if 'lst' is a list of strings, and false otherwise. - """ + """Return true if 'lst' is a list of strings, and false otherwise.""" return isinstance(lst, list) and all(isinstance(x, basestring) for x in lst) def is_string_set(value): - """ - Returns true if 'value' is a set of strings, and false otherwise. - """ + """Return true if 'value' is a set of strings, and false otherwise.""" return isinstance(value, set) and all(isinstance(x, basestring) for x in value) def is_js_file(filename): - """ - Returns true if 'filename' ends in .js, and false otherwise. - """ + """Return true if 'filename' ends in .js, and false otherwise.""" return os.path.splitext(filename)[1] == ".js" def is_yaml_file(filename): - """ - Returns true if 'filename' ends in .yml or .yaml, and false - otherwise. - """ + """Return true if 'filename' ends in .yml or .yaml, and false otherwise.""" return os.path.splitext(filename)[1] in (".yaml", ".yml") def load_yaml_file(filename): - """ - Attempts to read 'filename' as YAML. - """ + """Attempt to read 'filename' as YAML.""" try: with open(filename, "r") as fp: return yaml.safe_load(fp) @@ -82,17 +68,13 @@ def load_yaml_file(filename): def dump_yaml(value): - """ - Returns 'value' formatted as YAML. - """ + """Return 'value' formatted as YAML.""" # Use block (indented) style for formatting YAML. return yaml.safe_dump(value, default_flow_style=False).rstrip() def load_yaml(value): - """ - Attempts to parse 'value' as YAML. - """ + """Attempt to parse 'value' as YAML.""" try: return yaml.safe_load(value) except yaml.YAMLError as err: diff --git a/buildscripts/resmokelib/utils/archival.py b/buildscripts/resmokelib/utils/archival.py index 999e56b99ae..ecbde08c675 100644 --- a/buildscripts/resmokelib/utils/archival.py +++ b/buildscripts/resmokelib/utils/archival.py @@ -1,6 +1,4 @@ -""" -Archival utility. -""" +"""Archival utility.""" from __future__ import absolute_import @@ -30,7 +28,7 @@ ArchiveArgs = collections.namedtuple("ArchiveArgs", def file_list_size(files): - """ Return size (in bytes) of all 'files' and their subdirectories. """ + """Return size (in bytes) of all 'files' and their subdirectories.""" if isinstance(files, str): files = [files] file_bytes = 0 @@ -45,7 +43,7 @@ def file_list_size(files): def directory_size(directory): - """ Return size (in bytes) of files in 'directory' tree. 
""" + """Return size (in bytes) of files in 'directory' tree.""" dir_bytes = 0 for root_dir, _, files in os.walk(unicode(directory)): for name in files: @@ -62,20 +60,19 @@ def directory_size(directory): def free_space(path): - """ Return file system free space (in bytes) for 'path'. """ + """Return file system free space (in bytes) for 'path'.""" if _IS_WINDOWS: dirname = os.path.dirname(path) free_bytes = ctypes.c_ulonglong(0) ctypes.windll.kernel32.GetDiskFreeSpaceExW( ctypes.c_wchar_p(dirname), None, None, ctypes.pointer(free_bytes)) return free_bytes.value - else: - stat = os.statvfs(path) - return stat.f_bavail * stat.f_bsize + stat = os.statvfs(path) + return stat.f_bavail * stat.f_bsize def remove_file(file_name): - """ Attempts to remove file. Returns status and message. """ + """Attempt to remove file. Return status and message.""" try: # File descriptors, on Windows, are inherited by all subprocesses and file removal may fail # because the file is still open. @@ -83,18 +80,19 @@ def remove_file(file_name): os.remove(file_name) status = 0 message = "Successfully deleted file {}".format(file_name) - except Exception as err: + except Exception as err: # pylint: disable=broad-except status = 1 message = "Error deleting file {}: {}".format(file_name, err) return status, message -class Archival(object): - """ Class to support file archival to S3.""" +class Archival(object): # pylint: disable=too-many-instance-attributes + """Class to support file archival to S3.""" - def __init__(self, logger, archival_json_file="archive.json", limit_size_mb=0, limit_files=0, - s3_client=None): - """ Archival init method. """ + def __init__( # pylint: disable=too-many-arguments + self, logger, archival_json_file="archive.json", limit_size_mb=0, limit_files=0, + s3_client=None): + """Initialize Archival.""" self.archival_json_file = archival_json_file self.limit_size_mb = limit_size_mb @@ -134,14 +132,13 @@ class Archival(object): return boto3.client("s3") def archive_files_to_s3(self, display_name, input_files, s3_bucket, s3_path): - """ - Archive 'input_files' to 's3_bucket' and 's3_path'. + """Archive 'input_files' to 's3_bucket' and 's3_path'. Archive is not done if user specified limits are reached. The size limit is enforced after it has been exceeded, since it can only be calculated after the tar/gzip has been done. - Returns status and message, where message contains information if status is non-0. + Return status and message, where message contains information if status is non-0. """ start_time = time.time() @@ -168,7 +165,7 @@ class Archival(object): @staticmethod def _update_archive_file_wkr(queue, logger): - """ Worker thread: Update the archival JSON file from 'queue'. """ + """Worker thread: Update the archival JSON file from 'queue'.""" archival_json = [] while True: archive_args = queue.get() @@ -189,7 +186,7 @@ class Archival(object): @staticmethod def _upload_to_s3_wkr(queue, archive_file_queue, logger, s3_client): - """" Worker thread: Upload to S3 from 'queue', dispatch to 'archive_file_queue'. """ + """Worker thread: Upload to S3 from 'queue', dispatch to 'archive_file_queue'.""" while True: upload_args = queue.get() # Exit worker thread when sentinel is received. 
@@ -207,7 +204,7 @@ class Archival(object): upload_completed = True logger.debug("Upload to S3 completed for %s to bucket %s path %s", upload_args.local_file, upload_args.s3_bucket, upload_args.s3_path) - except Exception as err: + except Exception as err: # pylint: disable=broad-except logger.exception("Upload to S3 error %s", err) if upload_args.delete_file: @@ -274,6 +271,7 @@ class Archival(object): return status, message, size_mb def check_thread(self, thread, expected_alive): + """Check if the thread is still active.""" if thread.isAlive() and not expected_alive: self.logger.warning( "The %s thread did not complete, some files might not have been uploaded" @@ -284,7 +282,7 @@ class Archival(object): " to S3 or archived to %s.", thread.name, self.archival_json_file) def exit(self, timeout=30): - """ Waits for worker threads to finish. """ + """Wait for worker threads to finish.""" # Put sentinel on upload queue to trigger worker thread exit. self._upload_queue.put(None) self.check_thread(self._upload_worker, True) @@ -300,9 +298,9 @@ class Archival(object): self.archive_time, self.num_files, self.size_mb) def files_archived_num(self): - """ Returns the number of the archived files. """ + """Return the number of the archived files.""" return self.num_files def files_archived_size_mb(self): - """ Returns the size of the archived files. """ + """Return the size of the archived files.""" return self.size_mb diff --git a/buildscripts/resmokelib/utils/autoloader.py b/buildscripts/resmokelib/utils/autoloader.py index 1ac58abc892..73b58563451 100644 --- a/buildscripts/resmokelib/utils/autoloader.py +++ b/buildscripts/resmokelib/utils/autoloader.py @@ -1,6 +1,4 @@ -""" -Utility for loading all modules within a package. -""" +"""Utility for loading all modules within a package.""" from __future__ import absolute_import @@ -9,8 +7,7 @@ import pkgutil def load_all_modules(name, path): - """ - Dynamically loads all modules in the 'name' package. + """Dynamically load all modules in the 'name' package. This function is useful in combination with the registry.py module so that any classes declared within the package are automatically diff --git a/buildscripts/resmokelib/utils/globstar.py b/buildscripts/resmokelib/utils/globstar.py index 443d75b6b0c..1e016875f94 100644 --- a/buildscripts/resmokelib/utils/globstar.py +++ b/buildscripts/resmokelib/utils/globstar.py @@ -1,6 +1,4 @@ -""" -Filename globbing utility. -""" +"""Filename globbing utility.""" from __future__ import absolute_import @@ -13,18 +11,15 @@ _GLOBSTAR = "**" _CONTAINS_GLOB_PATTERN = re.compile("[*?[]") -def is_glob_pattern(s): - """ - Returns true if 's' represents a glob pattern, and false otherwise. - """ +def is_glob_pattern(string): + """Return true if 'string' represents a glob pattern, and false otherwise.""" # Copied from glob.has_magic(). - return _CONTAINS_GLOB_PATTERN.search(s) is not None + return _CONTAINS_GLOB_PATTERN.search(string) is not None def glob(globbed_pathname): - """ - Return a list of pathnames matching the 'globbed_pathname' pattern. + """Return a list of pathnames matching the 'globbed_pathname' pattern. In addition to containing simple shell-style wildcards a la fnmatch, the pattern may also contain globstars ("**"), which is recursively @@ -35,8 +30,7 @@ def glob(globbed_pathname): def iglob(globbed_pathname): - """ - Emit a list of pathnames matching the 'globbed_pathname' pattern. + """Emit a list of pathnames matching the 'globbed_pathname' pattern. 
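
In practice the two public entry points are used like this (the directory and pattern are illustrative):

    from buildscripts.resmokelib.utils import globstar

    PATTERN = "jstests/core/**/*.js"

    # "**" matches zero or more whole path components, so this walks to any depth.
    if globstar.is_glob_pattern(PATTERN):
        for pathname in globstar.iglob(PATTERN):
            print(pathname)

glob() returns the same matches collected into a list, so iglob() is the one to prefer when the match set may be large.
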
In addition to containing simple shell-style wildcards a la fnmatch, the pattern may also contain globstars ("**"), which is recursively @@ -79,9 +73,7 @@ def iglob(globbed_pathname): def _split_path(pathname): - """ - Return 'pathname' as a list of path components. - """ + """Return 'pathname' as a list of path components.""" parts = [] @@ -100,45 +92,43 @@ def _split_path(pathname): def _canonicalize(parts): - """ - Return a copy of 'parts' with consecutive "**"s coalesced. + """Return a copy of 'parts' with consecutive "**"s coalesced. + Raise a ValueError for unsupported uses of "**". """ res = [] prev_was_globstar = False - for p in parts: - if p == _GLOBSTAR: + for part in parts: + if part == _GLOBSTAR: # Skip consecutive **'s if not prev_was_globstar: prev_was_globstar = True - res.append(p) - elif _GLOBSTAR in p: # a/b**/c or a/**b/c + res.append(part) + elif _GLOBSTAR in part: # a/b**/c or a/**b/c raise ValueError("Can only specify glob patterns of the form a/**/b") else: prev_was_globstar = False - res.append(p) + res.append(part) return res def _find_globstar(parts): - """ - Return the index of the first occurrence of "**" in 'parts'. + """Return the index of the first occurrence of "**" in 'parts'. + Return -1 if "**" is not found in the list. """ - for (i, p) in enumerate(parts): - if p == _GLOBSTAR: - return i + for (idx, part) in enumerate(parts): + if part == _GLOBSTAR: + return idx return -1 def _list_dir(pathname): - """ - Return a pair of the subdirectory names and filenames immediately - contained within the 'pathname' directory. + """Return a pair of subdirectory names and filenames contained within the 'pathname' directory. If 'pathname' does not exist, then None is returned. """ @@ -151,9 +141,9 @@ def _list_dir(pathname): def _expand(pathname): - """ - Emit tuples of the form ("dir", dirname) and ("file", filename) - of all directories and files contained within the 'pathname' directory. + """Emit tuples of the form ("dir", dirname) and ("file", filename). + + The result is for all directories and files contained within the 'pathname' directory. """ res = _list_dir(pathname) @@ -166,20 +156,20 @@ def _expand(pathname): if os.path.basename(pathname): yield ("dir", os.path.join(pathname, "")) - for f in files: - path = os.path.join(pathname, f) + for fname in files: + path = os.path.join(pathname, fname) yield ("file", path) - for d in dirs: - path = os.path.join(pathname, d) - for x in _expand(path): - yield x + for dname in dirs: + path = os.path.join(pathname, dname) + for xpath in _expand(path): + yield xpath def _expand_curdir(pathname): - """ - Emit tuples of the form ("dir", dirname) and ("file", filename) - of all directories and files contained within the 'pathname' directory. + """Emit tuples of the form ("dir", dirname) and ("file", filename). + + The result is for all directories and files contained within the 'pathname' directory. The returned pathnames omit a "./" prefix. """ @@ -193,9 +183,9 @@ def _expand_curdir(pathname): # Zero expansion yield ("dir", "") - for f in files: - yield ("file", f) + for fname in files: + yield ("file", fname) - for d in dirs: - for x in _expand(d): - yield x + for dname in dirs: + for xdir in _expand(dname): + yield xdir diff --git a/buildscripts/resmokelib/utils/jscomment.py b/buildscripts/resmokelib/utils/jscomment.py index 43484573fac..67758197c5c 100644 --- a/buildscripts/resmokelib/utils/jscomment.py +++ b/buildscripts/resmokelib/utils/jscomment.py @@ -1,6 +1,4 @@ -""" -Utility for parsing JS comments. 
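
The renamed loop variables above make the canonicalization rules easier to state: consecutive "**" components collapse into one, and "**" mixed into a component is rejected. Demonstrated against the private helpers (shown for illustration only, hence the protected-access pragmas):

    from buildscripts.resmokelib.utils import globstar

    parts = globstar._canonicalize(["a", "**", "**", "b"])  # pylint: disable=protected-access
    print(parts)                           # ['a', '**', 'b']

    print(globstar._find_globstar(parts))  # pylint: disable=protected-access
    # -> 1, the index of the first "**"

    try:
        globstar._canonicalize(["a", "b**", "c"])  # pylint: disable=protected-access
    except ValueError as err:
        print(err)  # Can only specify glob patterns of the form a/**/b
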
-""" +"""Utility for parsing JS comments.""" from __future__ import absolute_import @@ -13,9 +11,9 @@ _JSTEST_TAGS_RE = re.compile(r".*@tags\s*:\s*(\[[^\]]*\])", re.DOTALL) def get_tags(pathname): - """ - Returns the list of tags found in the (JS-style) comments of - 'pathname'. The definition can span multiple lines, use unquoted, + """Return the list of tags found in the (JS-style) comments of 'pathname'. + + The definition can span multiple lines, use unquoted, single-quoted, or double-quoted strings, and use the '#' character for inline commenting. @@ -48,9 +46,10 @@ def get_tags(pathname): return [] -def _strip_jscomments(s): - """ - Given a string 's' that represents the contents after the "@tags:" +def _strip_jscomments(string): + """Strip JS comments from a 'string'. + + Given a string 'string' that represents the contents after the "@tags:" annotation in the JS file, this function returns a string that can be converted to YAML. @@ -69,7 +68,7 @@ def _strip_jscomments(s): yaml_lines = [] - for line in s.splitlines(): + for line in string.splitlines(): # Remove leading whitespace and symbols that commonly appear in JS comments. line = line.lstrip("\t ").lstrip("*/") yaml_lines.append(line) diff --git a/buildscripts/resmokelib/utils/queue.py b/buildscripts/resmokelib/utils/queue.py index da059ffd852..c77692138b1 100644 --- a/buildscripts/resmokelib/utils/queue.py +++ b/buildscripts/resmokelib/utils/queue.py @@ -1,5 +1,4 @@ -""" -Extension to the Queue.Queue class. +"""Extension to the Queue.Queue class. Added support for the join() method to take a timeout. This is necessary in order for KeyboardInterrupt exceptions to get propagated. @@ -9,22 +8,18 @@ See https://bugs.python.org/issue1167930 for more details. from __future__ import absolute_import -import Queue +import Queue as _Queue import time # Exception that is raised when get_nowait() is called on an empty Queue. -Empty = Queue.Empty +Empty = _Queue.Empty -class Queue(Queue.Queue): - """ - A multi-producer, multi-consumer queue. - """ +class Queue(_Queue.Queue): + """A multi-producer, multi-consumer queue.""" - def join(self, timeout=None): - """ - Wait until all items in the queue have been retrieved and processed, - or until 'timeout' seconds have passed. + def join(self, timeout=None): # pylint: disable=arguments-differ + """Wait until all items in the queue have been processed or 'timeout' seconds have passed. The count of unfinished tasks is incremented whenever an item is added to the queue. The count is decremented whenever task_done() is called diff --git a/buildscripts/resmokelib/utils/registry.py b/buildscripts/resmokelib/utils/registry.py index 0a18c556e94..0aa02f4b2b5 100644 --- a/buildscripts/resmokelib/utils/registry.py +++ b/buildscripts/resmokelib/utils/registry.py @@ -1,6 +1,6 @@ -""" -Utility for having class declarations automatically cause a reference to -the class to be stored along with its name. +"""Utility for having class declarations. + +The registry automatically causes a reference to the class to be stored along with its name. This pattern enables the associated class to be looked up later by using its name. @@ -15,23 +15,19 @@ LEAVE_UNREGISTERED = object() def make_registry_metaclass(registry_store): - """ - Returns a new Registry metaclass. - """ + """Return a new Registry metaclass.""" if not isinstance(registry_store, dict): raise TypeError("'registry_store' argument must be a dict") class Registry(type): - """ - A metaclass that stores a reference to all registered classes. 
- """ + """A metaclass that stores a reference to all registered classes.""" - def __new__(meta, class_name, base_classes, class_dict): - """ - Creates and returns a new instance of Registry, which is a - class named 'class_name' derived from 'base_classes' that - defines 'class_dict' as additional attributes. + def __new__(mcs, class_name, base_classes, class_dict): + """Create and returns a new instance of Registry. + + The registry is a class named 'class_name' derived from 'base_classes' + that defines 'class_dict' as additional attributes. The returned class is added to 'registry_store' using class_dict["REGISTERED_NAME"] as the name, or 'class_name' @@ -46,7 +42,7 @@ def make_registry_metaclass(registry_store): """ registered_name = class_dict.setdefault("REGISTERED_NAME", class_name) - cls = type.__new__(meta, class_name, base_classes, class_dict) + cls = type.__new__(mcs, class_name, base_classes, class_dict) if registered_name is not LEAVE_UNREGISTERED: if registered_name in registry_store: diff --git a/buildscripts/resmokelib/utils/scheduler.py b/buildscripts/resmokelib/utils/scheduler.py index 753fc217745..04abafcd330 100644 --- a/buildscripts/resmokelib/utils/scheduler.py +++ b/buildscripts/resmokelib/utils/scheduler.py @@ -1,7 +1,4 @@ -""" -A thread-safe version of sched.scheduler since the class wasn't made -thread-safe until Python 3.3. -""" +"""Thread-safe version of sched.scheduler; the class wasn't made thread-safe until Python 3.3.""" from __future__ import absolute_import @@ -11,34 +8,38 @@ import threading class Scheduler(sched.scheduler): - """ - A thread-safe, general purpose event scheduler. - """ + """A thread-safe, general purpose event scheduler.""" def __init__(self, timefunc, delayfunc): + """Initialize Scheduler.""" sched.scheduler.__init__(self, timefunc, delayfunc) # We use a recursive lock because sched.scheduler.enter() calls sched.scheduler.enterabs(). self._queue_lock = threading.RLock() def enterabs(self, time, priority, action, argument): + """Enterabs.""" with self._queue_lock: return sched.scheduler.enterabs(self, time, priority, action, argument) def enter(self, delay, priority, action, argument): + """Enter.""" with self._queue_lock: return sched.scheduler.enter(self, delay, priority, action, argument) def cancel(self, event): + """Cancel.""" with self._queue_lock: return sched.scheduler.cancel(self, event) def empty(self): + """Empty.""" with self._queue_lock: return sched.scheduler.empty(self) # The implementation for the run() method was adapted from sched.scheduler.run() in Python 3.6. def run(self): + """Run.""" while True: with self._queue_lock: if not self._queue: @@ -62,5 +63,6 @@ class Scheduler(sched.scheduler): @property def queue(self): + """Get Queue.""" with self._queue_lock: return sched.scheduler.queue.fget(self) |