author     Jonathan Abrahams <jonathan@mongodb.com>    2018-03-27 14:30:46 -0400
committer  Jonathan Abrahams <jonathan@mongodb.com>    2018-04-05 14:41:58 -0400
commit     c50c68fef179d9306f1a3432f48985bf20555e38
tree       a1c208329a090c54a8a1f02558b2be87b830a8ab
parent     a5dacf7092f51055dd774a1911a48815bb9a1e0e
download   mongo-c50c68fef179d9306f1a3432f48985bf20555e38.tar.gz

SERVER-23312 Python linting - Lint using pylint, pydocstyle & mypy
-rw-r--r--  .pydocstyle | 12
-rw-r--r--  .pylintrc | 25
-rw-r--r--  SConstruct | 10
-rw-r--r--  buildscripts/.pydocstyle | 7
-rw-r--r--  buildscripts/.pylintrc | 11
-rw-r--r--  buildscripts/__init__.py | 2
-rw-r--r--  buildscripts/aggregate_tracefiles.py | 19
-rwxr-xr-x  buildscripts/aws_ec2.py | 30
-rw-r--r--  buildscripts/burn_in_tests.py | 67
-rw-r--r--  buildscripts/bypass_compile_and_fetch_binaries.py | 36
-rw-r--r--  buildscripts/ciconfig/evergreen.py | 65
-rw-r--r--  buildscripts/ciconfig/tags.py | 20
-rwxr-xr-x  buildscripts/clang_format.py | 96
-rwxr-xr-x  buildscripts/collect_resource_info.py | 7
-rwxr-xr-x  buildscripts/combine_reports.py | 25
-rwxr-xr-x  buildscripts/errorcodes.py | 215
-rwxr-xr-x  buildscripts/eslint.py | 93
-rwxr-xr-x  buildscripts/evergreen_run_tests.py | 48
-rwxr-xr-x  buildscripts/fetch_test_lifecycle.py | 40
-rw-r--r--  buildscripts/gdb/mongo.py | 104
-rw-r--r--  buildscripts/gdb/mongo_lock.py | 134
-rw-r--r--  buildscripts/gdb/mongo_printers.py | 123
-rwxr-xr-x  buildscripts/generate_compile_expansions.py | 25
-rwxr-xr-x  buildscripts/generate_compile_expansions_shared_cache.py | 25
-rw-r--r--  buildscripts/git.py | 23
-rwxr-xr-x  buildscripts/hang_analyzer.py | 347
-rw-r--r--  buildscripts/idl/idl/binder.py | 8
-rw-r--r--  buildscripts/idl/idl/cpp_types.py | 55
-rw-r--r--  buildscripts/idl/idl/enum_types.py | 8
-rw-r--r--  buildscripts/idl/idl/errors.py | 14
-rw-r--r--  buildscripts/idl/idl/generator.py | 66
-rw-r--r--  buildscripts/idl/idl/parser.py | 14
-rw-r--r--  buildscripts/idl/idl/struct_types.py | 19
-rw-r--r--  buildscripts/idl/idlc.py | 2
-rw-r--r--  buildscripts/idl/tests/testcase.py | 5
-rw-r--r--  buildscripts/jiraclient.py | 6
-rwxr-xr-x  buildscripts/lifecycle_test_failures.py (renamed from buildscripts/test_failures.py) | 140
-rw-r--r--  buildscripts/lint.py | 49
-rw-r--r--  buildscripts/linter/base.py | 6
-rw-r--r--  buildscripts/linter/git.py | 6
-rw-r--r--  buildscripts/linter/mypy.py | 17
-rw-r--r--  buildscripts/linter/pydocstyle.py | 2
-rw-r--r--  buildscripts/linter/pylint.py | 12
-rw-r--r--  buildscripts/linter/runner.py | 6
-rwxr-xr-x  buildscripts/make_archive.py | 53
-rw-r--r--  buildscripts/make_vcxproj.py | 58
-rw-r--r--  buildscripts/moduleconfig.py | 32
-rwxr-xr-x  buildscripts/mongosymb.py | 65
-rw-r--r--  buildscripts/msitrim.py | 64
-rwxr-xr-x  buildscripts/packager.py | 349
-rwxr-xr-x  buildscripts/packager_enterprise.py (renamed from buildscripts/packager-enterprise.py) | 149
-rw-r--r--  buildscripts/promote_silent_failures.py | 5
-rw-r--r--  buildscripts/prune_check.py | 60
-rwxr-xr-x  buildscripts/pylinters.py | 14
-rwxr-xr-x  buildscripts/remote_operations.py | 43
-rw-r--r--  buildscripts/requirements.txt | 6
-rwxr-xr-x  buildscripts/resmoke.py | 47
-rw-r--r--  buildscripts/resmokeconfig/__init__.py | 1
-rw-r--r--  buildscripts/resmokeconfig/loggers/__init__.py | 11
-rw-r--r--  buildscripts/resmokeconfig/suites/__init__.py | 11
-rw-r--r--  buildscripts/resmokelib/config.py | 24
-rw-r--r--  buildscripts/resmokelib/core/__init__.py | 1
-rw-r--r--  buildscripts/resmokelib/core/network.py | 32
-rw-r--r--  buildscripts/resmokelib/core/pipe.py | 25
-rw-r--r--  buildscripts/resmokelib/core/process.py | 39
-rw-r--r--  buildscripts/resmokelib/core/programs.py | 65
-rw-r--r--  buildscripts/resmokelib/errors.py | 56
-rw-r--r--  buildscripts/resmokelib/logging/__init__.py | 4
-rw-r--r--  buildscripts/resmokelib/logging/buildlogger.py | 99
-rw-r--r--  buildscripts/resmokelib/logging/flush.py | 76
-rw-r--r--  buildscripts/resmokelib/logging/formatters.py | 17
-rw-r--r--  buildscripts/resmokelib/logging/handlers.py | 64
-rw-r--r--  buildscripts/resmokelib/logging/loggers.py | 45
-rw-r--r--  buildscripts/resmokelib/parser.py | 34
-rw-r--r--  buildscripts/resmokelib/reportfile.py | 9
-rw-r--r--  buildscripts/resmokelib/selector.py | 122
-rw-r--r--  buildscripts/resmokelib/sighandler.py | 45
-rw-r--r--  buildscripts/resmokelib/suitesconfig.py | 14
-rw-r--r--  buildscripts/resmokelib/testing/__init__.py | 5
-rw-r--r--  buildscripts/resmokelib/testing/executor.py | 70
-rw-r--r--  buildscripts/resmokelib/testing/fixtures/__init__.py | 6
-rw-r--r--  buildscripts/resmokelib/testing/fixtures/interface.py | 96
-rw-r--r--  buildscripts/resmokelib/testing/fixtures/replicaset.py | 75
-rw-r--r--  buildscripts/resmokelib/testing/fixtures/shardedcluster.py | 78
-rw-r--r--  buildscripts/resmokelib/testing/fixtures/standalone.py | 22
-rw-r--r--  buildscripts/resmokelib/testing/fixtures/yesfixture.py | 13
-rw-r--r--  buildscripts/resmokelib/testing/hook_test_archival.py | 19
-rw-r--r--  buildscripts/resmokelib/testing/hooks/__init__.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/hooks/check_primary.py | 7
-rw-r--r--  buildscripts/resmokelib/testing/hooks/cleanup.py | 15
-rw-r--r--  buildscripts/resmokelib/testing/hooks/combine_benchmark_results.py | 35
-rw-r--r--  buildscripts/resmokelib/testing/hooks/dbhash.py | 17
-rw-r--r--  buildscripts/resmokelib/testing/hooks/initialsync.py | 36
-rw-r--r--  buildscripts/resmokelib/testing/hooks/interface.py | 53
-rw-r--r--  buildscripts/resmokelib/testing/hooks/jsfile.py | 24
-rw-r--r--  buildscripts/resmokelib/testing/hooks/oplog.py | 19
-rw-r--r--  buildscripts/resmokelib/testing/hooks/periodic_kill_secondaries.py | 26
-rw-r--r--  buildscripts/resmokelib/testing/hooks/stepdown.py | 32
-rw-r--r--  buildscripts/resmokelib/testing/hooks/validate.py | 18
-rw-r--r--  buildscripts/resmokelib/testing/job.py | 70
-rw-r--r--  buildscripts/resmokelib/testing/report.py | 131
-rw-r--r--  buildscripts/resmokelib/testing/suite.py | 113
-rw-r--r--  buildscripts/resmokelib/testing/summary.py | 8
-rw-r--r--  buildscripts/resmokelib/testing/testcases/__init__.py | 6
-rw-r--r--  buildscripts/resmokelib/testing/testcases/benchmark_test.py | 16
-rw-r--r--  buildscripts/resmokelib/testing/testcases/cpp_integration_test.py | 13
-rw-r--r--  buildscripts/resmokelib/testing/testcases/cpp_unittest.py | 12
-rw-r--r--  buildscripts/resmokelib/testing/testcases/dbtest.py | 16
-rw-r--r--  buildscripts/resmokelib/testing/testcases/fsm_workload_test.py | 7
-rw-r--r--  buildscripts/resmokelib/testing/testcases/interface.py | 81
-rw-r--r--  buildscripts/resmokelib/testing/testcases/json_schema_test.py | 7
-rw-r--r--  buildscripts/resmokelib/testing/testcases/jsrunnerfile.py | 15
-rw-r--r--  buildscripts/resmokelib/testing/testcases/jstest.py | 53
-rw-r--r--  buildscripts/resmokelib/testing/testcases/mongos_test.py | 16
-rw-r--r--  buildscripts/resmokelib/testing/testcases/pytest.py | 8
-rw-r--r--  buildscripts/resmokelib/testing/testcases/sleeptest.py | 14
-rw-r--r--  buildscripts/resmokelib/utils/__init__.py | 38
-rw-r--r--  buildscripts/resmokelib/utils/archival.py | 48
-rw-r--r--  buildscripts/resmokelib/utils/autoloader.py | 7
-rw-r--r--  buildscripts/resmokelib/utils/globstar.py | 84
-rw-r--r--  buildscripts/resmokelib/utils/jscomment.py | 19
-rw-r--r--  buildscripts/resmokelib/utils/queue.py | 19
-rw-r--r--  buildscripts/resmokelib/utils/registry.py | 26
-rw-r--r--  buildscripts/resmokelib/utils/scheduler.py | 16
-rwxr-xr-x  buildscripts/scons.py | 13
-rw-r--r--  buildscripts/scons_cache_prune.py | 46
-rwxr-xr-x  buildscripts/setup_multiversion_mongodb.py | 73
-rw-r--r--  buildscripts/tests/ciconfig/test_evergreen.py | 2
-rw-r--r--  buildscripts/tests/ciconfig/test_tags.py | 2
-rw-r--r--  buildscripts/tests/resmokelib/logging/__init__.py | 1
-rw-r--r--  buildscripts/tests/resmokelib/logging/test_buildlogger.py | 2
-rw-r--r--  buildscripts/tests/resmokelib/test_archival.py | 2
-rw-r--r--  buildscripts/tests/resmokelib/test_selector.py | 11
-rw-r--r--  buildscripts/tests/resmokelib/testing/fixtures/__init__.py | 1
-rw-r--r--  buildscripts/tests/resmokelib/testing/fixtures/test_interface.py | 6
-rwxr-xr-x  buildscripts/tests/resmokelib/testing/hooks/test_combine_benchmark_results.py | 27
-rwxr-xr-x  buildscripts/tests/test_aws_ec2.py | 4
-rw-r--r--  buildscripts/tests/test_fetch_test_lifecycle.py | 4
-rw-r--r--  buildscripts/tests/test_git.py | 6
-rw-r--r--  buildscripts/tests/test_lifecycle_test_failures.py (renamed from buildscripts/tests/test_test_failures.py) | 4
-rwxr-xr-x  buildscripts/tests/test_remote_operations.py | 8
-rw-r--r--  buildscripts/tests/test_update_test_lifecycle.py | 6
-rwxr-xr-x  buildscripts/update_test_lifecycle.py | 128
-rw-r--r--  buildscripts/utils.py | 87
-rwxr-xr-x  buildscripts/yaml_key_value.py | 7
-rw-r--r--  etc/evergreen.yml | 76
-rw-r--r--  mypy.ini | 20
-rwxr-xr-x  pytests/powertest.py | 610
-rw-r--r--  pytests/requirements.txt | 3
149 files changed, 3141 insertions, 3302 deletions
diff --git a/.pydocstyle b/.pydocstyle
new file mode 100644
index 00000000000..dc8da3eaf1f
--- /dev/null
+++ b/.pydocstyle
@@ -0,0 +1,12 @@
+# See https://readthedocs.org/projects/pydocstyle/
+[pydocstyle]
+inherit = false
+# D105 - Missing docstring in magic method
+# D202 - No blank lines allowed after function docstring
+# D203 - 1 blank line required before class docstring
+# D212 - Multi-line docstring summary should start at the first line
+# D213 - Multi-line docstring summary should start at the second line
+# D301 - Use r""" if any backslashes in a docstring
+ignore = D105,D202,D203,D212,D213,D301
+# Do not run on buildscripts/tests/
+match = ^((?!buildscripts\/tests\/).)*$
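
Note on the match pattern above: the negative lookahead runs at every character position, so any path containing buildscripts/tests/ fails to match and is skipped. A minimal, self-contained demonstration of how the regex itself behaves (the sample paths are illustrative, not taken from the commit):

    import re

    # Same pattern as the 'match' setting above.
    MATCH_RE = re.compile(r"^((?!buildscripts\/tests\/).)*$")

    # A path outside the excluded tree matches, so it gets checked.
    print(bool(MATCH_RE.match("buildscripts/eslint.py")))          # True
    # Any path under buildscripts/tests/ trips the lookahead and is skipped.
    print(bool(MATCH_RE.match("buildscripts/tests/test_git.py")))  # False
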
diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 00000000000..7bd38008874
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,25 @@
+# See https://www.pylint.org/
+[BASIC]
+# Permit 2 character & long argument names, like db
+argument-rgx=[a-z_][a-z0-9_]{1,50}$
+# Long attribute names
+attr-rgx=[a-z_][a-z0-9_]{2,50}$
+# Long function names
+function-rgx=[a-z_][a-z0-9_]{2,50}$
+# Long method names
+method-rgx=[a-z_][a-z0-9_]{2,50}$
+# Permit 2 character & long variable names, like sb
+variable-rgx=[a-z_][a-z0-9_]{1,50}$
+
+[MESSAGES CONTROL]
+# C0301 - line-too-long - some of the type annotations are longer than 100 columns
+# C0330 - bad-continuation - ignore conflicts produced by yapf formatting
+# E0401 - import-error - ignore imports that fail to load
+# E1101 - no-member - ignore possible no-member warnings
+# I0011 - locally-disabled - ignore warnings about disabled pylint checks
+# R0204 - redefined-variable-type
+# R0903 - too-few-public-methods - pylint does not always know best
+# W0511 - fixme - ignore TODOs in comments
+# W0611 - unused-import - typing module is needed for mypy
+
+disable=bad-continuation,fixme,import-error,line-too-long,no-member,locally-disabled,redefined-variable-type,too-few-public-methods,unused-import
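
The naming rules above differ only in the repetition bound: {1,50} for arguments and variables versus {2,50} for functions, methods, and attributes. That is what admits two-character names like db or sb as arguments while still rejecting them as function names. A quick sketch (the sample names are hypothetical):

    import re

    ARGUMENT_RGX = re.compile(r"[a-z_][a-z0-9_]{1,50}$")  # 2 to 51 chars total
    FUNCTION_RGX = re.compile(r"[a-z_][a-z0-9_]{2,50}$")  # 3 to 51 chars total

    print(bool(ARGUMENT_RGX.match("db")))         # True: short argument names pass
    print(bool(FUNCTION_RGX.match("db")))         # False: too short for a function
    print(bool(FUNCTION_RGX.match("run_tests")))  # True
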
diff --git a/SConstruct b/SConstruct
index 723880d2e21..16a9e30b31b 100644
--- a/SConstruct
+++ b/SConstruct
@@ -509,7 +509,7 @@ try:
print("version.json does not contain a version string")
Exit(1)
if 'githash' not in version_data:
- version_data['githash'] = utils.getGitVersion()
+ version_data['githash'] = utils.get_git_version()
except IOError as e:
# If the file error wasn't because the file is missing, error out
@@ -518,8 +518,8 @@ except IOError as e:
Exit(1)
version_data = {
- 'version': utils.getGitDescribe()[1:],
- 'githash': utils.getGitVersion(),
+ 'version': utils.get_git_describe()[1:],
+ 'githash': utils.get_git_version(),
}
except ValueError as e:
@@ -3210,8 +3210,8 @@ if incremental_link.exists(env):
def checkErrorCodes():
import buildscripts.errorcodes as x
- if x.checkErrorCodes() == False:
- env.FatalError("next id to use: {0}", x.getNextCode())
+ if x.check_error_codes() == False:
+ env.FatalError("next id to use: {0}", x.get_next_code())
checkErrorCodes()
diff --git a/buildscripts/.pydocstyle b/buildscripts/.pydocstyle
deleted file mode 100644
index fdff3f6c351..00000000000
--- a/buildscripts/.pydocstyle
+++ /dev/null
@@ -1,7 +0,0 @@
-# See https://readthedocs.org/projects/pydocstyle/
-[pydocstyle]
-inherit = false
-# D202 - No blank lines allowed after function docstring
-# D203 - 1 blank line required before class docstring
-# D212 - Multi-line docstring summary should start at the first line
-ignore = D202,D203,D212
diff --git a/buildscripts/.pylintrc b/buildscripts/.pylintrc
deleted file mode 100644
index 23cbf07e3cf..00000000000
--- a/buildscripts/.pylintrc
+++ /dev/null
@@ -1,11 +0,0 @@
-# See https://www.pylint.org/
-[MESSAGES CONTROL]
-# C0301 - line-too-long - some of the type annotations are longer then 100 columns
-# C0330 - bad-continuation - ignore conflicts produced by yapf formatting
-# E0401 - import-error - ignore imports that fail to load
-# I0011 - locally-disabled - ignore warnings about disable pylint checks
-# R0903 - too-few-public-method - pylint does not always know best
-# W0511 - fixme - ignore TODOs in comments
-# W0611 - unused-import - typing module is needed for mypy
-
-disable=bad-continuation,fixme,import-error,line-too-long,locally-disabled,too-few-public-methods,unused-import
diff --git a/buildscripts/__init__.py b/buildscripts/__init__.py
index 8b137891791..4b7a2bb941b 100644
--- a/buildscripts/__init__.py
+++ b/buildscripts/__init__.py
@@ -1 +1 @@
-
+"""Empty."""
diff --git a/buildscripts/aggregate_tracefiles.py b/buildscripts/aggregate_tracefiles.py
index 8ff46194ebc..ef2c2d02615 100644
--- a/buildscripts/aggregate_tracefiles.py
+++ b/buildscripts/aggregate_tracefiles.py
@@ -1,15 +1,18 @@
+"""Aggregate_tracefiles module.
+
+This script aggregates several tracefiles into one tracefile.
+All but the last argument are input tracefiles or .txt files which list tracefiles.
+The last argument is the tracefile to which the output will be written.
+"""
+
import subprocess
import os
import sys
from optparse import OptionParser
-""" This script aggregates several tracefiles into one tracefile
- All but the last argument are input tracefiles or .txt files which list tracefiles.
- The last argument is the tracefile to which the output will be written
-"""
def aggregate(inputs, output):
- """Aggregates the tracefiles given in inputs to a tracefile given by output"""
+ """Aggregate the tracefiles given in inputs to a tracefile given by output."""
args = ['lcov']
for name in inputs:
@@ -23,23 +26,25 @@ def aggregate(inputs, output):
def getfilesize(path):
+ """Return file size of 'path'."""
if not os.path.isfile(path):
return 0
return os.path.getsize(path)
def main():
+ """Execute Main entry."""
inputs = []
usage = "usage: %prog input1.info input2.info ... output.info"
parser = OptionParser(usage=usage)
- (options, args) = parser.parse_args()
+ (_, args) = parser.parse_args()
if len(args) < 2:
return "must supply input files"
for path in args[:-1]:
- name, ext = os.path.splitext(path)
+ _, ext = os.path.splitext(path)
if ext == '.info':
if getfilesize(path) > 0:
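
The aggregate() function above builds an lcov command line from the input tracefiles; the hunk only shows the argument list being started. The following is a hedged sketch of how such an invocation is commonly assembled; the -a (--add-tracefile) and -o (--output-file) flags are standard lcov options, not necessarily the script's exact arguments:

    import subprocess

    def aggregate_sketch(inputs, output):
        """Combine tracefiles roughly as: lcov -a in1.info -a in2.info -o out.info."""
        args = ['lcov']
        for name in inputs:
            args += ['-a', name]  # add each input tracefile
        args += ['-o', output]    # write the merged tracefile
        return subprocess.call(args)
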
diff --git a/buildscripts/aws_ec2.py b/buildscripts/aws_ec2.py
index e7ff255c119..8a92749d531 100755
--- a/buildscripts/aws_ec2.py
+++ b/buildscripts/aws_ec2.py
@@ -25,6 +25,7 @@ class AwsEc2(object):
])
def __init__(self):
+ """Initialize AwsEc2."""
try:
self.connection = boto3.resource("ec2")
except botocore.exceptions.BotoCoreError:
@@ -36,7 +37,9 @@ class AwsEc2(object):
@staticmethod
def wait_for_state(instance, state, wait_time_secs=0, show_progress=False):
"""Wait up to 'wait_time_secs' for instance to be in 'state'.
- Return 0 if 'state' reached, 1 otherwise."""
+
+ Return 0 if 'state' reached, 1 otherwise.
+ """
if show_progress:
print("Waiting for instance {} to reach '{}' state".format(instance, state), end="",
file=sys.stdout)
@@ -72,7 +75,7 @@ class AwsEc2(object):
return 0 if reached_state else 1
def control_instance(self, mode, image_id, wait_time_secs=0, show_progress=False):
- """Controls an AMI instance. Returns 0 & status information, if successful."""
+ """Control an AMI instance. Returns 0 & status information, if successful."""
if mode not in _MODES:
raise ValueError("Invalid mode '{}' specified, choose from {}.".format(mode, _MODES))
@@ -119,7 +122,7 @@ class AwsEc2(object):
return ret, status
def tag_instance(self, image_id, tags):
- """Tags an AMI instance. """
+ """Tag an AMI instance."""
if tags:
# It's possible that ClientError code InvalidInstanceID.NotFound could be returned,
# even if the 'image_id' exists. We will retry up to 5 times, with increasing wait,
@@ -135,12 +138,14 @@ class AwsEc2(object):
time.sleep(i + 1)
instance.create_tags(Tags=tags)
- def launch_instance(self, ami, instance_type, block_devices=None, key_name=None,
- security_group_ids=None, security_groups=None, subnet_id=None, tags=None,
- wait_time_secs=0, show_progress=False, **kwargs):
- """Launches and tags an AMI instance.
+ def launch_instance( # pylint: disable=too-many-arguments,too-many-locals
+ self, ami, instance_type, block_devices=None, key_name=None, security_group_ids=None,
+ security_groups=None, subnet_id=None, tags=None, wait_time_secs=0, show_progress=False,
+ **kwargs):
+ """Launch and tag an AMI instance.
- Returns the tuple (0, status_information), if successful."""
+ Return the tuple (0, status_information), if successful.
+ """
bdms = []
if block_devices is None:
@@ -177,8 +182,8 @@ class AwsEc2(object):
return self.control_instance("status", instance.instance_id)
-def main():
- """Main program."""
+def main(): # pylint: disable=too-many-locals,too-many-statements
+ """Execute Main program."""
required_create_options = ["ami", "key_name"]
@@ -242,9 +247,7 @@ def main():
" bracketed YAML - i.e. JSON with support for single quoted"
" and unquoted keys. Example, '{DryRun: True}'"))
- status_options.add_option("--yamlFile",
- dest="yaml_file",
- default=None,
+ status_options.add_option("--yamlFile", dest="yaml_file", default=None,
help="Save the status into the specified YAML file.")
parser.add_option_group(control_options)
@@ -309,5 +312,6 @@ def main():
print(yaml.safe_dump(status_dict))
+
if __name__ == "__main__":
main()
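
The tag_instance() hunk above retries because AWS can transiently report InvalidInstanceID.NotFound for an instance that does exist, sleeping i + 1 seconds between attempts. The same linear-backoff pattern in isolation (the helper and its broad exception handling are a generic sketch, not the script's code):

    import time

    def retry_linear_backoff(operation, num_tries=5):
        """Run 'operation', sleeping 1s, 2s, ... between failed attempts."""
        for i in range(num_tries):
            try:
                return operation()
            except Exception:  # the real code would catch the specific AWS client error
                if i == num_tries - 1:
                    raise
                time.sleep(i + 1)
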
diff --git a/buildscripts/burn_in_tests.py b/buildscripts/burn_in_tests.py
index 87a4098b87b..a1875c09995 100644
--- a/buildscripts/burn_in_tests.py
+++ b/buildscripts/burn_in_tests.py
@@ -1,7 +1,5 @@
#!/usr/bin/env python
-"""
-Command line utility for determining what jstests have been added or modified
-"""
+"""Command line utility for determining what jstests have been added or modified."""
from __future__ import absolute_import
@@ -12,22 +10,24 @@ import optparse
import os.path
import subprocess
import re
-import requests
import shlex
import sys
import urlparse
+
+import requests
import yaml
# Get relative imports to work when the package is not installed on the PYTHONPATH.
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-from buildscripts import resmokelib
-from buildscripts.ciconfig import evergreen
+from buildscripts import resmokelib # pylint: disable=wrong-import-position
+from buildscripts.ciconfig import evergreen # pylint: disable=wrong-import-position
API_SERVER_DEFAULT = "https://evergreen.mongodb.com"
def parse_command_line():
+ """Parse command line options."""
parser = optparse.OptionParser(usage="Usage: %prog [options] [resmoke command]")
parser.add_option("--maxRevisions", dest="max_revisions",
@@ -75,12 +75,12 @@ def parse_command_line():
def callo(args):
- """Call a program, and capture its output
- """
+ """Call a program, and capture its output."""
return subprocess.check_output(args)
def read_evg_config():
+ """Read evg config file."""
# Expand out evergreen config file possibilities
file_list = [
"./.evergreen.yml",
@@ -96,7 +96,7 @@ def read_evg_config():
def find_last_activated_task(revisions, variant, branch_name):
- """ Get the git hash of the most recently activated build before this one """
+ """Get the git hash of the most recently activated build before this one."""
rest_prefix = "/rest/v1/"
project = "mongodb-mongo-" + branch_name
build_prefix = "mongodb_mongo_" + branch_name + "_" + variant.replace('-', '_')
@@ -121,15 +121,16 @@ def find_last_activated_task(revisions, variant, branch_name):
build_data = build_resp.json()
if build_data["activated"]:
return build_data["revision"]
- except:
+ except: # pylint: disable=bare-except
# Sometimes build data is incomplete, as was the related build.
- next
+ pass
return None
def find_changed_tests(branch_name, base_commit, max_revisions, buildvariant, check_evergreen):
- """
+ """Find the changed tests.
+
Use git to find which files have changed in this patch.
TODO: This should be expanded to search for enterprise modules.
The returned file paths are in normalized form (see os.path.normpath(path)).
@@ -172,7 +173,7 @@ def find_changed_tests(branch_name, base_commit, max_revisions, buildvariant, ch
# The lines with untracked files start with '?? '.
for line in untracked_files:
if line.startswith("?"):
- (status, line) = line.split(" ", 1)
+ (_, line) = line.split(" ", 1)
changed_files.append(line)
for line in changed_files:
@@ -187,9 +188,7 @@ def find_changed_tests(branch_name, base_commit, max_revisions, buildvariant, ch
def find_exclude_tests(selector_file):
- """
- Parses etc/burn_in_tests.yml. Returns lists of excluded suites, tasks & tests.
- """
+ """Parse etc/burn_in_tests.yml. Returns lists of excluded suites, tasks & tests."""
if not selector_file:
return ([], [], [])
@@ -209,8 +208,8 @@ def find_exclude_tests(selector_file):
def filter_tests(tests, exclude_tests):
- """
- Excludes tests which have been blacklisted.
+ """Exclude tests which have been blacklisted.
+
A test is in the tests list, i.e., ['jstests/core/a.js']
The tests paths must be in normalized form (see os.path.normpath(path)).
"""
@@ -227,7 +226,8 @@ def filter_tests(tests, exclude_tests):
def find_tests_by_executor(suites):
- """
+ """Find tests by executor.
+
Looks up what other resmoke suites run the tests specified in the suites
parameter. Returns a dict keyed by test name, value is array of suite names.
"""
@@ -241,7 +241,8 @@ def find_tests_by_executor(suites):
def create_executor_list(suites, exclude_suites):
- """
+ """Create the executor list.
+
Looks up what other resmoke suites run the tests specified in the suites
parameter. Returns a dict keyed by suite name / executor, value is tests
to run under that executor.
@@ -257,8 +258,7 @@ def create_executor_list(suites, exclude_suites):
def create_task_list(evergreen_conf, buildvariant, suites, exclude_tasks):
- """
- Finds associated tasks for the specified buildvariant and suites.
+ """Find associated tasks for the specified buildvariant and suites.
Returns a dict keyed by task_name, with executor, resmoke_args & tests, i.e.,
{'jsCore_small_oplog':
@@ -288,25 +288,25 @@ def create_task_list(evergreen_conf, buildvariant, suites, exclude_tasks):
for suite in suites.keys():
for task_name, task_arg in variant_task_args.items():
# Find the resmoke_args for matching suite names.
- if re.compile('--suites=' + suite + '(?:\s+|$)').match(task_arg):
+ if re.compile('--suites=' + suite + r'(?:\s+|$)').match(task_arg):
tasks_to_run[task_name] = {"resmoke_args": task_arg, "tests": suites[suite]}
return tasks_to_run
def _write_report_file(tests_by_executor, pathname):
- """
- Writes out a JSON file containing the tests_by_executor dict. This should
- be done during the compile task when the git repo is available.
+ """Write out a JSON file containing the tests_by_executor dict.
+
+ This should be done during the compile task when the git repo is available.
"""
with open(pathname, "w") as fstream:
json.dump(tests_by_executor, fstream)
def _load_tests_file(pathname):
- """
- Load the list of tests and executors from the specified file. The file might
- not exist, and this is fine. The task running this becomes a nop.
+ """Load the list of tests and executors from the specified file.
+
+ The file might not exist, and this is fine. The task running this becomes a nop.
"""
if not os.path.isfile(pathname):
return None
@@ -315,12 +315,12 @@ def _load_tests_file(pathname):
def _save_report_data(saved_data, pathname, task):
- """
- Read in the report file from the previous resmoke.py run if it exists. We'll concat it to the
- passed saved_data dict.
+ """Read in the report file from the previous resmoke.py run if it exists.
+
+ We'll concat it to the passed saved_data dict.
"""
if not os.path.isfile(pathname):
- return None
+ return
with open(pathname, "r") as fstream:
current_data = json.load(fstream)
@@ -332,6 +332,7 @@ def _save_report_data(saved_data, pathname, task):
def main():
+ """Execute Main program."""
values, args = parse_command_line()
# If a resmoke.py command wasn't passed in, use a simple version.
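
One fix above replaces a bare next statement with pass in the except block. A bare next merely evaluates the builtin and discards it; it does not advance the loop, so the old code was already a no-op, just a misleading one. A small demonstration (the loop contents are illustrative):

    results = []
    for value in [1, 2, 3]:
        try:
            if value == 2:
                raise ValueError("incomplete build data")
        except ValueError:
            next  # no-op: names the builtin, changes nothing
        results.append(value)

    print(results)  # [1, 2, 3] -- nothing was skipped; 'continue' would skip
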
diff --git a/buildscripts/bypass_compile_and_fetch_binaries.py b/buildscripts/bypass_compile_and_fetch_binaries.py
index 0612367d498..7c46063a595 100644
--- a/buildscripts/bypass_compile_and_fetch_binaries.py
+++ b/buildscripts/bypass_compile_and_fetch_binaries.py
@@ -1,4 +1,5 @@
#!/usr/bin/env python
+"""Bypass compile and fetch binaries."""
from __future__ import absolute_import
from __future__ import print_function
@@ -7,15 +8,16 @@ import argparse
import json
import os
import re
-import shutil
import sys
import tarfile
import urllib
+# pylint: disable=ungrouped-imports
try:
from urlparse import urlparse
except ImportError:
- from urllib.parse import urlparse
+ from urllib.parse import urlparse # type: ignore
+# pylint: enable=ungrouped-imports
import requests
import yaml
@@ -24,6 +26,7 @@ _IS_WINDOWS = (sys.platform == "win32" or sys.platform == "cygwin")
def executable_name(pathname):
+ """Return the executable name."""
# Ensure that executable files on Windows have a ".exe" extension.
if _IS_WINDOWS and os.path.splitext(pathname)[1] != ".exe":
return "{}.exe".format(pathname)
@@ -31,6 +34,7 @@ def executable_name(pathname):
def archive_name(archive):
+ """Return the archive name."""
# Ensure the right archive extension is used for Windows.
if _IS_WINDOWS:
return "{}.zip".format(archive)
@@ -38,6 +42,7 @@ def archive_name(archive):
def requests_get_json(url):
+ """Return the JSON response."""
response = requests.get(url)
response.raise_for_status()
@@ -49,9 +54,9 @@ def requests_get_json(url):
def read_evg_config():
- """
- Attempts to parse the Evergreen configuration from its home location.
- Returns None if the configuration file wasn't found.
+ """Attempt to parse the Evergreen configuration from its home location.
+
+ Return None if the configuration file wasn't found.
"""
evg_file = os.path.expanduser("~/.evergreen.yml")
if os.path.isfile(evg_file):
@@ -62,18 +67,14 @@ def read_evg_config():
def write_out_bypass_compile_expansions(patch_file, **expansions):
- """
- Write out the macro expansions to given file.
- """
+ """Write out the macro expansions to given file."""
with open(patch_file, "w") as out_file:
print("Saving compile bypass expansions to {0}: ({1})".format(patch_file, expansions))
yaml.safe_dump(expansions, out_file, default_flow_style=False)
def write_out_artifacts(json_file, artifacts):
- """
- Write out the JSON file with URLs of artifacts to given file.
- """
+ """Write out the JSON file with URLs of artifacts to given file."""
with open(json_file, "w") as out_file:
print("Generating artifacts.json from pre-existing artifacts {0}".format(
json.dumps(artifacts, indent=4)))
@@ -81,6 +82,7 @@ def write_out_artifacts(json_file, artifacts):
def generate_bypass_expansions(project, build_variant, revision, build_id):
+ """Perform the generate bypass expansions."""
expansions = {}
# With compile bypass we need to update the URL to point to the correct name of the base commit
# binaries.
@@ -103,8 +105,7 @@ def generate_bypass_expansions(project, build_variant, revision, build_id):
def should_bypass_compile():
- """
- Based on the modified patch files determine whether the compile stage should be bypassed.
+ """Determine whether the compile stage should be bypassed based on the modified patch files.
We use lists of files and directories to more precisely control which modified patch files will
lead to compile bypass.
@@ -133,7 +134,7 @@ def should_bypass_compile():
"buildscripts/make_archive.py",
"buildscripts/moduleconfig.py",
"buildscripts/msitrim.py",
- "buildscripts/packager-enterprise.py",
+ "buildscripts/packager_enterprise.py",
"buildscripts/packager.py",
"buildscripts/scons.py",
"buildscripts/utils.py",
@@ -171,6 +172,7 @@ def should_bypass_compile():
def parse_args():
+ """Parse the program arguments."""
parser = argparse.ArgumentParser()
parser.add_argument("--project", required=True,
help="The Evergreen project. e.g mongodb-mongo-master")
@@ -192,8 +194,9 @@ def parse_args():
return parser.parse_args()
-def main():
- """
+def main(): # pylint: disable=too-many-locals,too-many-statements
+ """Execute Main entry.
+
From the /rest/v1/projects/{project}/revisions/{revision} endpoint find an existing build id
to generate the compile task id to use for retrieving artifacts when bypassing compile.
@@ -225,6 +228,7 @@ def main():
# Evergreen only contain "_". Replace the hyphens before searching for the build.
prefix = prefix.replace("-", "_")
build_id_pattern = re.compile(prefix)
+ build_id = None
for build_id in revisions["builds"]:
# Find a suitable build_id
match = build_id_pattern.search(build_id)
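
The added build_id = None line above guards the code that runs after the loop: if revisions["builds"] were empty, the loop variable would never be bound and any later read of build_id would raise NameError. The pitfall in isolation (the data and the check are illustrative):

    builds = []  # suppose the endpoint returned no builds

    build_id = None  # bound even when the loop body never executes
    for build_id in builds:
        if "mongodb_mongo_master" in build_id:
            break

    if build_id is None:
        print("no suitable build found")  # without the guard: NameError here
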
diff --git a/buildscripts/ciconfig/evergreen.py b/buildscripts/ciconfig/evergreen.py
index 3c41d6e2012..16af4226078 100644
--- a/buildscripts/ciconfig/evergreen.py
+++ b/buildscripts/ciconfig/evergreen.py
@@ -1,4 +1,5 @@
"""API to parse and access the configuration present in a evergreen.yml file.
+
The API also provides methods to access specific fields present in the mongodb/mongo
configuration file.
"""
@@ -31,7 +32,7 @@ class EvergreenProjectConfig(object):
@property
def task_names(self):
- """The list of task names."""
+ """Get the list of task names."""
return self._tasks_by_name.keys()
def get_task(self, task_name):
@@ -40,7 +41,7 @@ class EvergreenProjectConfig(object):
@property
def lifecycle_task_names(self):
- """The list of names of the tasks that have not been excluded from test lifecycle."""
+ """Get the list of names of the tasks that have not been excluded from test lifecycle."""
excluded = self._get_test_lifecycle_excluded_task_names()
return [name for name in self.task_names if name not in excluded]
@@ -53,7 +54,7 @@ class EvergreenProjectConfig(object):
@property
def variant_names(self):
- """The list of build variant names."""
+ """Get the list of build variant names."""
return self._variants_by_name.keys()
def get_variant(self, variant_name):
@@ -70,19 +71,17 @@ class Task(object):
@property
def name(self):
- """The task name."""
+ """Get the task name."""
return self.raw["name"]
@property
def depends_on(self):
- """The list of task names this task depends on."""
+ """Get the list of task names this task depends on."""
return self.raw.get("depends_on", [])
@property
def resmoke_args(self):
- """The value of the resmoke_args argument of the 'run tests' function if it is
- defined, or None.
- """
+ """Get the resmoke_args from 'run tests' function if defined, or None."""
for command in self.raw.get("commands", []):
if command.get("func") == "run tests":
return command.get("vars", {}).get("resmoke_args")
@@ -90,22 +89,21 @@ class Task(object):
@property
def resmoke_suite(self):
- """The value of the --suites option in the resmoke_args argument of the 'run tests'
- function if it is defined, or None. """
+ """Get the --suites option in the resmoke_args of 'run tests' if defined, or None."""
args = self.resmoke_args
if args:
return ResmokeArgs.get_arg(args, "suites")
+ return None
def __str__(self):
return self.name
class Variant(object):
- """Represent a build variant configuration as found in an Evergreen project
- configuration file.
- """
+ """Build variant configuration as found in an Evergreen project configuration file."""
def __init__(self, conf_dict, task_map):
+ """Initialize Variant."""
self.raw = conf_dict
run_on = self.run_on
self.tasks = [
@@ -118,40 +116,44 @@ class Variant(object):
@property
def name(self):
- """The build variant name."""
+ """Get the build variant name."""
return self.raw["name"]
@property
def display_name(self):
- """The build variant display name, or None if not found."""
+ """Get the build variant display name, or None if not found."""
return self.raw.get("display_name")
@property
def batchtime(self):
- """The build variant batchtime parameter as a datetime.timedelta, or None if not found."""
+ """Get the build variant batchtime parameter as datetime.timedelta.
+
+ Return None if the batchtime parameter is not found.
+ """
batchtime = self.raw.get("batchtime")
return datetime.timedelta(minutes=batchtime) if batchtime is not None else None
@property
def modules(self):
- """The build variant modules parameter as a list of module names."""
+ """Get build variant modules parameter as a list of module names."""
modules = self.raw.get("modules")
return modules if modules is not None else []
@property
def run_on(self):
- """The build variant run_on parameter as a list of distro names."""
+ """Get build variant run_on parameter as a list of distro names."""
run_on = self.raw.get("run_on")
return run_on if run_on is not None else []
@property
def task_names(self):
- """The list of task names."""
+ """Get list of task names."""
return [t.name for t in self.tasks]
def get_task(self, task_name):
- """Return the task with the given name as an instance of VariantTask or None if this
- variant does not run the task.
+ """Return the task with the given name as an instance of VariantTask.
+
+ Return None if this variant does not run the task.
"""
for task in self.tasks:
if task.name == task_name:
@@ -169,12 +171,12 @@ class Variant(object):
@property
def test_flags(self):
- """Return the value of the test_flags expansion or None if not found."""
+ """Get the value of the test_flags expansion or None if not found."""
return self.expansion("test_flags")
@property
def num_jobs_available(self):
- """Return the value of the num_jobs_available expansion or None if not found."""
+ """Get the value of the num_jobs_available expansion or None if not found."""
return self.expansion("num_jobs_available")
@@ -182,16 +184,17 @@ class VariantTask(Task):
"""Represent a task definition in the context of a build variant."""
def __init__(self, task, run_on, variant):
+ """Initialize VariantTask."""
Task.__init__(self, task.raw)
self.run_on = run_on
self.variant = variant
@property
def combined_resmoke_args(self):
- """Return the combined resmoke arguments resulting from the concatenation of the task's
- resmoke_args parameter and the variant's test_flags parameter.
+ """Get the combined resmoke arguments.
- If the task does not have a 'resmoke_args' parameter, then None is returned.
+ This results from the concatenation of the task's resmoke_args parameter and the
+ variant's test_flags parameter.
"""
resmoke_args = self.resmoke_args
test_flags = self.variant.test_flags
@@ -199,16 +202,16 @@ class VariantTask(Task):
return None
elif test_flags is None:
return self.resmoke_args
- else:
- return "{} {}".format(resmoke_args, test_flags)
+ return "{} {}".format(resmoke_args, test_flags)
class ResmokeArgs(object):
+ """ResmokeArgs class."""
+
@staticmethod
def get_arg(resmoke_args, name):
- """Return the value of the option --'name' in the 'resmoke_args' string or
- None if not found.
- """
+ """Return the value from --'name' in the 'resmoke_args' string or None if not found."""
match = re.search(r"--{}[=\s](?P<value>\w+)".format(name), resmoke_args)
if match:
return match.group("value")
+ return None
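
ResmokeArgs.get_arg above extracts one option's value from a resmoke argument string with a single regex, accepting either --name=value or --name value. A usage sketch of the same helper (the argument string is made up):

    import re

    def get_arg(resmoke_args, name):
        """Return the value of --'name' in 'resmoke_args', or None if not found."""
        match = re.search(r"--{}[=\s](?P<value>\w+)".format(name), resmoke_args)
        if match:
            return match.group("value")
        return None

    print(get_arg("--suites=core --storageEngine=wiredTiger", "suites"))  # core
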
diff --git a/buildscripts/ciconfig/tags.py b/buildscripts/ciconfig/tags.py
index 7e9688714f5..1627c117a98 100644
--- a/buildscripts/ciconfig/tags.py
+++ b/buildscripts/ciconfig/tags.py
@@ -119,21 +119,23 @@ class TagsConfig(object):
def getdefault(doc, key, default):
- """Return the value in 'doc' with key 'key' if it is present and not None, returns
- the specified default value otherwise."""
+ """Return the value in 'doc' with key 'key' if present and not None.
+
+ Return the specified default value otherwise.
+ """
value = doc.get(key)
if value is not None:
return value
- else:
- return default
+ return default
def setdefault(doc, key, default):
- """Return the value in 'doc' with key 'key' if it is present and not None, sets the value
- to default and return it otherwise."""
+ """Return the value in 'doc' with key 'key' if present and not None.
+
+ Otherwise set the value to default and return it.
+ """
value = doc.setdefault(key, default)
if value is not None:
return value
- else:
- doc[key] = default
- return default
+ doc[key] = default
+ return default
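
The two helpers above differ only in side effects: getdefault() is a pure read, while setdefault() also writes the default back into the document, including when the stored value is None. A short sketch of the distinction (the doc contents are illustrative):

    doc = {"tags": None}

    # getdefault-style: read-only, doc is left untouched.
    value = doc.get("tags")
    tags = value if value is not None else []

    # setdefault-style: a stored None gets replaced by the default.
    if doc.setdefault("tags", []) is None:
        doc["tags"] = []

    print(doc)  # {'tags': []}
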
diff --git a/buildscripts/clang_format.py b/buildscripts/clang_format.py
index 82496a07953..af3a53d29f8 100755
--- a/buildscripts/clang_format.py
+++ b/buildscripts/clang_format.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-"""
-A script that provides:
+"""Clang format script that provides the following.
+
1. Ability to grab binaries where possible from LLVM.
2. Ability to download binaries from MongoDB cache for clang-format.
3. Validates clang-format is the right version.
@@ -21,7 +21,7 @@ import tarfile
import tempfile
import threading
import urllib2
-from distutils import spawn
+from distutils import spawn # pylint: disable=no-name-in-module
from optparse import OptionParser
from multiprocessing import cpu_count
@@ -29,8 +29,8 @@ from multiprocessing import cpu_count
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(os.path.realpath(__file__)))))
-from buildscripts.linter import git
-from buildscripts.linter import parallel
+from buildscripts.linter import git # pylint: disable=wrong-import-position
+from buildscripts.linter import parallel # pylint: disable=wrong-import-position
##############################################################################
#
@@ -58,18 +58,17 @@ CLANG_FORMAT_SOURCE_TAR_BASE = string.Template(
##############################################################################
def callo(args):
- """Call a program, and capture its output
- """
+ """Call a program, and capture its output."""
return subprocess.check_output(args)
def get_tar_path(version, tar_path):
- """ Get the path to clang-format in the llvm tarball
- """
+ """Return the path to clang-format in the llvm tarball."""
return CLANG_FORMAT_SOURCE_TAR_BASE.substitute(version=version, tar_path=tar_path)
def extract_clang_format(tar_path):
+ """Extract the clang_format tar file."""
# Extract just the clang-format binary
# On OSX, we shell out to tar because tarfile doesn't support xz compression
if sys.platform == 'darwin':
@@ -85,9 +84,7 @@ def extract_clang_format(tar_path):
def get_clang_format_from_cache_and_extract(url, tarball_ext):
- """Get clang-format from mongodb's cache
- and extract the tarball
- """
+ """Get clang-format from mongodb's cache and extract the tarball."""
dest_dir = tempfile.gettempdir()
temp_tar_file = os.path.join(dest_dir, "temp.tar" + tarball_ext)
@@ -100,8 +97,8 @@ def get_clang_format_from_cache_and_extract(url, tarball_ext):
for attempt in range(num_tries):
try:
resp = urllib2.urlopen(url)
- with open(temp_tar_file, 'wb') as f:
- f.write(resp.read())
+ with open(temp_tar_file, 'wb') as fh:
+ fh.write(resp.read())
break
except urllib2.URLError:
if attempt == num_tries - 1:
@@ -112,9 +109,7 @@ def get_clang_format_from_cache_and_extract(url, tarball_ext):
def get_clang_format_from_darwin_cache(dest_file):
- """Download clang-format from llvm.org, unpack the tarball,
- and put clang-format in the specified place
- """
+ """Download clang-format from llvm.org, unpack the tarball to dest_file."""
get_clang_format_from_cache_and_extract(CLANG_FORMAT_HTTP_DARWIN_CACHE, ".xz")
# Destination Path
@@ -122,8 +117,7 @@ def get_clang_format_from_darwin_cache(dest_file):
def get_clang_format_from_linux_cache(dest_file):
- """Get clang-format from mongodb's cache
- """
+ """Get clang-format from mongodb's cache."""
get_clang_format_from_cache_and_extract(CLANG_FORMAT_HTTP_LINUX_CACHE, ".gz")
# Destination Path
@@ -131,11 +125,10 @@ def get_clang_format_from_linux_cache(dest_file):
class ClangFormat(object):
- """Class encapsulates finding a suitable copy of clang-format,
- and linting/formating an individual file
- """
+ """ClangFormat class."""
- def __init__(self, path, cache_dir):
+ def __init__(self, path, cache_dir): # pylint: disable=too-many-branches
+ """Initialize ClangFormat."""
self.path = None
clang_format_progname_ext = ""
@@ -167,7 +160,7 @@ class ClangFormat(object):
]
if sys.platform == "win32":
- for i in range(len(programs)):
+ for i, _ in enumerate(programs):
programs[i] += '.exe'
for program in programs:
@@ -222,8 +215,7 @@ class ClangFormat(object):
self.print_lock = threading.Lock()
def _validate_version(self):
- """Validate clang-format is the expected version
- """
+ """Validate clang-format is the expected version."""
cf_version = callo([self.path, "--version"])
if CLANG_FORMAT_VERSION in cf_version:
@@ -235,8 +227,7 @@ class ClangFormat(object):
return False
def _lint(self, file_name, print_diff):
- """Check the specified file has the correct format
- """
+ """Check the specified file has the correct format."""
with open(file_name, 'rb') as original_text:
original_file = original_text.read()
@@ -262,13 +253,11 @@ class ClangFormat(object):
return True
def lint(self, file_name):
- """Check the specified file has the correct format
- """
+ """Check the specified file has the correct format."""
return self._lint(file_name, print_diff=True)
def format(self, file_name):
- """Update the format of the specified file
- """
+ """Update the format of the specified file."""
if self._lint(file_name, print_diff=False):
return True
@@ -285,32 +274,28 @@ class ClangFormat(object):
return formatted
-files_re = re.compile('\\.(h|hpp|ipp|cpp|js)$')
+FILES_RE = re.compile('\\.(h|hpp|ipp|cpp|js)$')
def is_interesting_file(file_name):
- """"Return true if this file should be checked
- """
+ """Return true if this file should be checked."""
return ((file_name.startswith("jstests") or file_name.startswith("src"))
and not file_name.startswith("src/third_party/")
- and not file_name.startswith("src/mongo/gotools/")) and files_re.search(file_name)
+ and not file_name.startswith("src/mongo/gotools/")) and FILES_RE.search(file_name)
def get_list_from_lines(lines):
- """"Convert a string containing a series of lines into a list of strings
- """
+ """Convert a string containing a series of lines into a list of strings."""
return [line.rstrip() for line in lines.splitlines()]
def _get_build_dir():
- """Get the location of the scons' build directory in case we need to download clang-format
- """
+ """Return the location of the scons' build directory."""
return os.path.join(git.get_base_dir(), "build")
def _lint_files(clang_format, files):
- """Lint a list of files with clang-format
- """
+ """Lint a list of files with clang-format."""
clang_format = ClangFormat(clang_format, _get_build_dir())
lint_clean = parallel.parallel_process([os.path.abspath(f) for f in files], clang_format.lint)
@@ -321,8 +306,7 @@ def _lint_files(clang_format, files):
def lint_patch(clang_format, infile):
- """Lint patch command entry point
- """
+ """Lint patch command entry point."""
files = git.get_files_to_check_from_patch(infile, is_interesting_file)
# Patch may have files that we do not want to check which is fine
@@ -331,8 +315,7 @@ def lint_patch(clang_format, infile):
def lint(clang_format):
- """Lint files command entry point
- """
+ """Lint files command entry point."""
files = git.get_files_to_check([], is_interesting_file)
_lint_files(clang_format, files)
@@ -341,8 +324,7 @@ def lint(clang_format):
def lint_all(clang_format):
- """Lint files command entry point based on working tree
- """
+ """Lint files command entry point based on working tree."""
files = git.get_files_to_check_working_tree(is_interesting_file)
_lint_files(clang_format, files)
@@ -351,8 +333,7 @@ def lint_all(clang_format):
def _format_files(clang_format, files):
- """Format a list of files with clang-format
- """
+ """Format a list of files with clang-format."""
clang_format = ClangFormat(clang_format, _get_build_dir())
format_clean = parallel.parallel_process([os.path.abspath(f) for f in files],
@@ -364,16 +345,15 @@ def _format_files(clang_format, files):
def format_func(clang_format):
- """Format files command entry point
- """
+ """Format files command entry point."""
files = git.get_files_to_check([], is_interesting_file)
_format_files(clang_format, files)
-def reformat_branch(clang_format, commit_prior_to_reformat, commit_after_reformat):
- """Reformat a branch made before a clang-format run
- """
+def reformat_branch( # pylint: disable=too-many-branches,too-many-locals,too-many-statements
+ clang_format, commit_prior_to_reformat, commit_after_reformat):
+ """Reformat a branch made before a clang-format run."""
clang_format = ClangFormat(clang_format, _get_build_dir())
if os.getcwd() != git.get_base_dir():
@@ -515,16 +495,14 @@ def reformat_branch(clang_format, commit_prior_to_reformat, commit_after_reforma
def usage():
- """Print usage
- """
+ """Print usage."""
print(
"clang-format.py supports 5 commands [ lint, lint-all, lint-patch, format, reformat-branch]."
)
def main():
- """Main entry point
- """
+ """Execute Main entry point."""
parser = OptionParser()
parser.add_option("-c", "--clang-format", type="string", dest="clang_format")
diff --git a/buildscripts/collect_resource_info.py b/buildscripts/collect_resource_info.py
index 211d00e3235..6c486e81e16 100755
--- a/buildscripts/collect_resource_info.py
+++ b/buildscripts/collect_resource_info.py
@@ -1,7 +1,5 @@
#!/usr/bin/env python
-"""
-Collect system resource information on processes running in Evergreen on a given interval.
-"""
+"""Collect system resource information on processes running in Evergreen on a given interval."""
from __future__ import absolute_import
from __future__ import print_function
@@ -19,10 +17,11 @@ import requests
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-from buildscripts.resmokelib import utils
+from buildscripts.resmokelib import utils # pylint: disable=wrong-import-position
def main():
+ """Main."""
usage = "usage: %prog [options]"
parser = optparse.OptionParser(description=__doc__, usage=usage)
parser.add_option("-i", "--interval", dest="interval", default=5, type="int",
diff --git a/buildscripts/combine_reports.py b/buildscripts/combine_reports.py
index e59cc29f172..45fc0bcf38b 100755
--- a/buildscripts/combine_reports.py
+++ b/buildscripts/combine_reports.py
@@ -1,7 +1,5 @@
#!/usr/bin/env python
-"""
-Combines JSON report files used in Evergreen
-"""
+"""Combine JSON report files used in Evergreen."""
from __future__ import absolute_import
from __future__ import print_function
@@ -16,20 +14,24 @@ from optparse import OptionParser
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-from buildscripts.resmokelib.testing import report
-from buildscripts.resmokelib import utils
+from buildscripts.resmokelib.testing import report # pylint: disable=wrong-import-position
+from buildscripts.resmokelib import utils # pylint: disable=wrong-import-position
def read_json_file(json_file):
+ """Read JSON file."""
with open(json_file) as json_data:
return json.load(json_data)
def report_exit(combined_test_report):
- """The exit code of this script is based on the following:
+ """Return report exit code.
+
+ The exit code of this script is based on the following:
0: All tests have status "pass", or only non-dynamic tests have status "silentfail".
31: At least one test has status "fail" or "timeout".
- Note: A test can be considered dynamic if its name contains a ":" character."""
+ Note: A test can be considered dynamic if its name contains a ":" character.
+ """
ret = 0
for test in combined_test_report.test_infos:
@@ -39,9 +41,7 @@ def report_exit(combined_test_report):
def check_error(input_count, output_count):
- """
- Error if both input and output exist, or if neither exist.
- """
+ """Raise error if both input and output exist, or if neither exist."""
if (not input_count) and (not output_count):
raise ValueError("None of the input file(s) or output file exists")
@@ -50,6 +50,7 @@ def check_error(input_count, output_count):
def main():
+ """Execute Main program."""
usage = "usage: %prog [options] report1.json report2.json ..."
parser = OptionParser(description=__doc__, usage=usage)
parser.add_option("-o", "--output-file", dest="outfile", default="-",
@@ -73,9 +74,9 @@ def main():
try:
report_file_json = read_json_file(report_file)
test_reports.append(report.TestReport.from_dict(report_file_json))
- except IOError as e:
+ except IOError as err:
# errno.ENOENT is the error code for "No such file or directory".
- if e.errno == errno.ENOENT:
+ if err.errno == errno.ENOENT:
report_files_count -= 1
continue
raise
diff --git a/buildscripts/errorcodes.py b/buildscripts/errorcodes.py
index 17c59badde3..8a6e8efb36a 100755
--- a/buildscripts/errorcodes.py
+++ b/buildscripts/errorcodes.py
@@ -1,58 +1,40 @@
#!/usr/bin/env python
-"""Produces a report of all assertions in the MongoDB server codebase.
+"""Produce a report of all assertions in the MongoDB server codebase.
Parses .cpp files for assertions and verifies assertion codes are distinct.
Optionally replaces zero codes in source code with new distinct values.
"""
+from __future__ import print_function
+
import bisect
-import os
import sys
-import utils
from collections import defaultdict, namedtuple
from optparse import OptionParser
+from buildscripts import utils
+
try:
import regex as re
except ImportError:
print("*** Run 'pip2 install --user regex' to speed up error code checking")
- import re
+ import re # type: ignore
ASSERT_NAMES = ["uassert", "massert", "fassert", "fassertFailed"]
MINIMUM_CODE = 10000
-codes = []
+# pylint: disable=invalid-name
+codes = [] # type: ignore
+# pylint: enable=invalid-name
# Each AssertLocation identifies the C++ source location of an assertion
AssertLocation = namedtuple("AssertLocation", ['sourceFile', 'byteOffset', 'lines', 'code'])
-list_files = False
-
-
-# Of historical interest only
-def assignErrorCodes():
- cur = MINIMUM_CODE
- for root in ASSERT_NAMES:
- for x in utils.getAllSourceFiles():
- print(x)
- didAnything = False
- fixed = ""
- for line in open(x):
- s = line.partition(root + "(")
- if s[1] == "" or line.startswith("#define " + root):
- fixed += line
- continue
- fixed += s[0] + root + "( " + str(cur) + " , " + s[2]
- cur = cur + 1
- didAnything = True
- if didAnything:
- out = open(x, 'w')
- out.write(fixed)
- out.close()
-
-
-def parseSourceFiles(callback):
- """Walks MongoDB sourcefiles and invokes callback for each AssertLocation found."""
+list_files = False # pylint: disable=invalid-name
+
+
+def parse_source_files(callback):
+ """Walk MongoDB sourcefiles and invoke a callback for each AssertLocation found."""
quick = ["assert", "Exception", "ErrorCodes::Error"]
@@ -64,12 +46,12 @@ def parseSourceFiles(callback):
re.compile(r"ErrorCodes::Error\s*[({]\s*(\d+)", re.MULTILINE)
]
- for sourceFile in utils.getAllSourceFiles(prefix='src/mongo/'):
+ for source_file in utils.get_all_source_files(prefix='src/mongo/'):
if list_files:
- print 'scanning file: ' + sourceFile
+ print('scanning file: ' + source_file)
- with open(sourceFile) as f:
- text = f.read()
+ with open(source_file) as fh:
+ text = fh.read()
if not any([zz in text for zz in quick]):
continue
@@ -78,22 +60,24 @@ def parseSourceFiles(callback):
for matchiter in matchiters:
for match in matchiter:
code = match.group(1)
- codeOffset = match.start(1)
+ code_offset = match.start(1)
# Note that this will include the text of the full match but will report the
# position of the beginning of the code portion rather than the beginning of the
# match. This is to position editors on the spot that needs to change.
- thisLoc = AssertLocation(sourceFile, codeOffset,
- text[match.start():match.end()], code)
+ this_loc = AssertLocation(source_file, code_offset,
+ text[match.start():match.end()], code)
- callback(thisLoc)
+ callback(this_loc)
-# Converts an absolute position in a file into a line number.
-def getLineAndColumnForPosition(loc, _file_cache={}):
+def get_line_and_column_for_position(loc, _file_cache=None):
+ """Convert an absolute position in a file into a line number."""
+ if _file_cache is None:
+ _file_cache = {}
if loc.sourceFile not in _file_cache:
- with open(loc.sourceFile) as f:
- text = f.read()
+ with open(loc.sourceFile) as fh:
+ text = fh.read()
line_offsets = [0]
for line in text.splitlines(True):
line_offsets.append(line_offsets[-1] + len(line))
@@ -105,150 +89,115 @@ def getLineAndColumnForPosition(loc, _file_cache={}):
return (line, column)
-def isTerminated(lines):
- """Given .cpp/.h source lines as text, determine if assert is terminated."""
- x = " ".join(lines)
- return ';' in x \
- or x.count('(') - x.count(')') <= 0
+def is_terminated(lines):
+ """Determine if assert is terminated, from .cpp/.h source lines as text."""
+ code_block = " ".join(lines)
+ return ';' in code_block or code_block.count('(') - code_block.count(')') <= 0
-def getNextCode():
- """Finds next unused assertion code.
+def get_next_code():
+ """Find next unused assertion code.
Called by: SConstruct and main()
Since SConstruct calls us, codes[] must be global OR WE REPARSE EVERYTHING
"""
- if not len(codes) > 0:
- readErrorCodes()
+ if not codes:
+ read_error_codes()
highest = reduce(lambda x, y: max(int(x), int(y)), (loc.code for loc in codes))
return highest + 1
-def checkErrorCodes():
- """SConstruct expects a boolean response from this function.
- """
- (codes, errors) = readErrorCodes()
+def check_error_codes():
+ """Check error codes as SConstruct expects a boolean response from this function."""
+ (_, errors) = read_error_codes()
return len(errors) == 0
-def readErrorCodes():
- """Defines callback, calls parseSourceFiles() with callback,
- and saves matches to global codes list.
- """
+def read_error_codes():
+ """Define callback, call parse_source_files() with callback, save matches to global codes list."""
seen = {}
errors = []
dups = defaultdict(list)
# define callback
- def checkDups(assertLoc):
- codes.append(assertLoc)
- code = assertLoc.code
+ def check_dups(assert_loc):
+ """Check for duplicates."""
+ codes.append(assert_loc)
+ code = assert_loc.code
if not code in seen:
- seen[code] = assertLoc
+ seen[code] = assert_loc
else:
if not code in dups:
# on first duplicate, add original to dups, errors
dups[code].append(seen[code])
errors.append(seen[code])
- dups[code].append(assertLoc)
- errors.append(assertLoc)
+ dups[code].append(assert_loc)
+ errors.append(assert_loc)
- parseSourceFiles(checkDups)
+ parse_source_files(check_dups)
- if seen.has_key("0"):
+ if "0" in seen:
code = "0"
bad = seen[code]
errors.append(bad)
- line, col = getLineAndColumnForPosition(bad)
+ line, col = get_line_and_column_for_position(bad)
print("ZERO_CODE:")
print(" %s:%d:%d:%s" % (bad.sourceFile, line, col, bad.lines))
for code, locations in dups.items():
print("DUPLICATE IDS: %s" % code)
for loc in locations:
- line, col = getLineAndColumnForPosition(loc)
+ line, col = get_line_and_column_for_position(loc)
print(" %s:%d:%d:%s" % (loc.sourceFile, line, col, loc.lines))
return (codes, errors)
-def replaceBadCodes(errors, nextCode):
- """Modifies C++ source files to replace invalid assertion codes.
+def replace_bad_codes(errors, next_code): # pylint: disable=too-many-locals
+ """Modify C++ source files to replace invalid assertion codes.
+
For now, we only modify zero codes.
Args:
errors: list of AssertLocation
- nextCode: int, next non-conflicting assertion code
+ next_code: int, next non-conflicting assertion code
"""
zero_errors = [e for e in errors if int(e.code) == 0]
skip_errors = [e for e in errors if int(e.code) != 0]
for loc in skip_errors:
- line, col = getLineAndColumnForPosition(loc)
+ line, col = get_line_and_column_for_position(loc)
print("SKIPPING NONZERO code=%s: %s:%d:%d" % (loc.code, loc.sourceFile, line, col))
# Dedupe, sort, and reverse so we don't have to update offsets as we go.
- for assertLoc in reversed(sorted(set(zero_errors))):
- (sourceFile, byteOffset, lines, code) = assertLoc
- lineNum, _ = getLineAndColumnForPosition(assertLoc)
- print "UPDATING_FILE: %s:%s" % (sourceFile, lineNum)
-
- ln = lineNum - 1
-
- with open(sourceFile, 'r+') as f:
- print "LINE_%d_BEFORE:%s" % (lineNum, f.readlines()[ln].rstrip())
-
- f.seek(0)
- text = f.read()
- assert text[byteOffset] == '0'
- f.seek(0)
- f.write(text[:byteOffset])
- f.write(str(nextCode))
- f.write(text[byteOffset + 1:])
- f.seek(0)
-
- print "LINE_%d_AFTER :%s" % (lineNum, f.readlines()[ln].rstrip())
- nextCode += 1
-
-
-def getBestMessage(lines, codeStr):
- """Extracts message from one AssertionLocation.lines entry
-
- Args:
- lines: list of contiguous C++ source lines
- codeStr: assertion code found in first line
- """
- line = lines if isinstance(lines, str) else " ".join(lines)
-
- err = line.partition(codeStr)[2]
- if not err:
- return ""
-
- # Trim to outer quotes
- m = re.search(r'"(.*)"', err)
- if not m:
- return ""
- err = m.group(1)
+ for assert_loc in reversed(sorted(set(zero_errors))):
+ (source_file, byte_offset, _, _) = assert_loc
+ line_num, _ = get_line_and_column_for_position(assert_loc)
+ print("UPDATING_FILE: %s:%s" % (source_file, line_num))
- # Trim inner quote pairs
- err = re.sub(r'" +"', '', err)
- err = re.sub(r'" *<< *"', '', err)
- err = re.sub(r'" *<<[^<]+<< *"', '<X>', err)
- err = re.sub(r'" *\+[^+]+\+ *"', '<X>', err)
+ ln = line_num - 1
- # Trim escaped quotes
- err = re.sub(r'\\"', '', err)
+ with open(source_file, 'r+') as fh:
+ print("LINE_%d_BEFORE:%s" % (line_num, fh.readlines()[ln].rstrip()))
- # Iff doublequote still present, trim that and any trailing text
- err = re.sub(r'".*$', '', err)
+ fh.seek(0)
+ text = fh.read()
+ assert text[byte_offset] == '0'
+ fh.seek(0)
+ fh.write(text[:byte_offset])
+ fh.write(str(next_code))
+ fh.write(text[byte_offset + 1:])
+ fh.seek(0)
- return err.strip()
+ print("LINE_%d_AFTER :%s" % (line_num, fh.readlines()[ln].rstrip()))
+ next_code += 1
def main():
+ """Main."""
parser = OptionParser(description=__doc__.strip())
parser.add_option("--fix", dest="replace", action="store_true", default=False,
help="Fix zero codes in source files [default: %default]")
@@ -256,28 +205,28 @@ def main():
help="Suppress output on success [default: %default]")
parser.add_option("--list-files", dest="list_files", action="store_true", default=False,
help="Print the name of each file as it is scanned [default: %default]")
- (options, args) = parser.parse_args()
+ (options, _) = parser.parse_args()
- global list_files
+ global list_files # pylint: disable=global-statement,invalid-name
list_files = options.list_files
- (codes, errors) = readErrorCodes()
+ (_, errors) = read_error_codes()
ok = len(errors) == 0
if ok and options.quiet:
return
- next = getNextCode()
+ next_code = get_next_code()
print("ok: %s" % ok)
- print("next: %s" % next)
+ print("next: %s" % next_code)
if ok:
sys.exit(0)
elif options.replace:
- replaceBadCodes(errors, next)
+ replace_bad_codes(errors, next_code)
else:
- print ERROR_HELP
+ print(ERROR_HELP)
sys.exit(1)
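
The zero-code fixer above rewrites source files in place by byte offset, walking the locations in reverse so earlier offsets stay valid after each substitution. A minimal, self-contained sketch of the same technique (the file path, offsets, and starting code are hypothetical inputs, not the script's actual data structures):

    # Replace single-character '0' placeholders in a file by byte offset.
    # Processing offsets in descending order keeps earlier offsets valid
    # even when the replacement code is more than one character long.
    def patch_offsets(path, offsets, next_code):
        with open(path, 'r') as fh:
            text = fh.read()
        for off in sorted(set(offsets), reverse=True):
            assert text[off] == '0'  # only rewrite placeholder zero codes
            text = text[:off] + str(next_code) + text[off + 1:]
            next_code += 1
        with open(path, 'w') as fh:
            fh.write(text)
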
diff --git a/buildscripts/eslint.py b/buildscripts/eslint.py
index 8a68220d5cd..bb365311640 100755
--- a/buildscripts/eslint.py
+++ b/buildscripts/eslint.py
@@ -1,15 +1,17 @@
#!/usr/bin/env python
+"""ESLint module.
+
+Will download a prebuilt ESLint binary if necessary (i.e. it isn't installed, isn't in the current
+path, or is the wrong version). It works in much the same way as clang_format.py. In lint mode, it
+will lint the files or directory paths passed. In lint-patch mode, for upload.py, it will see if
+there are any candidate files in the supplied patch. Fix mode will run ESLint with the --fix
+option, and that will update the files with missing semicolons and similar repairable issues.
+There is also a -d mode that assumes you only want to run one copy of ESLint per file / directory
+parameter supplied. This lets ESLint search for candidate files to lint.
"""
-eslint.py
- Will download a prebuilt ESLint binary if necessary (i.e. it isn't installed, isn't in the current
- path, or is the wrong version). It works in much the same way as clang_format.py. In lint mode, it
- will lint the files or directory paths passed. In lint-patch mode, for upload.py, it will see if
- there are any candidate files in the supplied patch. Fix mode will run ESLint with the --fix
- option, and that will update the files with missing semicolons and similar repairable issues.
- There is also a -d mode that assumes you only want to run one copy of ESLint per file / directory
- parameter supplied. This lets ESLint search for candidate files to lint.
-"""
-import itertools
+
+from __future__ import print_function
+
import os
import shutil
import string
@@ -19,17 +21,17 @@ import tarfile
import tempfile
import threading
import urllib
-from distutils import spawn
+from distutils import spawn # pylint: disable=no-name-in-module
from optparse import OptionParser
# Get relative imports to work when the package is not installed on the PYTHONPATH.
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(os.path.realpath(__file__)))))
-from buildscripts.resmokelib.utils import globstar
+from buildscripts.resmokelib.utils import globstar # pylint: disable=wrong-import-position
-from buildscripts.linter import git
-from buildscripts.linter import parallel
+from buildscripts.linter import git # pylint: disable=wrong-import-position
+from buildscripts.linter import parallel # pylint: disable=wrong-import-position
##############################################################################
#
@@ -54,12 +56,12 @@ ESLINT_SOURCE_TAR_BASE = string.Template(ESLINT_PROGNAME + "-$platform-$arch")
def callo(args):
- """Call a program, and capture its output
- """
+ """Call a program, and capture its output."""
return subprocess.check_output(args)
def extract_eslint(tar_path, target_file):
+ """Extract ESLint tar file."""
tarfp = tarfile.open(tar_path)
for name in tarfp.getnames():
if name == target_file:
@@ -68,8 +70,7 @@ def extract_eslint(tar_path, target_file):
def get_eslint_from_cache(dest_file, platform, arch):
- """Get ESLint binary from mongodb's cache
- """
+ """Get ESLint binary from mongodb's cache."""
# Get URL
if platform == "Linux":
url = ESLINT_HTTP_LINUX_CACHE
@@ -91,10 +92,10 @@ def get_eslint_from_cache(dest_file, platform, arch):
class ESLint(object):
- """Class encapsulates finding a suitable copy of ESLint, and linting an individual file
- """
+ """Class encapsulates finding a suitable copy of ESLint, and linting an individual file."""
- def __init__(self, path, cache_dir):
+ def __init__(self, path, cache_dir): # pylint: disable=too-many-branches
+ """Initialize ESLint."""
eslint_progname = ESLINT_PROGNAME
# Initialize ESLint configuration information
@@ -150,8 +151,7 @@ class ESLint(object):
self.print_lock = threading.Lock()
def _validate_version(self, warn=False):
- """Validate ESLint is the expected version
- """
+ """Validate ESLint is the expected version."""
esl_version = callo([self.path, "--version"]).rstrip()
# Ignore the leading v in the version string.
if ESLINT_VERSION == esl_version[1:]:
@@ -163,52 +163,43 @@ class ESLint(object):
return False
def _lint(self, file_name, print_diff):
- """Check the specified file for linting errors
- """
+ """Check the specified file for linting errors."""
# ESLint returns non-zero on a linting error. That's all we care about
# so only enter the printing logic if we have an error.
try:
- eslint_output = callo([self.path, "-f", "unix", file_name])
- except subprocess.CalledProcessError as e:
+ callo([self.path, "-f", "unix", file_name])
+ except subprocess.CalledProcessError as err:
if print_diff:
# Take a lock to ensure error messages do not get mixed when printed to the screen
with self.print_lock:
print("ERROR: ESLint found errors in " + file_name)
- print(e.output)
- return False
- except:
- print("ERROR: ESLint process threw unexpected error", sys.exc_info()[0])
+ print(err.output)
return False
return True
def lint(self, file_name):
- """Check the specified file has no linting errors
- """
+ """Check the specified file has no linting errors."""
return self._lint(file_name, print_diff=True)
def autofix(self, file_name):
- """ Run ESLint in fix mode.
- """
+ """Run ESLint in fix mode."""
return not subprocess.call([self.path, "--fix", file_name])
def is_interesting_file(file_name):
- """"Return true if this file should be checked
- """
+ """Return true if this file should be checked."""
return ((file_name.startswith("src/mongo") or file_name.startswith("jstests"))
and file_name.endswith(".js"))
def _get_build_dir():
- """Get the location of the scons build directory in case we need to download ESLint
- """
+ """Get the location of the scons build directory in case we need to download ESLint."""
return os.path.join(git.get_base_dir(), "build")
def _lint_files(eslint, files):
- """Lint a list of files with ESLint
- """
+ """Lint a list of files with ESLint."""
eslint = ESLint(eslint, _get_build_dir())
lint_clean = parallel.parallel_process([os.path.abspath(f) for f in files], eslint.lint)
@@ -222,8 +213,7 @@ def _lint_files(eslint, files):
def lint_patch(eslint, infile):
- """Lint patch command entry point
- """
+ """Lint patch command entry point."""
files = git.get_files_to_check_from_patch(infile, is_interesting_file)
# Patch may have files that we do not want to check which is fine
@@ -233,12 +223,11 @@ def lint_patch(eslint, infile):
def lint(eslint, dirmode, glob):
- """Lint files command entry point
- """
+ """Lint files command entry point."""
if dirmode and glob:
files = glob
else:
- files = get_files_to_check(glob, is_interesting_file)
+ files = git.get_files_to_check(glob, is_interesting_file)
_lint_files(eslint, files)
@@ -246,8 +235,7 @@ def lint(eslint, dirmode, glob):
def _autofix_files(eslint, files):
- """Auto-fix the specified files with ESLint.
- """
+ """Auto-fix the specified files with ESLint."""
eslint = ESLint(eslint, _get_build_dir())
autofix_clean = parallel.parallel_process([os.path.abspath(f) for f in files], eslint.autofix)
@@ -255,22 +243,21 @@ def _autofix_files(eslint, files):
if not autofix_clean:
print("ERROR: failed to auto-fix files")
return False
+ return True
def autofix_func(eslint, dirmode, glob):
- """Auto-fix files command entry point
- """
+ """Auto-fix files command entry point."""
if dirmode:
files = glob
else:
- files = get_files_to_check(glob, is_interesting_file)
+ files = git.get_files_to_check(glob, is_interesting_file)
return _autofix_files(eslint, files)
def main():
- """Main entry point
- """
+    """Execute the main entry point."""
success = False
usage = "%prog [-e <eslint>] [-d] lint|lint-patch|fix [glob patterns] "
description = "lint runs ESLint on provided patterns or all .js files under jstests/ "\
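
The _validate_version check above strips the leading "v" from the binary's --version output before comparing. A hedged sketch of that check against an arbitrary executable (the expected version string here is illustrative, not the pinned ESLINT_VERSION):

    # Sketch: compare a binary's reported version, ignoring a leading "v".
    import subprocess

    def validate_version(binary_path, expected="2.3.0"):  # expected is made up
        out = subprocess.check_output([binary_path, "--version"]).rstrip()
        if isinstance(out, bytes):  # check_output returns bytes on Python 3
            out = out.decode("utf-8")
        return out.lstrip("v") == expected
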
diff --git a/buildscripts/evergreen_run_tests.py b/buildscripts/evergreen_run_tests.py
index 3ed822287b3..69520dd472e 100755
--- a/buildscripts/evergreen_run_tests.py
+++ b/buildscripts/evergreen_run_tests.py
@@ -1,7 +1,5 @@
#!/usr/bin/env python
-"""
-Command line utility for executing MongoDB tests in Evergreen.
-"""
+"""Command line utility for executing MongoDB tests in Evergreen."""
from __future__ import absolute_import
@@ -13,34 +11,42 @@ import sys
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-from buildscripts import resmoke
-from buildscripts import resmokelib
+from buildscripts import resmoke # pylint: disable=wrong-import-position
+from buildscripts import resmokelib # pylint: disable=wrong-import-position
_TagInfo = collections.namedtuple("_TagInfo", ["tag_name", "evergreen_aware", "suite_options"])
class Main(resmoke.Main):
- """
+    """Main class.
+
A class for executing potentially multiple resmoke.py test suites in a way that handles
additional options for running unreliable tests in Evergreen.
"""
- UNRELIABLE_TAG = _TagInfo(tag_name="unreliable", evergreen_aware=True,
- suite_options=resmokelib.config.SuiteOptions.ALL_INHERITED._replace(
- report_failure_status="silentfail"))
+ UNRELIABLE_TAG = _TagInfo(
+ tag_name="unreliable",
+ evergreen_aware=True,
+ suite_options=resmokelib.config.SuiteOptions.ALL_INHERITED._replace( # type: ignore
+ report_failure_status="silentfail"))
RESOURCE_INTENSIVE_TAG = _TagInfo(
- tag_name="resource_intensive", evergreen_aware=False,
- suite_options=resmokelib.config.SuiteOptions.ALL_INHERITED._replace(num_jobs=1))
+ tag_name="resource_intensive",
+ evergreen_aware=False,
+ suite_options=resmokelib.config.SuiteOptions.ALL_INHERITED._replace( # type: ignore
+ num_jobs=1))
RETRY_ON_FAILURE_TAG = _TagInfo(
- tag_name="retry_on_failure", evergreen_aware=True,
- suite_options=resmokelib.config.SuiteOptions.ALL_INHERITED._replace(
+ tag_name="retry_on_failure",
+ evergreen_aware=True,
+ suite_options=resmokelib.config.SuiteOptions.ALL_INHERITED._replace( # type: ignore
fail_fast=False, num_repeats=2, report_failure_status="silentfail"))
- def _make_evergreen_aware_tags(self, tag_name):
- """
- Returns a list of resmoke.py tags for task, variant, and distro combinations in Evergreen.
+ @staticmethod
+ def _make_evergreen_aware_tags(tag_name):
+ """Return a list of resmoke.py tags.
+
+ This list is for task, variant, and distro combinations in Evergreen.
"""
tags_format = ["{tag_name}"]
@@ -62,9 +68,10 @@ class Main(resmoke.Main):
@classmethod
def _make_tag_combinations(cls):
- """
- Returns a list of (tag, enabled) pairs representing all possible combinations of all
- possible pairings of whether the tags are enabled or disabled together.
+ """Return a list of (tag, enabled) pairs.
+
+ These pairs represent all possible combinations of all possible pairings
+ of whether the tags are enabled or disabled together.
"""
combinations = []
@@ -96,8 +103,7 @@ class Main(resmoke.Main):
return combinations
def _get_suites(self):
- """
- Returns a list of resmokelib.testing.suite.Suite instances to execute.
+ """Return a list of resmokelib.testing.suite.Suite instances to execute.
For every resmokelib.testing.suite.Suite instance returned by resmoke.Main._get_suites(),
multiple copies of that test suite are run using different resmokelib.config.SuiteOptions()
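
The _TagInfo entries above derive their suite options by calling _replace on an existing namedtuple, producing a new immutable options object with only a few fields overridden. A stdlib-only illustration (field names are trimmed to the ones visible in this diff, not the full resmokelib.config.SuiteOptions):

    # namedtuple._replace returns a modified copy; the original stays intact.
    import collections

    SuiteOptions = collections.namedtuple(
        "SuiteOptions", ["fail_fast", "num_jobs", "report_failure_status"])

    defaults = SuiteOptions(fail_fast=True, num_jobs=8, report_failure_status="fail")
    unreliable = defaults._replace(report_failure_status="silentfail")
    print(unreliable.report_failure_status)  # silentfail
    print(defaults.report_failure_status)    # fail -- unchanged
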
diff --git a/buildscripts/fetch_test_lifecycle.py b/buildscripts/fetch_test_lifecycle.py
index e1e3db1ffd1..6d3cfc8d5a7 100755
--- a/buildscripts/fetch_test_lifecycle.py
+++ b/buildscripts/fetch_test_lifecycle.py
@@ -1,6 +1,7 @@
#!/usr/bin/env python
-"""Script to retrieve the etc/test_lifecycle.yml tag file from the metadata repository that
-corresponds to the current repository.
+"""Retrieve the etc/test_lifecycle.yml tag file from the metadata repository.
+
+This is performed for the current repository.
Usage:
    python buildscripts/fetch_test_lifecycle.py evergreen-project revision
@@ -13,7 +14,6 @@ import logging
import optparse
import os
import posixpath
-import shutil
import sys
import textwrap
@@ -23,7 +23,7 @@ import yaml
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-from buildscripts import git
+from buildscripts import git # pylint: disable=wrong-import-position
LOGGER = logging.getLogger(__name__)
@@ -31,8 +31,9 @@ LOGGER = logging.getLogger(__name__)
class MetadataRepository(object):
"""Represent the metadata repository containing the test lifecycle tags file."""
- def __init__(self, repository, references_file, lifecycle_file):
- """Initlialize the MetadataRepository.
+ def __init__(self, repository, references_file,
+ lifecycle_file): # noqa: D214,D405,D406,D407,D411,D413
+ """Initialize the MetadataRepository.
Args:
repository: the git.Repository object for the repository.
@@ -47,13 +48,13 @@ class MetadataRepository(object):
# The path to the lifecycle file, absolute or relative to the current working directory.
self.lifecycle_path = os.path.join(repository.directory, lifecycle_file)
- def list_revisions(self):
+ def list_revisions(self): # noqa: D406,D407,D413
"""List the revisions from the HEAD of this repository.
Returns:
A list of str containing the git hashes for all the revisions from the newest (HEAD)
to the oldest.
- """
+ """
return self._repository.git_rev_list(["HEAD", "--", self._lifecycle_file]).splitlines()
def _get_references_content(self, revision):
@@ -62,13 +63,15 @@ class MetadataRepository(object):
return references_content
def get_reference(self, metadata_revision, project):
- """Retrieve the reference revision (a revision of the project 'project') associated with
- the test lifecycle file present in the metadata repository at revision 'metadata_revision'.
+ """Retrieve the reference revision (a revision of the project 'project').
- Args:
- metadata_revision: a revision (git hash) of this repository.
- project: an Evergreen project name (e.g. mongodb-mongo-master).
- """
+ The revision is associated with the test lifecycle file present in the metadata repository
+ at revision 'metadata_revision'.
+
+ Args:
+ metadata_revision: a revision (git hash) of this repository.
+ project: an Evergreen project name (e.g. mongodb-mongo-master).
+ """
references_content = self._get_references_content(metadata_revision)
references = yaml.safe_load(references_content)
return references.get("test-lifecycle", {}).get(project)
@@ -100,9 +103,9 @@ def _get_metadata_revision(metadata_repo, mongo_repo, project, revision):
return None
-def fetch_test_lifecycle(metadata_repo_url, references_file, lifecycle_file, project, revision):
- """Fetch the test lifecycle file that corresponds to the given revision of the repository this
- script is called from.
+def fetch_test_lifecycle(metadata_repo_url, references_file, lifecycle_file, project,
+ revision): # noqa: D214,D405,D406,D407,D411,D413
+    """Fetch the test lifecycle file corresponding to the given revision of this repository.
Args:
metadata_repo_url: the git repository URL for the metadata repository containing the test
@@ -127,7 +130,8 @@ def fetch_test_lifecycle(metadata_repo_url, references_file, lifecycle_file, pro
def main():
- """
+    """Execute the main program.
+
Utility to fetch the etc/test_lifecycle.yml file corresponding to a given revision from
the mongo-test-metadata repository.
"""
diff --git a/buildscripts/gdb/mongo.py b/buildscripts/gdb/mongo.py
index 5888662b6ef..0d7bb626a40 100644
--- a/buildscripts/gdb/mongo.py
+++ b/buildscripts/gdb/mongo.py
@@ -1,12 +1,12 @@
-"""GDB commands for MongoDB
-"""
+"""GDB commands for MongoDB."""
from __future__ import print_function
-import gdb
import os
import re
import sys
+import gdb
+
def get_process_name():
"""Return the main binary we are attached to."""
@@ -16,7 +16,7 @@ def get_process_name():
def get_thread_id():
- """Returns the thread_id of the current GDB thread"""
+ """Return the thread_id of the current GDB thread."""
# GDB thread example:
# RHEL
# [Current thread is 1 (Thread 0x7f072426cca0 (LWP 12867))]
@@ -41,25 +41,36 @@ def get_thread_id():
# Commands
#
###################################################################################################
-# Dictionary of commands so we can write a help function that describes the MongoDB commands.
-mongo_commands = {}
-def register_mongo_command(obj, name, command_class):
- """Register a command with no completer as a mongo command"""
- global mongo_commands
- gdb.Command.__init__(obj, name, command_class)
+class RegisterMongoCommand(object):
+ """Class to register mongo commands with GDB."""
- mongo_commands[name] = obj.__doc__
+ _MONGO_COMMANDS = {} # type: ignore
+
+ @classmethod
+ def register(cls, obj, name, command_class):
+ """Register a command with no completer as a mongo command."""
+ gdb.Command.__init__(obj, name, command_class)
+ cls._MONGO_COMMANDS[name] = obj.__doc__
+
+ @classmethod
+ def print_commands(cls):
+ """Print the registered mongo commands."""
+ print("Command - Description")
+ for key in cls._MONGO_COMMANDS:
+ print("%s - %s" % (key, cls._MONGO_COMMANDS[key]))
class DumpGlobalServiceContext(gdb.Command):
- """Dump the Global Service Context"""
+ """Dump the Global Service Context."""
def __init__(self):
- register_mongo_command(self, "mongodb-service-context", gdb.COMMAND_DATA)
+ """Initialize DumpGlobalServiceContext."""
+ RegisterMongoCommand.register(self, "mongodb-service-context", gdb.COMMAND_DATA)
- def invoke(self, arg, _from_tty):
+ def invoke(self, arg, _from_tty): # pylint: disable=no-self-use,unused-argument
+ """Invoke GDB command to print the Global Service Context."""
gdb.execute("print *('mongo::(anonymous namespace)::globalServiceContext')")
@@ -68,12 +79,14 @@ DumpGlobalServiceContext()
class MongoDBDumpLocks(gdb.Command):
- """Dump locks in mongod process"""
+ """Dump locks in mongod process."""
def __init__(self):
- register_mongo_command(self, "mongodb-dump-locks", gdb.COMMAND_DATA)
+ """Initialize MongoDBDumpLocks."""
+ RegisterMongoCommand.register(self, "mongodb-dump-locks", gdb.COMMAND_DATA)
- def invoke(self, arg, _from_tty):
+ def invoke(self, arg, _from_tty): # pylint: disable=unused-argument
+ """Invoke MongoDBDumpLocks."""
print("Running Hang Analyzer Supplement - MongoDBDumpLocks")
main_binary_name = get_process_name()
@@ -82,8 +95,9 @@ class MongoDBDumpLocks(gdb.Command):
else:
print("Not invoking mongod lock dump for: %s" % (main_binary_name))
- def dump_mongod_locks(self):
- """GDB in-process python supplement"""
+ @staticmethod
+ def dump_mongod_locks():
+ """GDB in-process python supplement."""
try:
# Call into mongod, and dump the state of lock manager
@@ -99,12 +113,14 @@ MongoDBDumpLocks()
class BtIfActive(gdb.Command):
- """Print stack trace or a short message if the current thread is idle"""
+ """Print stack trace or a short message if the current thread is idle."""
def __init__(self):
- register_mongo_command(self, "mongodb-bt-if-active", gdb.COMMAND_DATA)
+ """Initialize BtIfActive."""
+ RegisterMongoCommand.register(self, "mongodb-bt-if-active", gdb.COMMAND_DATA)
- def invoke(self, arg, _from_tty):
+ def invoke(self, arg, _from_tty): # pylint: disable=no-self-use,unused-argument
+ """Invoke GDB to print stack trace."""
try:
idle_location = gdb.parse_and_eval("mongo::for_debuggers::idleThreadLocation")
except gdb.error:
@@ -121,14 +137,16 @@ BtIfActive()
class MongoDBUniqueStack(gdb.Command):
- """Print unique stack traces of all threads in current process"""
+ """Print unique stack traces of all threads in current process."""
_HEADER_FORMAT = "Thread {gdb_thread_num}: {name} (Thread {pthread} (LWP {lwpid})):"
def __init__(self):
- register_mongo_command(self, "mongodb-uniqstack", gdb.COMMAND_DATA)
+ """Initialize MongoDBUniqueStack."""
+ RegisterMongoCommand.register(self, "mongodb-uniqstack", gdb.COMMAND_DATA)
def invoke(self, arg, _from_tty):
+ """Invoke GDB to dump stacks."""
stacks = {}
if not arg:
arg = 'bt' # default to 'bt'
@@ -145,7 +163,9 @@ class MongoDBUniqueStack(gdb.Command):
if current_thread and current_thread.is_valid():
current_thread.switch()
- def _get_current_thread_name(self):
+ @staticmethod
+ def _get_current_thread_name():
+ """Return the current thread name."""
fallback_name = '"%s"' % (gdb.selected_thread().name or '')
try:
# This goes through the pretty printer for StringData which adds "" around the name.
@@ -157,6 +177,7 @@ class MongoDBUniqueStack(gdb.Command):
return fallback_name
def _process_thread_stack(self, arg, stacks, thread):
+ """Process the thread stack."""
thread_info = {} # thread dict to hold per thread data
thread_info['pthread'] = get_thread_id()
thread_info['gdb_thread_num'] = thread.num
@@ -186,9 +207,9 @@ class MongoDBUniqueStack(gdb.Command):
except gdb.error as err:
print("{} {}".format(thread_info['header'], err))
break
- addrs = tuple(addrs) # tuples are hashable, lists aren't.
+ addrs_tuple = tuple(addrs) # tuples are hashable, lists aren't.
- unique = stacks.setdefault(addrs, {'threads': []})
+ unique = stacks.setdefault(addrs_tuple, {'threads': []})
unique['threads'].append(thread_info)
if 'output' not in unique:
try:
@@ -196,8 +217,12 @@ class MongoDBUniqueStack(gdb.Command):
except gdb.error as err:
print("{} {}".format(thread_info['header'], err))
- def _dump_unique_stacks(self, stacks):
+ @staticmethod
+ def _dump_unique_stacks(stacks):
+ """Dump the unique stacks."""
+
def first_tid(stack):
+ """Return the first tid."""
return stack['threads'][0]['gdb_thread_num']
for stack in sorted(stacks.values(), key=first_tid, reverse=True):
@@ -213,12 +238,14 @@ MongoDBUniqueStack()
class MongoDBJavaScriptStack(gdb.Command):
- """Print the JavaScript stack from a MongoDB process"""
+ """Print the JavaScript stack from a MongoDB process."""
def __init__(self):
- register_mongo_command(self, "mongodb-javascript-stack", gdb.COMMAND_STATUS)
+ """Initialize MongoDBJavaScriptStack."""
+ RegisterMongoCommand.register(self, "mongodb-javascript-stack", gdb.COMMAND_STATUS)
- def invoke(self, arg, _from_tty):
+ def invoke(self, arg, _from_tty): # pylint: disable=unused-argument
+ """Invoke GDB to dump JS stacks."""
print("Running Print JavaScript Stack Supplement")
main_binary_name = get_process_name()
@@ -227,8 +254,9 @@ class MongoDBJavaScriptStack(gdb.Command):
else:
print("No JavaScript stack print done for: %s" % (main_binary_name))
- def javascript_stack(self):
- """GDB in-process python supplement"""
+ @staticmethod
+ def javascript_stack():
+ """GDB in-process python supplement."""
for thread in gdb.selected_inferior().threads():
try:
@@ -257,15 +285,15 @@ MongoDBJavaScriptStack()
class MongoDBHelp(gdb.Command):
- """Dump list of mongodb commands"""
+ """Dump list of mongodb commands."""
def __init__(self):
+ """Initialize MongoDBHelp."""
gdb.Command.__init__(self, "mongodb-help", gdb.COMMAND_SUPPORT)
- def invoke(self, arg, _from_tty):
- print("Command - Description")
- for key in mongo_commands:
- print("%s - %s" % (key, mongo_commands[key]))
+ def invoke(self, arg, _from_tty): # pylint: disable=no-self-use,unused-argument
+        """Print the registered mongo commands."""
+ RegisterMongoCommand.print_commands()
# Register command
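
RegisterMongoCommand replaces the old module-level dict with a class-level registry: each command records its docstring at construction time, and mongodb-help prints the collected entries. A GDB-free sketch of the same registry pattern (class and command names here are illustrative):

    # Class-level registry: instances self-register; a help command lists them.
    class CommandRegistry(object):
        _COMMANDS = {}

        @classmethod
        def register(cls, obj, name):
            cls._COMMANDS[name] = obj.__doc__

        @classmethod
        def print_commands(cls):
            print("Command - Description")
            for name in sorted(cls._COMMANDS):
                print("%s - %s" % (name, cls._COMMANDS[name]))

    class DumpSomething(object):
        """Dump something useful."""

        def __init__(self):
            CommandRegistry.register(self, "dump-something")

    DumpSomething()
    CommandRegistry.print_commands()  # dump-something - Dump something useful.
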
diff --git a/buildscripts/gdb/mongo_lock.py b/buildscripts/gdb/mongo_lock.py
index b045f8344ca..0fb07c0368c 100644
--- a/buildscripts/gdb/mongo_lock.py
+++ b/buildscripts/gdb/mongo_lock.py
@@ -1,19 +1,26 @@
+"""Mongo lock module."""
+
from __future__ import print_function
-import gdb
-import gdb.printing
import re
import sys
+import gdb
+import gdb.printing
+import mongo
+
if sys.version_info[0] >= 3:
# GDB only permits converting a gdb.Value instance to its numerical address when using the
# long() constructor in Python 2 and not when using the int() constructor. We define the
# 'long' class as an alias for the 'int' class in Python 3 for compatibility.
- long = int
+ long = int # pylint: disable=redefined-builtin,invalid-name
class Thread(object):
+ """Thread class."""
+
def __init__(self, thread_id, lwpid):
+ """Initialize Thread."""
self.thread_id = thread_id
self.lwpid = lwpid
@@ -29,11 +36,15 @@ class Thread(object):
return "Thread 0x{:012x} (LWP {})".format(self.thread_id, self.lwpid)
def key(self):
+ """Return thread key."""
return "Thread 0x{:012x}".format(self.thread_id)
class Lock(object):
+ """Lock class."""
+
def __init__(self, addr, resource):
+ """Initialize Lock."""
self.addr = addr
self.resource = resource
@@ -49,35 +60,45 @@ class Lock(object):
return "Lock 0x{:012x} ({})".format(self.addr, self.resource)
def key(self):
+ """Return lock key."""
return "Lock 0x{:012x}".format(self.addr)
class Graph(object):
- # The Graph is a dict with the following structure:
- # {'node_key': {'node': {id: val}, 'next_nodes': [node_key_1, ...]}}
- # Example graph:
- # {
- # 'Lock 1': {'node': {1: 'MongoDB lock'}, 'next_nodes': ['Thread 1']},
- # 'Lock 2': {'node': {2: 'MongoDB lock'}, 'next_nodes': ['Thread 2']},
- # 'Thread 1': {'node': {1: 123}, 'next_nodes': ['Lock 2']},
- # 'Thread 2': {'node': {2: 456}, 'next_nodes': ['Lock 1']}
- # }
+ """Graph class.
+
+ The Graph is a dict with the following structure:
+ {'node_key': {'node': {id: val}, 'next_nodes': [node_key_1, ...]}}
+ Example graph:
+ {
+ 'Lock 1': {'node': {1: 'MongoDB lock'}, 'next_nodes': ['Thread 1']},
+ 'Lock 2': {'node': {2: 'MongoDB lock'}, 'next_nodes': ['Thread 2']},
+ 'Thread 1': {'node': {1: 123}, 'next_nodes': ['Lock 2']},
+ 'Thread 2': {'node': {2: 456}, 'next_nodes': ['Lock 1']}
+ }
+ """
+
def __init__(self):
+ """Initialize Graph."""
self.nodes = {}
def is_empty(self):
+ """Return True if graph is empty."""
return not bool(self.nodes)
def add_node(self, node):
+ """Add node to graph."""
if not self.find_node(node):
self.nodes[node.key()] = {'node': node, 'next_nodes': []}
def find_node(self, node):
+ """Find node in graph."""
if node.key() in self.nodes:
return self.nodes[node.key()]
return None
def find_from_node(self, from_node):
+ """Find from node."""
for node_key in self.nodes:
node = self.nodes[node_key]
for next_node in node['next_nodes']:
@@ -86,6 +107,7 @@ class Graph(object):
return None
def remove_nodes_without_edge(self):
+ """Remove nodes without edge."""
# Rebuild graph by removing any nodes which do not have any incoming or outgoing edges.
temp_nodes = {}
for node_key in self.nodes:
@@ -95,28 +117,31 @@ class Graph(object):
self.nodes = temp_nodes
def add_edge(self, from_node, to_node):
- f = self.find_node(from_node)
- if f is None:
+ """Add edge."""
+ f_node = self.find_node(from_node)
+ if f_node is None:
self.add_node(from_node)
- f = self.nodes[from_node.key()]
+ f_node = self.nodes[from_node.key()]
- t = self.find_node(to_node)
- if t is None:
+ t_node = self.find_node(to_node)
+ if t_node is None:
self.add_node(to_node)
- t = self.nodes[to_node.key()]
+ t_node = self.nodes[to_node.key()]
- for n in f['next_nodes']:
- if n == to_node.key():
+ for n_node in f_node['next_nodes']:
+ if n_node == to_node.key():
return
self.nodes[from_node.key()]['next_nodes'].append(to_node.key())
def print(self):
+ """Print graph."""
for node_key in self.nodes:
print("Node", self.nodes[node_key]['node'])
- for to in self.nodes[node_key]['next_nodes']:
- print(" ->", to)
+ for to_node in self.nodes[node_key]['next_nodes']:
+ print(" ->", to_node)
def to_graph(self, nodes=None, message=None):
+        """Return the graph as a digraph string."""
sb = []
sb.append('# Legend:')
sb.append('# Thread 1 -> Lock 1 indicates Thread 1 is waiting on Lock 1')
@@ -136,12 +161,14 @@ class Graph(object):
sb.append("}")
return "\n".join(sb)
- def depth_first_search(self, node_key, nodes_visited, nodes_in_cycle=[]):
- """
+ def depth_first_search(self, node_key, nodes_visited, nodes_in_cycle=None):
+ """Perform depth first search and return the list of nodes in the cycle or None.
+
        The nodes_visited is a set of nodes that have already been visited.
        The nodes_in_cycle is a list of nodes in the potential cycle.
- Returns the list of nodes in the cycle or None.
"""
+ if nodes_in_cycle is None:
+ nodes_in_cycle = []
nodes_visited.add(node_key)
nodes_in_cycle.append(node_key)
for node in self.nodes[node_key]['next_nodes']:
@@ -158,9 +185,7 @@ class Graph(object):
return None
def detect_cycle(self):
- """
- If a cycle is detected, returns a list of nodes in the cycle or None.
- """
+ """If a cycle is detected, returns a list of nodes in the cycle or None."""
nodes_visited = set()
for node in self.nodes:
if node not in nodes_visited:
@@ -171,6 +196,7 @@ class Graph(object):
def find_lwpid(thread_dict, search_thread_id):
+ """Find lwpid."""
for (lwpid, thread_id) in thread_dict.items():
if thread_id == search_thread_id:
return lwpid
@@ -178,6 +204,7 @@ def find_lwpid(thread_dict, search_thread_id):
def find_func_block(block):
+ """Find func block."""
while block:
if block.function:
return block
@@ -186,6 +213,7 @@ def find_func_block(block):
def find_frame(function_name_pattern):
+ """Find frame."""
frame = gdb.newest_frame()
while frame:
block = None
@@ -207,6 +235,7 @@ def find_frame(function_name_pattern):
def find_mutex_holder(graph, thread_dict, show):
+ """Find mutex holder."""
frame = find_frame(r'std::mutex::lock\(\)')
if frame is None:
return
@@ -241,6 +270,7 @@ def find_mutex_holder(graph, thread_dict, show):
def find_lock_manager_holders(graph, thread_dict, show):
+ """Find lock manager holders."""
frame = find_frame(r'mongo::LockerImpl\<.*\>::')
if not frame:
return
@@ -253,8 +283,8 @@ def find_lock_manager_holders(graph, thread_dict, show):
lock_head = gdb.parse_and_eval(
"mongo::getGlobalLockManager()->_getBucket(resId)->findOrInsert(resId)")
- grantedList = lock_head.dereference()["grantedList"]
- lock_request_ptr = grantedList["_front"]
+ granted_list = lock_head.dereference()["grantedList"]
+ lock_request_ptr = granted_list["_front"]
while lock_request_ptr:
lock_request = lock_request_ptr.dereference()
locker_ptr = lock_request["locker"]
@@ -274,6 +304,7 @@ def find_lock_manager_holders(graph, thread_dict, show):
def get_locks(graph, thread_dict, show=False):
+ """Get locks."""
for thread in gdb.selected_inferior().threads():
try:
if not thread.is_valid():
@@ -285,7 +316,8 @@ def get_locks(graph, thread_dict, show=False):
print("Ignoring GDB error '%s' in get_locks" % str(err))
-def get_threads_info(graph=None):
+def get_threads_info():
+ """Get threads info."""
thread_dict = {}
for thread in gdb.selected_inferior().threads():
try:
@@ -295,7 +327,7 @@ def get_threads_info(graph=None):
# PTID is a tuple: Process ID (PID), Lightweight Process ID (LWPID), Thread ID (TID)
(_, lwpid, _) = thread.ptid
thread_num = thread.num
- thread_id = get_thread_id()
+ thread_id = mongo.get_thread_id()
if not thread_id:
print("Unable to retrieve thread_info for thread %d" % thread_num)
continue
@@ -307,16 +339,19 @@ def get_threads_info(graph=None):
class MongoDBShowLocks(gdb.Command):
- """Show MongoDB locks & pthread mutexes"""
+ """Show MongoDB locks & pthread mutexes."""
def __init__(self):
- register_mongo_command(self, "mongodb-show-locks", gdb.COMMAND_DATA)
+ """Initialize MongoDBShowLocks."""
+        mongo.RegisterMongoCommand.register(self, "mongodb-show-locks", gdb.COMMAND_DATA)
- def invoke(self, arg, _from_tty):
+ def invoke(self, *_):
+ """Invoke mongodb_show_locks."""
self.mongodb_show_locks()
- def mongodb_show_locks(self):
- """GDB in-process python supplement"""
+ @staticmethod
+ def mongodb_show_locks():
+ """GDB in-process python supplement."""
try:
thread_dict = get_threads_info()
get_locks(graph=None, thread_dict=thread_dict, show=True)
@@ -324,24 +359,27 @@ class MongoDBShowLocks(gdb.Command):
print("Ignoring GDB error '%s' in mongodb_show_locks" % str(err))
-MongoDBShowLocks()
+MongoDBShowLocks()  # type: ignore
class MongoDBWaitsForGraph(gdb.Command):
- """Create MongoDB WaitsFor lock graph [graph_file]"""
+ """Create MongoDB WaitsFor lock graph [graph_file]."""
def __init__(self):
- register_mongo_command(self, "mongodb-waitsfor-graph", gdb.COMMAND_DATA)
+ """Initialize MongoDBWaitsForGraph."""
+        mongo.RegisterMongoCommand.register(self, "mongodb-waitsfor-graph", gdb.COMMAND_DATA)
- def invoke(self, arg, _from_tty):
+ def invoke(self, arg, *_):
+ """Invoke mongodb_waitsfor_graph."""
self.mongodb_waitsfor_graph(arg)
- def mongodb_waitsfor_graph(self, file=None):
- """GDB in-process python supplement"""
+ @staticmethod
+ def mongodb_waitsfor_graph(graph_file=None):
+ """GDB in-process python supplement."""
graph = Graph()
try:
- thread_dict = get_threads_info(graph=graph)
+ thread_dict = get_threads_info()
get_locks(graph=graph, thread_dict=thread_dict, show=False)
graph.remove_nodes_without_edge()
if graph.is_empty():
@@ -351,10 +389,10 @@ class MongoDBWaitsForGraph(gdb.Command):
cycle_nodes = graph.detect_cycle()
if cycle_nodes:
cycle_message = "# Cycle detected in the graph nodes %s" % cycle_nodes
- if file:
- print("Saving digraph to %s" % file)
- with open(file, 'w') as f:
- f.write(graph.to_graph(nodes=cycle_nodes, message=cycle_message))
+ if graph_file:
+ print("Saving digraph to %s" % graph_file)
+ with open(graph_file, 'w') as fh:
+ fh.write(graph.to_graph(nodes=cycle_nodes, message=cycle_message))
print(cycle_message.split("# ")[1])
else:
print(graph.to_graph(nodes=cycle_nodes, message=cycle_message))
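
One substantive fix in this file's diff is the depth_first_search signature: a mutable default such as nodes_in_cycle=[] is created once at function definition and shared across every call that omits the argument, so state from one traversal leaks into the next. A minimal demonstration of the difference:

    from __future__ import print_function

    def buggy(item, acc=[]):  # one list shared by all calls
        acc.append(item)
        return acc

    def fixed(item, acc=None):  # fresh list per call
        if acc is None:
            acc = []
        acc.append(item)
        return acc

    print(buggy(1), buggy(2))  # [1, 2] [1, 2] -- state leaked between calls
    print(fixed(1), fixed(2))  # [1] [2]
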
diff --git a/buildscripts/gdb/mongo_printers.py b/buildscripts/gdb/mongo_printers.py
index 6c6d39e5ec7..6d1a5272d58 100644
--- a/buildscripts/gdb/mongo_printers.py
+++ b/buildscripts/gdb/mongo_printers.py
@@ -1,24 +1,24 @@
-"""GDB Pretty-printers for MongoDB
-"""
+"""GDB Pretty-printers for MongoDB."""
from __future__ import print_function
-import gdb.printing
import struct
import sys
+import gdb.printing
+
try:
import bson
import bson.json_util
import collections
from bson.codec_options import CodecOptions
-except ImportError as e:
+except ImportError as err:
print("Warning: Could not load bson library for Python '" + str(sys.version) + "'.")
print("Check with the pip command if pymongo 3.x is installed.")
bson = None
def get_unique_ptr(obj):
- """Read the value of a libstdc++ std::unique_ptr"""
+ """Read the value of a libstdc++ std::unique_ptr."""
return obj["_M_t"]['_M_head_impl']
@@ -30,13 +30,14 @@ def get_unique_ptr(obj):
class StatusPrinter(object):
- """Pretty-printer for mongo::Status"""
- OK = 0 # ErrorCodes::OK
+ """Pretty-printer for mongo::Status."""
def __init__(self, val):
+ """Initialize StatusPrinter."""
self.val = val
def to_string(self):
+ """Return status for printing."""
if not self.val['_error']:
return 'Status::OK()'
@@ -49,13 +50,15 @@ class StatusPrinter(object):
return 'Status(%s, %s)' % (code, reason)
-class StatusWithPrinter:
- """Pretty-printer for mongo::StatusWith<>"""
+class StatusWithPrinter(object):
+ """Pretty-printer for mongo::StatusWith<>."""
def __init__(self, val):
+ """Initialize StatusWithPrinter."""
self.val = val
def to_string(self):
+ """Return status for printing."""
if not self.val['_status']['_error']:
return 'StatusWith(OK, %s)' % (self.val['_t'])
@@ -69,27 +72,31 @@ class StatusWithPrinter:
return 'StatusWith(%s, %s)' % (code, reason)
-class StringDataPrinter:
- """Pretty-printer for mongo::StringData"""
+class StringDataPrinter(object):
+ """Pretty-printer for mongo::StringData."""
def __init__(self, val):
+ """Initialize StringDataPrinter."""
self.val = val
- def display_hint(self):
+ @staticmethod
+ def display_hint():
+ """Display hint."""
return 'string'
def to_string(self):
+ """Return data for printing."""
size = self.val["_size"]
if size == -1:
return self.val['_data'].lazy_string()
- else:
- return self.val['_data'].lazy_string(length=size)
+ return self.val['_data'].lazy_string(length=size)
-class BSONObjPrinter:
- """Pretty-printer for mongo::BSONObj"""
+class BSONObjPrinter(object):
+ """Pretty-printer for mongo::BSONObj."""
def __init__(self, val):
+ """Initialize BSONObjPrinter."""
self.val = val
self.ptr = self.val['_objdata'].cast(gdb.lookup_type('void').pointer())
# Handle the endianness of the BSON object size, which is represented as a 32-bit integer
@@ -101,10 +108,13 @@ class BSONObjPrinter:
else:
self.size = struct.unpack('<I', inferior.read_memory(self.ptr, 4))[0]
- def display_hint(self):
+ @staticmethod
+ def display_hint():
+ """Display hint."""
return 'map'
def children(self):
+        """Yield the BSON document's key/value pairs."""
# Do not decode a BSONObj with an invalid size.
if not bson or self.size < 5 or self.size > 17 * 1024 * 1024:
return
@@ -114,11 +124,12 @@ class BSONObjPrinter:
options = CodecOptions(document_class=collections.OrderedDict)
bsondoc = bson.BSON.decode(buf, codec_options=options)
- for k, v in bsondoc.items():
- yield 'key', k
- yield 'value', bson.json_util.dumps(v)
+ for key, val in bsondoc.items():
+ yield 'key', key
+ yield 'value', bson.json_util.dumps(val)
def to_string(self):
+ """Return BSONObj for printing."""
# The value has been optimized out.
if self.size == -1:
return "BSONObj @ %s" % (self.ptr)
@@ -132,29 +143,33 @@ class BSONObjPrinter:
if size == 5:
return "%s empty BSONObj @ %s" % (ownership, self.ptr)
- else:
- return "%s BSONObj %s bytes @ %s" % (ownership, size, self.ptr)
+ return "%s BSONObj %s bytes @ %s" % (ownership, size, self.ptr)
-class UnorderedFastKeyTablePrinter:
- """Pretty-printer for mongo::UnorderedFastKeyTable<>"""
+class UnorderedFastKeyTablePrinter(object):
+ """Pretty-printer for mongo::UnorderedFastKeyTable<>."""
def __init__(self, val):
+ """Initialize UnorderedFastKeyTablePrinter."""
self.val = val
# Get the value_type by doing a type lookup
- valueTypeName = val.type.strip_typedefs().name + "::value_type"
- valueType = gdb.lookup_type(valueTypeName).target()
- self.valueTypePtr = valueType.pointer()
+ value_type_name = val.type.strip_typedefs().name + "::value_type"
+ value_type = gdb.lookup_type(value_type_name).target()
+ self.value_type_ptr = value_type.pointer()
- def display_hint(self):
+ @staticmethod
+ def display_hint():
+ """Display hint."""
return 'map'
def to_string(self):
+ """Return UnorderedFastKeyTablePrinter for printing."""
return "UnorderedFastKeyTablePrinter<%s> with %s elems " % (
self.val.type.template_argument(0), self.val["_size"])
def children(self):
+        """Yield the table's key/value pairs."""
cap = self.val["_area"]["_hashMask"] + 1
it = get_unique_ptr(self.val["_area"]["_entries"])
end = it + cap
@@ -168,16 +183,17 @@ class UnorderedFastKeyTablePrinter:
if not elt['_used']:
continue
- value = elt['_data']["__data"].cast(self.valueTypePtr).dereference()
+ value = elt['_data']["__data"].cast(self.value_type_ptr).dereference()
yield ('key', value['first'])
yield ('value', value['second'])
-class DecorablePrinter:
- """Pretty-printer for mongo::Decorable<>"""
+class DecorablePrinter(object):
+ """Pretty-printer for mongo::Decorable<>."""
def __init__(self, val):
+ """Initialize DecorablePrinter."""
self.val = val
decl_vector = val["_decorations"]["_registry"]["_decorationInfo"]
@@ -187,14 +203,18 @@ class DecorablePrinter:
decinfo_t = gdb.lookup_type('mongo::DecorationRegistry::DecorationInfo')
self.count = int((int(finish) - int(self.start)) / decinfo_t.sizeof)
- def display_hint(self):
+ @staticmethod
+ def display_hint():
+ """Display hint."""
return 'map'
def to_string(self):
+ """Return Decorable for printing."""
return "Decorable<%s> with %s elems " % (self.val.type.template_argument(0), self.count)
def children(self):
- decorationData = get_unique_ptr(self.val["_decorations"]["_decorationData"])
+        """Yield each decoration's key/value pair."""
+ decoration_data = get_unique_ptr(self.val["_decorations"]["_decorationData"])
for index in range(self.count):
descriptor = self.start[index]
@@ -215,19 +235,20 @@ class DecorablePrinter:
# Cast the raw char[] into the actual object that is stored there.
type_t = gdb.lookup_type(type_name)
- obj = decorationData[dindex].cast(type_t)
+ obj = decoration_data[dindex].cast(type_t)
yield ('key', "%d:%s:%s" % (index, obj.address, type_name))
yield ('value', obj)
def find_match_brackets(search, opening='<', closing='>'):
- """Returns the index of the closing bracket that matches the first opening bracket.
- Returns -1 if no last matching bracket is found, i.e. not a template.
+ """Return the index of the closing bracket that matches the first opening bracket.
+
+ Return -1 if no last matching bracket is found, i.e. not a template.
- Example:
- 'Foo<T>::iterator<U>''
- returns 5
+ Example:
+        'Foo<T>::iterator<U>'
+ returns 5
"""
index = search.find(opening)
if index == -1:
@@ -237,11 +258,11 @@ def find_match_brackets(search, opening='<', closing='>'):
count = 1
str_len = len(search)
for index in range(start, str_len):
- c = search[index]
+ char = search[index]
- if c == opening:
+ if char == opening:
count += 1
- elif c == closing:
+ elif char == closing:
count -= 1
if count == 0:
@@ -251,9 +272,10 @@ def find_match_brackets(search, opening='<', closing='>'):
class MongoSubPrettyPrinter(gdb.printing.SubPrettyPrinter):
- """Sub pretty printer managed by the pretty-printer collection"""
+ """Sub pretty printer managed by the pretty-printer collection."""
def __init__(self, name, prefix, is_template, printer):
+ """Initialize MongoSubPrettyPrinter."""
super(MongoSubPrettyPrinter, self).__init__(name)
self.prefix = prefix
self.printer = printer
@@ -262,16 +284,20 @@ class MongoSubPrettyPrinter(gdb.printing.SubPrettyPrinter):
class MongoPrettyPrinterCollection(gdb.printing.PrettyPrinter):
"""MongoDB-specific printer printer collection that ignores subtypes.
+
It will match 'HashTable<T> but not 'HashTable<T>::iterator' when asked for 'HashTable'.
"""
def __init__(self):
+ """Initialize MongoPrettyPrinterCollection."""
super(MongoPrettyPrinterCollection, self).__init__("mongo", [])
def add(self, name, prefix, is_template, printer):
+ """Add a subprinter."""
self.subprinters.append(MongoSubPrettyPrinter(name, prefix, is_template, printer))
def __call__(self, val):
+ """Return matched printer type."""
# Get the type name.
lookup_tag = gdb.types.get_basic_type(val.type).tag
@@ -286,15 +312,18 @@ class MongoPrettyPrinterCollection(gdb.printing.PrettyPrinter):
# We do not want HashTable<T>::iterator as an example, just HashTable<T>
if index == -1 or index + 1 == len(lookup_tag):
for printer in self.subprinters:
- if printer.enabled and (
- (printer.is_template and lookup_tag.find(printer.prefix) == 0) or
- (not printer.is_template and lookup_tag == printer.prefix)):
- return printer.printer(val)
+ if not printer.enabled:
+ continue
+ if ((not printer.is_template or lookup_tag.find(printer.prefix) != 0)
+ and (printer.is_template or lookup_tag != printer.prefix)):
+ continue
+ return printer.printer(val)
return None
def build_pretty_printer():
+ """Build a pretty printer."""
pp = MongoPrettyPrinterCollection()
pp.add('BSONObj', 'mongo::BSONObj', False, BSONObjPrinter)
pp.add('Decorable', 'mongo::Decorable', True, DecorablePrinter)
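
BSONObjPrinter above reads the object's size prefix directly from inferior memory; since BSON stores the size as a little-endian int32, the unpack format must force the byte order on big-endian hosts. The struct call in isolation:

    # '<I' decodes an unsigned 32-bit little-endian int regardless of host order.
    import struct

    raw = b'\x05\x00\x00\x00'           # 5 == size of an empty BSON document
    (size,) = struct.unpack('<I', raw)
    print(size)  # 5
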
diff --git a/buildscripts/generate_compile_expansions.py b/buildscripts/generate_compile_expansions.py
index 527ba1b9d0c..28061857451 100755
--- a/buildscripts/generate_compile_expansions.py
+++ b/buildscripts/generate_compile_expansions.py
@@ -1,12 +1,13 @@
#!/usr/bin/env python
"""
-This script generates the compile expansions file used by Evergreen as part of the push/release
-process.
+Generate the compile expansions file used by Evergreen as part of the push/release process.
Invoke by specifying an output file.
$ python generate_compile_expansions.py --out compile_expansions.yml
"""
+from __future__ import print_function
+
import argparse
import json
import os
@@ -14,7 +15,7 @@ import re
import sys
import yaml
-version_json = "version.json"
+VERSION_JSON = "version.json"
def generate_expansions():
@@ -25,7 +26,7 @@ def generate_expansions():
"""
args = parse_args()
expansions = {}
- expansions.update(generate_version_expansions(args))
+ expansions.update(generate_version_expansions())
expansions.update(generate_scons_cache_expansions())
with open(args.out, "w") as out:
@@ -34,18 +35,19 @@ def generate_expansions():
def parse_args():
+ """Parse program arguments."""
parser = argparse.ArgumentParser()
parser.add_argument("--out", required=True)
return parser.parse_args()
-def generate_version_expansions(args):
+def generate_version_expansions():
"""Generate expansions from a version.json file if given, or $MONGO_VERSION."""
expansions = {}
- if os.path.exists(version_json):
- with open(version_json, "r") as f:
- data = f.read()
+ if os.path.exists(VERSION_JSON):
+ with open(VERSION_JSON, "r") as fh:
+ data = fh.read()
version_data = json.loads(data)
version_line = version_data['version']
version_parts = match_verstr(version_line)
@@ -81,8 +83,8 @@ def generate_scons_cache_expansions():
default_cache_path_base = "/data/scons-cache"
if os.path.isfile(system_id_path):
- with open(system_id_path, "r") as f:
- default_cache_path = os.path.join(default_cache_path_base, f.readline().strip())
+ with open(system_id_path, "r") as fh:
+ default_cache_path = os.path.join(default_cache_path_base, fh.readline().strip())
expansions["scons_cache_path"] = default_cache_path
@@ -98,8 +100,7 @@ def generate_scons_cache_expansions():
def match_verstr(verstr):
- """
- This function matches a version string and captures the "extra" part.
+ """Match a version string and capture the "extra" part.
If the version is a release like "2.3.4" or "2.3.4-rc0", this will return
None. If the version is a pre-release like "2.3.4-325-githash" or
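
match_verstr's body sits outside this hunk; per its docstring it returns None for releases like "2.3.4" or "2.3.4-rc0" and a match whose groups carry the "extra" part for pre-releases. A hedged reconstruction consistent with that description, not the script's actual pattern:

    import re

    def match_verstr(verstr):
        # Illustrative pattern: reject plain releases and rc builds.
        return re.match(r'^(\d+\.\d+\.\d+)-((?!rc\d+$).+)$', verstr)

    print(match_verstr("2.3.4"))                       # None
    print(match_verstr("2.3.4-rc0"))                   # None
    print(match_verstr("2.3.4-325-githash").groups())  # ('2.3.4', '325-githash')
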
diff --git a/buildscripts/generate_compile_expansions_shared_cache.py b/buildscripts/generate_compile_expansions_shared_cache.py
index 57115e30038..64528e0d1d8 100755
--- a/buildscripts/generate_compile_expansions_shared_cache.py
+++ b/buildscripts/generate_compile_expansions_shared_cache.py
@@ -1,12 +1,13 @@
#!/usr/bin/env python
"""
-This script generates the compile expansions file used by Evergreen as part of the push/release
-process.
+Generate the compile expansions file used by Evergreen as part of the push/release process.
Invoke by specifying an output file.
$ python generate_compile_expansions.py --out compile_expansions.yml
"""
+from __future__ import print_function
+
import argparse
import json
import os
@@ -14,7 +15,7 @@ import re
import sys
import yaml
-version_json = "version.json"
+VERSION_JSON = "version.json"
def generate_expansions():
@@ -25,7 +26,7 @@ def generate_expansions():
"""
args = parse_args()
expansions = {}
- expansions.update(generate_version_expansions(args))
+ expansions.update(generate_version_expansions())
expansions.update(generate_scons_cache_expansions())
with open(args.out, "w") as out:
@@ -34,18 +35,19 @@ def generate_expansions():
def parse_args():
+ """Parse program arguments."""
parser = argparse.ArgumentParser()
parser.add_argument("--out", required=True)
return parser.parse_args()
-def generate_version_expansions(args):
+def generate_version_expansions():
"""Generate expansions from a version.json file if given, or $MONGO_VERSION."""
expansions = {}
- if os.path.exists(version_json):
- with open(version_json, "r") as f:
- data = f.read()
+ if os.path.exists(VERSION_JSON):
+ with open(VERSION_JSON, "r") as fh:
+ data = fh.read()
version_data = json.loads(data)
version_line = version_data['version']
version_parts = match_verstr(version_line)
@@ -84,8 +86,8 @@ def generate_scons_cache_expansions():
system_id_path = "/etc/mongodb-build-system-id"
if os.path.isfile(system_id_path):
- with open(system_id_path, "r") as f:
- system_uuid = f.readline().strip()
+ with open(system_id_path, "r") as fh:
+ system_uuid = fh.readline().strip()
# Set the scons shared cache setting
@@ -124,8 +126,7 @@ def generate_scons_cache_expansions():
def match_verstr(verstr):
- """
- This function matches a version string and captures the "extra" part.
+ """Match a version string and capture the "extra" part.
If the version is a release like "2.3.4" or "2.3.4-rc0", this will return
None. If the version is a pre-release like "2.3.4-325-githash" or
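
Both expansion scripts key the scons cache on the first line of /etc/mongodb-build-system-id when that file exists. A compact sketch of the derivation (the fallback return is illustrative; the real scripts branch further for the shared-cache case):

    import os

    def scons_cache_path(system_id_path="/etc/mongodb-build-system-id",
                         base="/data/scons-cache"):
        # Use the per-host system id as the cache subdirectory when available.
        if os.path.isfile(system_id_path):
            with open(system_id_path, "r") as fh:
                return os.path.join(base, fh.readline().strip())
        return base  # illustrative fallback
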
diff --git a/buildscripts/git.py b/buildscripts/git.py
index 1f013c52ae2..028defd7766 100644
--- a/buildscripts/git.py
+++ b/buildscripts/git.py
@@ -22,17 +22,18 @@ if os.name == "posix" and sys.version_info[0] == 2:
warnings.warn(("Falling back to using the subprocess module because subprocess32 isn't"
" available. When using the subprocess module, a child process may trigger"
" an invalid free(). See SERVER-22219 for more details."), RuntimeWarning)
- import subprocess
+ import subprocess # type: ignore
else:
import subprocess
LOGGER = logging.getLogger(__name__)
-class Repository(object):
+class Repository(object): # pylint: disable=too-many-public-methods
"""Represent a local git repository."""
def __init__(self, directory):
+ """Initialize Repository."""
self.directory = directory
def git_add(self, args):
@@ -173,7 +174,8 @@ class Repository(object):
def clone(url, directory, branch=None, depth=None):
"""Clone the repository designed by 'url' into 'directory'.
- Return a Repository instance."""
+ Return a Repository instance.
+ """
params = ["git", "clone"]
if branch:
params += ["--branch", branch]
@@ -188,7 +190,8 @@ class Repository(object):
def get_base_directory(directory=None):
"""Return the base directory of the repository the given directory belongs to.
- If no directory is specified, then the current working directory is used."""
+ If no directory is specified, then the current working directory is used.
+ """
if directory is not None:
params = ["git", "-C", directory]
else:
@@ -221,8 +224,7 @@ class Repository(object):
return result.returncode
def _run_cmd(self, cmd, args):
- """Run the git command and return a GitCommandResult instance.
- """
+ """Run the git command and return a GitCommandResult instance."""
params = ["git", cmd] + args
return self._run_process(cmd, params, cwd=self.directory)
@@ -251,8 +253,9 @@ class GitException(Exception):
stderr: the error output of the git command.
"""
- def __init__(self, message, returncode=None, cmd=None, process_args=None, stdout=None,
- stderr=None):
+ def __init__( # pylint: disable=too-many-arguments
+ self, message, returncode=None, cmd=None, process_args=None, stdout=None, stderr=None):
+ """Initialize GitException."""
Exception.__init__(self, message)
self.returncode = returncode
self.cmd = cmd
@@ -272,7 +275,9 @@ class GitCommandResult(object):
stderr: the error output of the command.
"""
- def __init__(self, cmd, process_args, returncode, stdout=None, stderr=None):
+ def __init__( # pylint: disable=too-many-arguments
+ self, cmd, process_args, returncode, stdout=None, stderr=None):
+ """Initialize GitCommandResult."""
self.cmd = cmd
self.process_args = process_args
self.returncode = returncode
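
Every public wrapper on Repository funnels through _run_cmd, which builds ["git", cmd] + args and executes it in the repository directory. A reduced sketch of that pattern (GitCommandResult is trimmed to a plain tuple here):

    import subprocess

    class Repository(object):
        def __init__(self, directory):
            self.directory = directory

        def _run_cmd(self, cmd, args):
            # Build ["git", <cmd>, <args...>] and run it inside the repository.
            params = ["git", cmd] + args
            proc = subprocess.Popen(params, cwd=self.directory,
                                    stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            stdout, stderr = proc.communicate()
            return (proc.returncode, stdout, stderr)

    # Usage sketch:
    # repo = Repository("/path/to/repo")
    # returncode, out, err = repo._run_cmd("rev-parse", ["HEAD"])
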
diff --git a/buildscripts/hang_analyzer.py b/buildscripts/hang_analyzer.py
index c2c0fa05b2e..4935fb92754 100755
--- a/buildscripts/hang_analyzer.py
+++ b/buildscripts/hang_analyzer.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-"""Hang Analyzer
+"""Hang Analyzer module.
A prototype hang analyzer for Evergreen integration to help investigate test timeouts
@@ -25,11 +25,11 @@ import sys
import tempfile
import traceback
import time
-from distutils import spawn
+from distutils import spawn # pylint: disable=no-name-in-module
from optparse import OptionParser
-_is_windows = (sys.platform == "win32")
+_IS_WINDOWS = (sys.platform == "win32")
-if _is_windows:
+if _IS_WINDOWS:
import win32event
import win32api
@@ -39,11 +39,12 @@ if __name__ == "__main__" and __package__ is None:
from buildscripts.resmokelib import core
-def call(a, logger):
- logger.info(str(a))
+def call(args, logger):
+ """Call subprocess on args list."""
+ logger.info(str(args))
# Use a common pipe for stdout & stderr for logging.
- process = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
logger_pipe = core.pipe.LoggerPipe(logger, logging.INFO, process.stdout)
logger_pipe.wait_until_started()
@@ -51,54 +52,58 @@ def call(a, logger):
logger_pipe.wait_until_finished()
if ret != 0:
- logger.error("Bad exit code %d" % (ret))
- raise Exception("Bad exit code %d from %s" % (ret, " ".join(a)))
+ logger.error("Bad exit code %d", ret)
+ raise Exception("Bad exit code %d from %s" % (ret, " ".join(args)))
-def callo(a, logger):
- logger.info("%s" % str(a))
+def callo(args, logger):
+ """Call subprocess on args string."""
+ logger.info("%s", str(args))
- return subprocess.check_output(a)
+ return subprocess.check_output(args)
def find_program(prog, paths):
- """Finds the specified program in env PATH, or tries a set of paths """
+    """Find the specified program in env PATH, or try a set of paths."""
loc = spawn.find_executable(prog)
if loc is not None:
return loc
for loc in paths:
- p = os.path.join(loc, prog)
- if os.path.exists(p):
- return p
+ full_prog = os.path.join(loc, prog)
+ if os.path.exists(full_prog):
+ return full_prog
return None
def get_process_logger(debugger_output, pid, process_name):
- """Returns the process logger from options specified."""
+ """Return the process logger from options specified."""
process_logger = logging.Logger("process", level=logging.DEBUG)
process_logger.mongo_process_filename = None
if 'stdout' in debugger_output:
- handler = logging.StreamHandler(sys.stdout)
- handler.setFormatter(logging.Formatter(fmt="%(message)s"))
- process_logger.addHandler(handler)
+ s_handler = logging.StreamHandler(sys.stdout)
+ s_handler.setFormatter(logging.Formatter(fmt="%(message)s"))
+ process_logger.addHandler(s_handler)
if 'file' in debugger_output:
filename = "debugger_%s_%d.log" % (os.path.splitext(process_name)[0], pid)
process_logger.mongo_process_filename = filename
- handler = logging.FileHandler(filename=filename, mode="w")
- handler.setFormatter(logging.Formatter(fmt="%(message)s"))
- process_logger.addHandler(handler)
+ f_handler = logging.FileHandler(filename=filename, mode="w")
+ f_handler.setFormatter(logging.Formatter(fmt="%(message)s"))
+ process_logger.addHandler(f_handler)
return process_logger
class WindowsDumper(object):
- def __find_debugger(self, logger, debugger):
- """Finds the installed debugger"""
+ """WindowsDumper class."""
+
+ @staticmethod
+ def __find_debugger(logger, debugger):
+ """Find the installed debugger."""
# We are looking for c:\Program Files (x86)\Windows Kits\8.1\Debuggers\x64
cdb = spawn.find_executable(debugger)
if cdb is not None:
@@ -107,26 +112,27 @@ class WindowsDumper(object):
# Cygwin via sshd does not expose the normal environment variables
# Use the shell api to get the variable instead
- rootDir = shell.SHGetFolderPath(0, shellcon.CSIDL_PROGRAM_FILESX86, None, 0)
+ root_dir = shell.SHGetFolderPath(0, shellcon.CSIDL_PROGRAM_FILESX86, None, 0)
- for i in range(0, 2):
- pathToTest = os.path.join(rootDir, "Windows Kits", "8." + str(i), "Debuggers", "x64")
- logger.info("Checking for debugger in %s" % pathToTest)
- if (os.path.exists(pathToTest)):
- return os.path.join(pathToTest, debugger)
+ for idx in range(0, 2):
+ dbg_path = os.path.join(root_dir, "Windows Kits", "8." + str(idx), "Debuggers", "x64")
+ logger.info("Checking for debugger in %s", dbg_path)
+ if os.path.exists(dbg_path):
+ return os.path.join(dbg_path, debugger)
return None
- def dump_info(self, root_logger, logger, pid, process_name, take_dump):
- """Dump useful information to the console"""
+ def dump_info( # pylint: disable=too-many-arguments
+ self, root_logger, logger, pid, process_name, take_dump):
+ """Dump useful information to the console."""
debugger = "cdb.exe"
dbg = self.__find_debugger(root_logger, debugger)
if dbg is None:
- root_logger.warning("Debugger %s not found, skipping dumping of %d" % (debugger, pid))
+ root_logger.warning("Debugger %s not found, skipping dumping of %d", debugger, pid)
return
- root_logger.info("Debugger %s, analyzing %s process with PID %d" % (dbg, process_name, pid))
+ root_logger.info("Debugger %s, analyzing %s process with PID %d", dbg, process_name, pid)
dump_command = ""
if take_dump:
@@ -134,7 +140,7 @@ class WindowsDumper(object):
dump_file = "dump_%s.%d.%s" % (os.path.splitext(process_name)[0], pid,
self.get_dump_ext())
dump_command = ".dump /ma %s" % dump_file
- root_logger.info("Dumping core to %s" % dump_file)
+ root_logger.info("Dumping core to %s", dump_file)
cmds = [
".symfix", # Fixup symbol path
@@ -151,48 +157,56 @@ class WindowsDumper(object):
call([dbg, '-c', ";".join(cmds), '-p', str(pid)], logger)
- root_logger.info("Done analyzing %s process with PID %d" % (process_name, pid))
+ root_logger.info("Done analyzing %s process with PID %d", process_name, pid)
- def get_dump_ext(self):
+ @staticmethod
+ def get_dump_ext():
+ """Return the dump file extension."""
return "mdmp"
class WindowsProcessList(object):
- def __find_ps(self):
- """Finds tasklist """
+ """WindowsProcessList class."""
+
+ @staticmethod
+ def __find_ps():
+ """Find tasklist."""
return os.path.join(os.environ["WINDIR"], "system32", "tasklist.exe")
def dump_processes(self, logger):
- """Get list of [Pid, Process Name]"""
+ """Get list of [Pid, Process Name]."""
ps = self.__find_ps()
- logger.info("Getting list of processes using %s" % ps)
+ logger.info("Getting list of processes using %s", ps)
ret = callo([ps, "/FO", "CSV"], logger)
- b = StringIO.StringIO(ret)
- csvReader = csv.reader(b)
+ buff = StringIO.StringIO(ret)
+ csv_reader = csv.reader(buff)
- p = [[int(row[1]), row[0]] for row in csvReader if row[1] != "PID"]
-
- return p
+ return [[int(row[1]), row[0]] for row in csv_reader if row[1] != "PID"]
# LLDB dumper is for MacOS X
class LLDBDumper(object):
- def __find_debugger(self, debugger):
- """Finds the installed debugger"""
+ """LLDBDumper class."""
+
+ @staticmethod
+ def __find_debugger(debugger):
+ """Find the installed debugger."""
return find_program(debugger, ['/usr/bin'])
- def dump_info(self, root_logger, logger, pid, process_name, take_dump):
+ def dump_info( # pylint: disable=too-many-arguments
+ self, root_logger, logger, pid, process_name, take_dump):
+ """Dump info."""
debugger = "lldb"
dbg = self.__find_debugger(debugger)
if dbg is None:
- root_logger.warning("Debugger %s not found, skipping dumping of %d" % (debugger, pid))
+ root_logger.warning("Debugger %s not found, skipping dumping of %d", debugger, pid)
return
- root_logger.info("Debugger %s, analyzing %s process with PID %d" % (dbg, process_name, pid))
+ root_logger.info("Debugger %s, analyzing %s process with PID %d", dbg, process_name, pid)
lldb_version = callo([dbg, "--version"], logger)
@@ -217,7 +231,7 @@ class LLDBDumper(object):
# Dump to file, dump_<process name>.<pid>.core
dump_file = "dump_%s.%d.%s" % (process_name, pid, self.get_dump_ext())
dump_command = "process save-core %s" % dump_file
- root_logger.info("Dumping core to %s" % dump_file)
+ root_logger.info("Dumping core to %s", dump_file)
cmds = [
"attach -p %d" % pid,
@@ -230,8 +244,8 @@ class LLDBDumper(object):
tf = tempfile.NamedTemporaryFile()
- for c in cmds:
- tf.write(c + "\n")
+ for cmd in cmds:
+ tf.write(cmd + "\n")
tf.flush()
@@ -240,60 +254,68 @@ class LLDBDumper(object):
call(['cat', tf.name], logger)
call([dbg, '--source', tf.name], logger)
- root_logger.info("Done analyzing %s process with PID %d" % (process_name, pid))
+ root_logger.info("Done analyzing %s process with PID %d", process_name, pid)
- def get_dump_ext(self):
+ @staticmethod
+ def get_dump_ext():
+ """Return the dump file extension."""
return "core"
class DarwinProcessList(object):
- def __find_ps(self):
- """Finds ps"""
+ """DarwinProcessList class."""
+
+ @staticmethod
+ def __find_ps():
+ """Find ps."""
return find_program('ps', ['/bin'])
def dump_processes(self, logger):
- """Get list of [Pid, Process Name]"""
+ """Get list of [Pid, Process Name]."""
ps = self.__find_ps()
- logger.info("Getting list of processes using %s" % ps)
+ logger.info("Getting list of processes using %s", ps)
ret = callo([ps, "-axco", "pid,comm"], logger)
- b = StringIO.StringIO(ret)
- csvReader = csv.reader(b, delimiter=' ', quoting=csv.QUOTE_NONE, skipinitialspace=True)
-
- p = [[int(row[0]), row[1]] for row in csvReader if row[0] != "PID"]
+ buff = StringIO.StringIO(ret)
+ csv_reader = csv.reader(buff, delimiter=' ', quoting=csv.QUOTE_NONE, skipinitialspace=True)
- return p
+ return [[int(row[0]), row[1]] for row in csv_reader if row[0] != "PID"]
# GDB dumper is for Linux & Solaris
class GDBDumper(object):
- def __find_debugger(self, debugger):
- """Finds the installed debugger"""
+ """GDBDumper class."""
+
+ @staticmethod
+ def __find_debugger(debugger):
+ """Find the installed debugger."""
return find_program(debugger, ['/opt/mongodbtoolchain/gdb/bin', '/usr/bin'])
- def dump_info(self, root_logger, logger, pid, process_name, take_dump):
+ def dump_info( # pylint: disable=too-many-arguments,too-many-locals
+ self, root_logger, logger, pid, process_name, take_dump):
+ """Dump info."""
debugger = "gdb"
dbg = self.__find_debugger(debugger)
if dbg is None:
- logger.warning("Debugger %s not found, skipping dumping of %d" % (debugger, pid))
+ logger.warning("Debugger %s not found, skipping dumping of %d", debugger, pid)
return
- root_logger.info("Debugger %s, analyzing %s process with PID %d" % (dbg, process_name, pid))
+ root_logger.info("Debugger %s, analyzing %s process with PID %d", dbg, process_name, pid)
dump_command = ""
if take_dump:
# Dump to file, dump_<process name>.<pid>.core
dump_file = "dump_%s.%d.%s" % (process_name, pid, self.get_dump_ext())
dump_command = "gcore %s" % dump_file
- root_logger.info("Dumping core to %s" % dump_file)
+ root_logger.info("Dumping core to %s", dump_file)
call([dbg, "--version"], logger)
script_dir = os.path.dirname(os.path.abspath(__file__))
- root_logger.info("dir %s" % script_dir)
+ root_logger.info("dir %s", script_dir)
gdb_dir = os.path.join(script_dir, "gdb")
mongo_script = os.path.join(gdb_dir, "mongo.py")
mongo_printers_script = os.path.join(gdb_dir, "mongo_printers.py")
@@ -363,13 +385,16 @@ class GDBDumper(object):
call([dbg, "--quiet", "--nx"] +
list(itertools.chain.from_iterable([['-ex', b] for b in cmds])), logger)
- root_logger.info("Done analyzing %s process with PID %d" % (process_name, pid))
+ root_logger.info("Done analyzing %s process with PID %d", process_name, pid)
- def get_dump_ext(self):
+ @staticmethod
+ def get_dump_ext():
+ """Return the dump file extension."""
return "core"
- def _find_gcore(self):
- """Finds the installed gcore"""
+ @staticmethod
+ def _find_gcore():
+ """Find the installed gcore."""
dbg = "/usr/bin/gcore"
if os.path.exists(dbg):
return dbg
@@ -378,81 +403,90 @@ class GDBDumper(object):
class LinuxProcessList(object):
- def __find_ps(self):
- """Finds ps"""
+ """LinuxProcessList class."""
+
+ @staticmethod
+ def __find_ps():
+ """Find ps."""
return find_program('ps', ['/bin', '/usr/bin'])
def dump_processes(self, logger):
- """Get list of [Pid, Process Name]"""
+ """Get list of [Pid, Process Name]."""
ps = self.__find_ps()
- logger.info("Getting list of processes using %s" % ps)
+ logger.info("Getting list of processes using %s", ps)
call([ps, "--version"], logger)
ret = callo([ps, "-eo", "pid,args"], logger)
- b = StringIO.StringIO(ret)
- csvReader = csv.reader(b, delimiter=' ', quoting=csv.QUOTE_NONE, skipinitialspace=True)
+ buff = StringIO.StringIO(ret)
+ csv_reader = csv.reader(buff, delimiter=' ', quoting=csv.QUOTE_NONE, skipinitialspace=True)
- p = [[int(row[0]), os.path.split(row[1])[1]] for row in csvReader if row[0] != "PID"]
-
- return p
+ return [[int(row[0]), os.path.split(row[1])[1]] for row in csv_reader if row[0] != "PID"]
class SolarisProcessList(object):
- def __find_ps(self):
- """Finds ps"""
+ """SolarisProcessList class."""
+
+ @staticmethod
+ def __find_ps():
+ """Find ps."""
return find_program('ps', ['/bin', '/usr/bin'])
def dump_processes(self, logger):
- """Get list of [Pid, Process Name]"""
+ """Get list of [Pid, Process Name]."""
ps = self.__find_ps()
- logger.info("Getting list of processes using %s" % ps)
+ logger.info("Getting list of processes using %s", ps)
ret = callo([ps, "-eo", "pid,args"], logger)
- b = StringIO.StringIO(ret)
- csvReader = csv.reader(b, delimiter=' ', quoting=csv.QUOTE_NONE, skipinitialspace=True)
+ buff = StringIO.StringIO(ret)
+ csv_reader = csv.reader(buff, delimiter=' ', quoting=csv.QUOTE_NONE, skipinitialspace=True)
- p = [[int(row[0]), os.path.split(row[1])[1]] for row in csvReader if row[0] != "PID"]
-
- return p
+ return [[int(row[0]), os.path.split(row[1])[1]] for row in csv_reader if row[0] != "PID"]
# jstack is a JDK utility
class JstackDumper(object):
- def __find_debugger(self, debugger):
- """Finds the installed jstack debugger"""
+ """JstackDumper class."""
+
+ @staticmethod
+ def __find_debugger(debugger):
+ """Find the installed jstack debugger."""
return find_program(debugger, ['/usr/bin'])
def dump_info(self, root_logger, logger, pid, process_name):
- """Dump java thread stack traces to the console"""
+ """Dump java thread stack traces to the console."""
debugger = "jstack"
jstack = self.__find_debugger(debugger)
if jstack is None:
- logger.warning("Debugger %s not found, skipping dumping of %d" % (debugger, pid))
+ logger.warning("Debugger %s not found, skipping dumping of %d", debugger, pid)
return
- root_logger.info("Debugger %s, analyzing %s process with PID %d" % (jstack, process_name,
- pid))
+ root_logger.info("Debugger %s, analyzing %s process with PID %d", jstack, process_name, pid)
call([jstack, "-l", str(pid)], logger)
- root_logger.info("Done analyzing %s process with PID %d" % (process_name, pid))
+ root_logger.info("Done analyzing %s process with PID %d", process_name, pid)
# jstack is a JDK utility
class JstackWindowsDumper(object):
- def dump_info(self, root_logger, logger, pid, process_name):
- """Dump java thread stack traces to the logger"""
+ """JstackWindowsDumper class."""
+
+ @staticmethod
+ def dump_info(root_logger, pid):
+ """Dump java thread stack traces to the logger."""
- root_logger.warning("Debugger jstack not supported, skipping dumping of %d" % (pid))
+ root_logger.warning("Debugger jstack not supported, skipping dumping of %d", pid)
def get_hang_analyzers():
+ """Return hang analyzers."""
+
dbg = None
jstack = None
ps = None
@@ -464,7 +498,7 @@ def get_hang_analyzers():
dbg = GDBDumper()
jstack = JstackDumper()
ps = SolarisProcessList()
- elif _is_windows or sys.platform == "cygwin":
+ elif _IS_WINDOWS or sys.platform == "cygwin":
dbg = WindowsDumper()
jstack = JstackWindowsDumper()
ps = WindowsProcessList()
@@ -477,7 +511,7 @@ def get_hang_analyzers():
def check_dump_quota(quota, ext):
- """Check if sum of the files with ext is within the specified quota in megabytes"""
+ """Check if sum of the files with ext is within the specified quota in megabytes."""
files = glob.glob("*." + ext)
@@ -485,11 +519,11 @@ def check_dump_quota(quota, ext):
for file_name in files:
size_sum += os.path.getsize(file_name)
- return (size_sum <= quota)
+ return size_sum <= quota
def signal_event_object(logger, pid):
- """Signal the Windows event object"""
+ """Signal the Windows event object."""
# Use unique event_name created.
event_name = "Global\\Mongo_Python_" + str(pid)
@@ -499,13 +533,13 @@ def signal_event_object(logger, pid):
inherit_handle = False
task_timeout_handle = win32event.OpenEvent(desired_access, inherit_handle, event_name)
except win32event.error as err:
- logger.info("Exception from win32event.OpenEvent with error: %s" % err)
+ logger.info("Exception from win32event.OpenEvent with error: %s", err)
return
try:
win32event.SetEvent(task_timeout_handle)
except win32event.error as err:
- logger.info("Exception from win32event.SetEvent with error: %s" % err)
+ logger.info("Exception from win32event.SetEvent with error: %s", err)
finally:
win32api.CloseHandle(task_timeout_handle)
@@ -514,23 +548,24 @@ def signal_event_object(logger, pid):
def signal_process(logger, pid, signalnum):
- """Signal process with signal, N/A on Windows"""
+ """Signal process with signal, N/A on Windows."""
try:
os.kill(pid, signalnum)
logger.info("Waiting for process to report")
time.sleep(5)
- except OSError, e:
- logger.error("Hit OS error trying to signal process: %s" % str(e))
+ except OSError as err:
+ logger.error("Hit OS error trying to signal process: %s", err)
except AttributeError:
logger.error("Cannot send signal to a process on Windows")
def pname_match(match_type, pname, interesting_processes):
+ """Return True if the pname matches in interesting_processes."""
pname = os.path.splitext(pname)[0]
for ip in interesting_processes:
- if (match_type == 'exact' and pname == ip or match_type == 'contains' and ip in pname):
+ if match_type == 'exact' and pname == ip or match_type == 'contains' and ip in pname:
return True
return False
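For reference, the extension stripping above means a Windows binary name still matches its bare process name. A self-contained restatement of the helper with a few illustrative calls (process names invented for illustration):

    import os

    def pname_match(match_type, pname, interesting_processes):
        """Mirror of the helper above, shown with example inputs."""
        pname = os.path.splitext(pname)[0]
        for ip in interesting_processes:
            if match_type == 'exact' and pname == ip or match_type == 'contains' and ip in pname:
                return True
        return False

    assert pname_match('exact', 'mongod.exe', ['mongod'])     # '.exe' is stripped first
    assert pname_match('contains', 'mongobridge', ['mongo'])  # substring match
    assert not pname_match('exact', 'mongos', ['mongod'])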
@@ -539,32 +574,33 @@ def pname_match(match_type, pname, interesting_processes):
#
# 1. Get a list of interesting processes
# 2. Dump useful information or take dumps
-def main():
+def main(): # pylint: disable=too-many-branches,too-many-locals,too-many-statements
+ """Execute Main program."""
root_logger = logging.Logger("hang_analyzer", level=logging.DEBUG)
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter(fmt="%(message)s"))
root_logger.addHandler(handler)
- root_logger.info("Python Version: %s" % sys.version)
- root_logger.info("OS: %s" % platform.platform())
+ root_logger.info("Python Version: %s", sys.version)
+ root_logger.info("OS: %s", platform.platform())
try:
- if _is_windows or sys.platform == "cygwin":
+ if _IS_WINDOWS or sys.platform == "cygwin":
distro = platform.win32_ver()
- root_logger.info("Windows Distribution: %s" % str(distro))
+ root_logger.info("Windows Distribution: %s", distro)
else:
distro = platform.linux_distribution()
- root_logger.info("Linux Distribution: %s" % str(distro))
+ root_logger.info("Linux Distribution: %s", distro)
except AttributeError:
root_logger.warning("Cannot determine Linux distro since Python is too old")
try:
uid = os.getuid()
- root_logger.info("Current User: %s" % str(uid))
+ root_logger.info("Current User: %s", uid)
current_login = os.getlogin()
- root_logger.info("Current Login: %s" % current_login)
+ root_logger.info("Current Login: %s", current_login)
except OSError:
root_logger.warning("Cannot determine Unix Current Login")
except AttributeError:
@@ -577,10 +613,10 @@ def main():
parser = OptionParser(description=__doc__)
parser.add_option('-m', '--process-match', dest='process_match', choices=['contains', 'exact'],
default='contains',
- help=("Type of match for process names (-p & -g), specify 'contains', or"
- " 'exact'. Note that the process name match performs the following"
- " conversions: change all process names to lowecase, strip off the file"
- " extension, like '.exe' on Windows. Default is 'contains'."))
+ help="Type of match for process names (-p & -g), specify 'contains', or"
+ " 'exact'. Note that the process name match performs the following"
+ " conversions: change all process names to lowecase, strip off the file"
+ " extension, like '.exe' on Windows. Default is 'contains'.")
parser.add_option('-p', '--process-names', dest='process_names',
help='Comma separated list of process names to analyze')
parser.add_option('-g', '--go-process-names', dest='go_process_names',
@@ -594,15 +630,15 @@ def main():
help='Maximum total size of core dumps to keep in megabytes')
parser.add_option('-o', '--debugger-output', dest='debugger_output', action="append",
choices=['file', 'stdout'], default=None,
- help=("If 'stdout', then the debugger's output is written to the Python"
- " process's stdout. If 'file', then the debugger's output is written"
- " to a file named debugger_<process>_<pid>.log for each process it"
- " attaches to. This option can be specified multiple times on the"
- " command line to have the debugger's output written to multiple"
- " locations. By default, the debugger's output is written only to the"
- " Python process's stdout."))
+ help="If 'stdout', then the debugger's output is written to the Python"
+ " process's stdout. If 'file', then the debugger's output is written"
+ " to a file named debugger_<process>_<pid>.log for each process it"
+ " attaches to. This option can be specified multiple times on the"
+ " command line to have the debugger's output written to multiple"
+ " locations. By default, the debugger's output is written only to the"
+ " Python process's stdout.")
- (options, args) = parser.parse_args()
+ (options, _) = parser.parse_args()
if options.debugger_output is None:
options.debugger_output = ['stdout']
@@ -621,7 +657,7 @@ def main():
[ps, dbg, jstack] = get_hang_analyzers()
if ps is None or (dbg is None and jstack is None):
- root_logger.warning("hang_analyzer.py: Unsupported platform: %s" % (sys.platform))
+ root_logger.warning("hang_analyzer.py: Unsupported platform: %s", sys.platform)
exit(1)
all_processes = ps.dump_processes(root_logger)
@@ -640,14 +676,14 @@ def main():
running_pids = set([pid for (pid, pname) in all_processes])
missing_pids = set(process_ids) - running_pids
if missing_pids:
- root_logger.warning(
- "The following requested process ids are not running %s" % list(missing_pids))
+ root_logger.warning("The following requested process ids are not running %s",
+ list(missing_pids))
else:
processes = [(pid, pname) for (pid, pname) in all_processes
if pname_match(options.process_match, pname, interesting_processes)
and pid != os.getpid()]
- root_logger.info("Found %d interesting processes %s" % (len(processes), processes))
+ root_logger.info("Found %d interesting processes %s", len(processes), processes)
max_dump_size_bytes = int(options.max_core_dumps_size) * 1024 * 1024
@@ -656,13 +692,13 @@ def main():
for (pid, process_name) in [(p, pn) for (p, pn) in processes if pn.startswith("python")]:
# On Windows, we set up an event object to wait on a signal. For Cygwin, we register
# a signal handler to wait for the signal since it supports POSIX signals.
- if _is_windows:
- root_logger.info("Calling SetEvent to signal python process %s with PID %d" %
- (process_name, pid))
+ if _IS_WINDOWS:
+ root_logger.info("Calling SetEvent to signal python process %s with PID %d",
+ process_name, pid)
signal_event_object(root_logger, pid)
else:
- root_logger.info("Sending signal SIGUSR1 to python process %s with PID %d" %
- (process_name, pid))
+ root_logger.info("Sending signal SIGUSR1 to python process %s with PID %d",
+ process_name, pid)
signal_process(root_logger, pid, signal.SIGUSR1)
trapped_exceptions = []
@@ -674,26 +710,25 @@ def main():
try:
dbg.dump_info(root_logger, process_logger, pid, process_name, options.dump_core
and check_dump_quota(max_dump_size_bytes, dbg.get_dump_ext()))
- except Exception as err:
- root_logger.info("Error encountered when invoking debugger %s" % err)
+ except Exception as err: # pylint: disable=broad-except
+ root_logger.info("Error encountered when invoking debugger %s", err)
trapped_exceptions.append(traceback.format_exc())
- # Dump java processes using jstack.
+ # Dump java processes using jstack.
for (pid, process_name) in [(p, pn) for (p, pn) in processes if pn.startswith("java")]:
process_logger = get_process_logger(options.debugger_output, pid, process_name)
try:
- jstack.dump_info(root_logger, process_logger, pid, process_name)
- except Exception as err:
- root_logger.info("Error encountered when invoking debugger %s" % err)
+ jstack.dump_info(root_logger, pid)
+ except Exception as err: # pylint: disable=broad-except
+ root_logger.info("Error encountered when invoking debugger %s", err)
trapped_exceptions.append(traceback.format_exc())
- # Signal go processes to ensure they print out stack traces, and die on POSIX OSes.
- # On Windows, this will simply kill the process since python emulates SIGABRT as
- # TerminateProcess.
- # Note: The stacktrace output may be captured elsewhere (i.e. resmoke).
+ # Signal go processes to ensure they print out stack traces, and die on POSIX OSes.
+ # On Windows, this will simply kill the process since python emulates SIGABRT as
+ # TerminateProcess.
+ # Note: The stacktrace output may be captured elsewhere (i.e. resmoke).
for (pid, process_name) in [(p, pn) for (p, pn) in processes if pn in go_processes]:
- root_logger.info("Sending signal SIGABRT to go process %s with PID %d" % (process_name,
- pid))
+ root_logger.info("Sending signal SIGABRT to go process %s with PID %d", process_name, pid)
signal_process(root_logger, pid, signal.SIGABRT)
root_logger.info("Done analyzing all processes for hangs")
diff --git a/buildscripts/idl/idl/binder.py b/buildscripts/idl/idl/binder.py
index f8809ea503d..c6bff11032a 100644
--- a/buildscripts/idl/idl/binder.py
+++ b/buildscripts/idl/idl/binder.py
@@ -649,11 +649,11 @@ def _bind_chained_struct(ctxt, parsed_spec, ast_struct, chained_struct):
chained_struct.name)
if not syntax_symbol:
- return None
+ return
if not isinstance(syntax_symbol, syntax.Struct) or isinstance(syntax_symbol, syntax.Command):
ctxt.add_chained_struct_not_found_error(ast_struct, chained_struct.name)
- return None
+ return
struct = cast(syntax.Struct, syntax_symbol)
@@ -808,5 +808,5 @@ def bind(parsed_spec):
if ctxt.errors.has_errors():
return ast.IDLBoundSpec(None, ctxt.errors)
- else:
- return ast.IDLBoundSpec(bound_spec, None)
+
+ return ast.IDLBoundSpec(bound_spec, None)
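This binder.py hunk, like several below, applies pylint's no-else-return refactoring (R1705): when the if branch always returns, the else wrapper is redundant and the alternative can fall through at function level. Schematically, with a toy function in the shape of the bind() tail above:

    def bound_spec_or_errors(errors, spec):
        """Toy no-else-return shape: pair up either the errors or the spec."""
        if errors:
            return (None, errors)
        # No 'else' needed: the branch above already returned.
        return (spec, None)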
diff --git a/buildscripts/idl/idl/cpp_types.py b/buildscripts/idl/idl/cpp_types.py
index bf7bbeeb629..d275872ca5a 100644
--- a/buildscripts/idl/idl/cpp_types.py
+++ b/buildscripts/idl/idl/cpp_types.py
@@ -155,10 +155,6 @@ class CppTypeBase(object):
class _CppTypeBasic(CppTypeBase):
"""Default class for C++ Type information. Does not handle view types."""
- def __init__(self, field):
- # type: (ast.Field) -> None
- super(_CppTypeBasic, self).__init__(field)
-
def get_type_name(self):
# type: () -> unicode
if self._field.struct_type:
@@ -284,10 +280,6 @@ class _CppTypeView(CppTypeBase):
class _CppTypeVector(CppTypeBase):
"""Base type for C++ Std::Vector Types information."""
- def __init__(self, field):
- # type: (ast.Field) -> None
- super(_CppTypeVector, self).__init__(field)
-
def get_type_name(self):
# type: () -> unicode
return 'std::vector<std::uint8_t>'
@@ -395,10 +387,6 @@ class _CppTypeDelegating(CppTypeBase):
class _CppTypeArray(_CppTypeDelegating):
"""C++ Array type for wrapping a base C++ Type information."""
- def __init__(self, base, field):
- # type: (CppTypeBase, ast.Field) -> None
- super(_CppTypeArray, self).__init__(base, field)
-
def get_storage_type(self):
# type: () -> unicode
return _qualify_array_type(self._base.get_storage_type())
@@ -422,8 +410,7 @@ class _CppTypeArray(_CppTypeDelegating):
convert = self.get_transform_to_getter_type(member_name)
if convert:
return common.template_args('return ${convert};', convert=convert)
- else:
- return self._base.get_getter_body(member_name)
+ return self._base.get_getter_body(member_name)
def get_setter_body(self, member_name):
# type: (unicode) -> unicode
@@ -431,8 +418,7 @@ class _CppTypeArray(_CppTypeDelegating):
if convert:
return common.template_args('${member_name} = ${convert};', member_name=member_name,
convert=convert)
- else:
- return self._base.get_setter_body(member_name)
+ return self._base.get_setter_body(member_name)
def get_transform_to_getter_type(self, expression):
# type: (unicode) -> Optional[unicode]
@@ -441,8 +427,7 @@ class _CppTypeArray(_CppTypeDelegating):
'transformVector(${expression})',
expression=expression,
)
- else:
- return None
+ return None
def get_transform_to_storage_type(self, expression):
# type: (unicode) -> Optional[unicode]
@@ -451,17 +436,12 @@ class _CppTypeArray(_CppTypeDelegating):
'transformVector(${expression})',
expression=expression,
)
- else:
- return None
+ return None
class _CppTypeOptional(_CppTypeDelegating):
"""Base type for Optional C++ Type information which wraps C++ types."""
- def __init__(self, base, field):
- # type: (CppTypeBase, ast.Field) -> None
- super(_CppTypeOptional, self).__init__(base, field)
-
def get_storage_type(self):
# type: () -> unicode
return _qualify_optional_type(self._base.get_storage_type())
@@ -502,8 +482,7 @@ class _CppTypeOptional(_CppTypeDelegating):
return common.template_args('return ${param_type}{${member_name}};',
param_type=self.get_getter_setter_type(),
member_name=member_name)
- else:
- return common.template_args('return ${member_name};', member_name=member_name)
+ return common.template_args('return ${member_name};', member_name=member_name)
def get_setter_body(self, member_name):
# type: (unicode) -> unicode
@@ -517,13 +496,11 @@ class _CppTypeOptional(_CppTypeDelegating):
${member_name} = boost::none;
}
"""), member_name=member_name, convert=convert)
- else:
- return self._base.get_setter_body(member_name)
+ return self._base.get_setter_body(member_name)
def get_cpp_type(field):
# type: (ast.Field) -> CppTypeBase
- # pylint: disable=redefined-variable-type
"""Get the C++ Type information for the given field."""
cpp_type_info = None # type: Any
@@ -533,7 +510,7 @@ def get_cpp_type(field):
elif field.cpp_type == 'std::vector<std::uint8_t>':
cpp_type_info = _CppTypeVector(field)
else:
- cpp_type_info = _CppTypeBasic(field) # pylint: disable=redefined-variable-type
+ cpp_type_info = _CppTypeBasic(field)
if field.array:
cpp_type_info = _CppTypeArray(cpp_type_info, field)
@@ -617,10 +594,6 @@ class _CommonBsonCppTypeBase(BsonCppTypeBase):
class _ObjectBsonCppTypeBase(BsonCppTypeBase):
"""Custom C++ support for object BSON types."""
- def __init__(self, field):
- # type: (ast.Field) -> None
- super(_ObjectBsonCppTypeBase, self).__init__(field)
-
def gen_deserializer_expression(self, indented_writer, object_instance):
# type: (writer.IndentedTextWriter, unicode) -> unicode
if self._field.deserializer:
@@ -630,9 +603,8 @@ class _ObjectBsonCppTypeBase(BsonCppTypeBase):
object_instance=object_instance))
return "localObject"
- else:
- # Just pass the BSONObj through without trying to parse it.
- return common.template_args('${object_instance}.Obj()', object_instance=object_instance)
+ # Just pass the BSONObj through without trying to parse it.
+ return common.template_args('${object_instance}.Obj()', object_instance=object_instance)
def has_serializer(self):
# type: () -> bool
@@ -650,18 +622,13 @@ class _ObjectBsonCppTypeBase(BsonCppTypeBase):
class _BinDataBsonCppTypeBase(BsonCppTypeBase):
"""Custom C++ support for all binData BSON types."""
- def __init__(self, field):
- # type: (ast.Field) -> None
- super(_BinDataBsonCppTypeBase, self).__init__(field)
-
def gen_deserializer_expression(self, indented_writer, object_instance):
# type: (writer.IndentedTextWriter, unicode) -> unicode
if self._field.bindata_subtype == 'uuid':
return common.template_args('${object_instance}.uuid()',
object_instance=object_instance)
- else:
- return common.template_args('${object_instance}._binDataVector()',
- object_instance=object_instance)
+ return common.template_args('${object_instance}._binDataVector()',
+ object_instance=object_instance)
def has_serializer(self):
# type: () -> bool
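The constructors deleted from cpp_types.py (and from enum_types.py and testcase.py further down) did nothing but forward their arguments to super(), which pylint 1.8 reports as useless-super-delegation; removing them changes nothing, because Python falls back to the base class __init__. A toy equivalent, with names loosely echoing this file:

    class Base(object):
        def __init__(self, field):
            self._field = field

    class Basic(Base):
        # Adding an __init__ here that only called
        # super(Basic, self).__init__(field) would be useless delegation;
        # the inherited constructor already does the right thing.
        def get_type_name(self):
            return "type for %r" % (self._field, )

    print(Basic("myField").get_type_name())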
diff --git a/buildscripts/idl/idl/enum_types.py b/buildscripts/idl/idl/enum_types.py
index d66fb4010d0..94351364510 100644
--- a/buildscripts/idl/idl/enum_types.py
+++ b/buildscripts/idl/idl/enum_types.py
@@ -117,10 +117,6 @@ class _EnumTypeInt(EnumTypeInfoBase):
__metaclass__ = ABCMeta
- def __init__(self, idl_enum):
- # type: (Union[syntax.Enum,ast.Enum]) -> None
- super(_EnumTypeInt, self).__init__(idl_enum)
-
def get_cpp_type_name(self):
# type: () -> unicode
return common.title_case(self._enum.name)
@@ -196,10 +192,6 @@ class _EnumTypeString(EnumTypeInfoBase):
__metaclass__ = ABCMeta
- def __init__(self, idl_enum):
- # type: (Union[syntax.Enum,ast.Enum]) -> None
- super(_EnumTypeString, self).__init__(idl_enum)
-
def get_cpp_type_name(self):
# type: () -> unicode
return common.template_args("${enum_name}Enum", enum_name=common.title_case(
diff --git a/buildscripts/idl/idl/errors.py b/buildscripts/idl/idl/errors.py
index 106644e0ded..1e19fc3013d 100644
--- a/buildscripts/idl/idl/errors.py
+++ b/buildscripts/idl/idl/errors.py
@@ -108,7 +108,7 @@ class ParserError(common.SourceLocation):
def __init__(self, error_id, msg, file_name, line, column):
# type: (unicode, unicode, unicode, int, int) -> None
- """"Construct a parser error with source location information."""
+ """Construct a parser error with source location information."""
# pylint: disable=too-many-arguments
self.error_id = error_id
self.msg = msg
@@ -116,8 +116,7 @@ class ParserError(common.SourceLocation):
def __str__(self):
# type: () -> str
- """
- Return a formatted error.
+ """Return a formatted error.
Example error message:
test.idl: (17, 4): ID0008: Unknown IDL node 'cpp_namespac' for YAML entity 'global'.
@@ -128,11 +127,11 @@ class ParserError(common.SourceLocation):
class ParserErrorCollection(object):
- """A collection of parser errors with source context information."""
+ """Collection of parser errors with source context information."""
def __init__(self):
# type: () -> None
- """Default constructor."""
+ """Initialize ParserErrorCollection."""
self._errors = [] # type: List[ParserError]
def add(self, location, error_id, msg):
@@ -310,9 +309,8 @@ class ParserContext(object):
assert self.is_scalar_sequence_or_scalar_node(node, "unknown")
if node.id == "scalar":
return [node.value]
- else:
- # Unzip the list of ScalarNode
- return [v.value for v in node.value]
+ # Unzip the list of ScalarNode
+ return [v.value for v in node.value]
def add_duplicate_error(self, node, node_name):
# type: (yaml.nodes.Node, unicode) -> None
diff --git a/buildscripts/idl/idl/generator.py b/buildscripts/idl/idl/generator.py
index e4c20472816..ba39c3cb465 100644
--- a/buildscripts/idl/idl/generator.py
+++ b/buildscripts/idl/idl/generator.py
@@ -103,9 +103,8 @@ def _get_bson_type_check(bson_element, ctxt_name, field):
if not bson_types[0] == 'bindata':
return '%s.checkAndAssertType(%s, %s)' % (ctxt_name, bson_element,
bson.cpp_bson_type_name(bson_types[0]))
- else:
- return '%s.checkAndAssertBinDataType(%s, %s)' % (
- ctxt_name, bson_element, bson.cpp_bindata_subtype_type_name(field.bindata_subtype))
+ return '%s.checkAndAssertBinDataType(%s, %s)' % (
+ ctxt_name, bson_element, bson.cpp_bindata_subtype_type_name(field.bindata_subtype))
else:
type_list = '{%s}' % (', '.join([bson.cpp_bson_type_name(b) for b in bson_types]))
return '%s.checkAndAssertTypes(%s, %s)' % (ctxt_name, bson_element, type_list)
@@ -372,11 +371,6 @@ class _CppFileWriterBase(object):
class _CppHeaderFileWriter(_CppFileWriterBase):
"""C++ .h File writer."""
- def __init__(self, indented_writer):
- # type: (writer.IndentedTextWriter) -> None
- """Create a C++ .cpp file code writer."""
- super(_CppHeaderFileWriter, self).__init__(indented_writer)
-
def gen_class_declaration_block(self, class_name):
# type: (unicode) -> writer.IndentedScopedBlock
"""Generate a class declaration block."""
@@ -770,39 +764,37 @@ class _CppSourceFileWriter(_CppFileWriterBase):
elif field.deserializer and 'BSONElement::' in field.deserializer:
method_name = writer.get_method_name(field.deserializer)
return '%s.%s()' % (element_name, method_name)
- else:
- # Custom method, call the method on object.
- bson_cpp_type = cpp_types.get_bson_cpp_type(field)
- if bson_cpp_type:
- # Call a static class method with the signature:
- # Class Class::method(StringData value)
- # or
- # Class::method(const BSONObj& value)
- expression = bson_cpp_type.gen_deserializer_expression(self._writer, element_name)
- if field.deserializer:
- method_name = writer.get_method_name_from_qualified_method_name(
- field.deserializer)
+ # Custom method, call the method on object.
+ bson_cpp_type = cpp_types.get_bson_cpp_type(field)
- # For fields which are enums, pass a IDLParserErrorContext
- if field.enum_type:
- self._writer.write_line('IDLParserErrorContext tempContext(%s, &ctxt);' %
- (_get_field_constant_name(field)))
- return common.template_args("${method_name}(tempContext, ${expression})",
- method_name=method_name, expression=expression)
- else:
- return common.template_args("${method_name}(${expression})",
- method_name=method_name, expression=expression)
- else:
- # BSONObjects are allowed to be pass through without deserialization
- assert field.bson_serialization_type == ['object']
- return expression
- else:
- # Call a static class method with the signature:
- # Class Class::method(const BSONElement& value)
+ if bson_cpp_type:
+ # Call a static class method with the signature:
+ # Class Class::method(StringData value)
+ # or
+ # Class::method(const BSONObj& value)
+ expression = bson_cpp_type.gen_deserializer_expression(self._writer, element_name)
+ if field.deserializer:
method_name = writer.get_method_name_from_qualified_method_name(field.deserializer)
- return '%s(%s)' % (method_name, element_name)
+ # For fields which are enums, pass an IDLParserErrorContext
+ if field.enum_type:
+ self._writer.write_line('IDLParserErrorContext tempContext(%s, &ctxt);' %
+ (_get_field_constant_name(field)))
+ return common.template_args("${method_name}(tempContext, ${expression})",
+ method_name=method_name, expression=expression)
+ return common.template_args("${method_name}(${expression})",
+ method_name=method_name, expression=expression)
+
+ # BSONObjects are allowed to be passed through without deserialization
+ assert field.bson_serialization_type == ['object']
+ return expression
+
+ # Call a static class method with the signature:
+ # Class Class::method(const BSONElement& value)
+ method_name = writer.get_method_name_from_qualified_method_name(field.deserializer)
+
+ return '%s(%s)' % (method_name, element_name)
def _gen_array_deserializer(self, field, bson_element):
# type: (ast.Field, unicode) -> None
diff --git a/buildscripts/idl/idl/parser.py b/buildscripts/idl/idl/parser.py
index 90ec25b6bb6..052f9a21782 100644
--- a/buildscripts/idl/idl/parser.py
+++ b/buildscripts/idl/idl/parser.py
@@ -22,9 +22,9 @@ from __future__ import absolute_import, print_function, unicode_literals
from abc import ABCMeta, abstractmethod
import io
+from typing import Any, Callable, Dict, List, Set, Tuple, Union
import yaml
from yaml import nodes
-from typing import Any, Callable, Dict, List, Set, Tuple, Union
from . import common
from . import cpp_types
@@ -65,7 +65,7 @@ def _generic_parser(
syntax_node_name, # type: unicode
syntax_node, # type: Any
mapping_rules # type: Dict[unicode, _RuleDesc]
-):
+): # type: (...) -> None
# pylint: disable=too-many-branches
field_name_set = set() # type: Set[str]
@@ -129,7 +129,7 @@ def _parse_mapping(
node, # type: Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode]
syntax_node_name, # type: unicode
func # type: Callable[[errors.ParserContext,syntax.IDLSpec,unicode,Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode]], None]
-):
+): # type: (...) -> None
"""Parse a top-level mapping section in the IDL file."""
if not ctxt.is_mapping_node(node, syntax_node_name):
return
@@ -542,10 +542,10 @@ def _parse(stream, error_file_name):
if ctxt.errors.has_errors():
return syntax.IDLParsedSpec(None, ctxt.errors)
- else:
- _propagate_globals(spec)
- return syntax.IDLParsedSpec(spec, None)
+ _propagate_globals(spec)
+
+ return syntax.IDLParsedSpec(spec, None)
class ImportResolverBase(object):
@@ -625,7 +625,7 @@ def parse(stream, input_file_name, resolver):
return parsed_doc
# We need to generate includes for imported IDL files which have structs
- if base_file_name == input_file_name and len(parsed_doc.spec.symbols.structs):
+ if base_file_name == input_file_name and parsed_doc.spec.symbols.structs:
needs_include.append(imported_file_name)
# Add other imported files to the list of files to parse
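The "# type: (...) -> None" comments attached to _generic_parser and _parse_mapping are mypy's Python 2 annotation style for signatures whose parameters carry their own per-argument type comments; the ellipsis tells mypy to read the argument types from those inline comments. A small sketch in the same style (tally and its arguments are invented):

    from typing import Dict

    def tally(
            name,  # type: str
            counts  # type: Dict[str, int]
    ):  # type: (...) -> int
        """Return the count recorded for name, defaulting to zero."""
        return counts.get(name, 0)

    print(tally("binder", {"binder": 2}))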
diff --git a/buildscripts/idl/idl/struct_types.py b/buildscripts/idl/idl/struct_types.py
index 4602bfa01a0..8e055fe8c45 100644
--- a/buildscripts/idl/idl/struct_types.py
+++ b/buildscripts/idl/idl/struct_types.py
@@ -315,22 +315,6 @@ class _IgnoredCommandTypeInfo(_CommandBaseTypeInfo):
common.title_case(self._struct.cpp_name), 'toBSON',
['const BSONObj& commandPassthroughFields'], 'BSONObj', const=True)
- def get_deserializer_static_method(self):
- # type: () -> MethodInfo
- return super(_IgnoredCommandTypeInfo, self).get_deserializer_static_method()
-
- def get_deserializer_method(self):
- # type: () -> MethodInfo
- return super(_IgnoredCommandTypeInfo, self).get_deserializer_method()
-
- def gen_getter_method(self, indented_writer):
- # type: (writer.IndentedTextWriter) -> None
- super(_IgnoredCommandTypeInfo, self).gen_getter_method(indented_writer)
-
- def gen_member(self, indented_writer):
- # type: (writer.IndentedTextWriter) -> None
- super(_IgnoredCommandTypeInfo, self).gen_member(indented_writer)
-
def gen_serializer(self, indented_writer):
# type: (writer.IndentedTextWriter) -> None
indented_writer.write_line('builder->append("%s", 1);' % (self._command.name))
@@ -464,7 +448,6 @@ def get_struct_info(struct):
return _IgnoredCommandTypeInfo(struct)
elif struct.namespace == common.COMMAND_NAMESPACE_CONCATENATE_WITH_DB:
return _CommandWithNamespaceTypeInfo(struct)
- else:
- return _CommandFromType(struct)
+ return _CommandFromType(struct)
return _StructTypeInfo(struct)
diff --git a/buildscripts/idl/idlc.py b/buildscripts/idl/idlc.py
index 04ecb9ecfe7..f01d1555cb4 100644
--- a/buildscripts/idl/idlc.py
+++ b/buildscripts/idl/idlc.py
@@ -26,7 +26,7 @@ import idl.compiler
def main():
# type: () -> None
- """Main Entry point."""
+ """Execute Main Entry point."""
parser = argparse.ArgumentParser(description='MongoDB IDL Compiler.')
parser.add_argument('file', type=str, help="IDL input file")
diff --git a/buildscripts/idl/tests/testcase.py b/buildscripts/idl/tests/testcase.py
index ff96da01dde..ee011d15d29 100644
--- a/buildscripts/idl/tests/testcase.py
+++ b/buildscripts/idl/tests/testcase.py
@@ -39,11 +39,6 @@ def errors_to_str(errors):
class NothingImportResolver(idl.parser.ImportResolverBase):
"""An import resolver that does nothing."""
- def __init__(self):
- # type: () -> None
- """Construct a NothingImportResolver."""
- super(NothingImportResolver, self).__init__()
-
def resolve(self, base_file, imported_file_name):
# type: (unicode, unicode) -> unicode
"""Return the complete path to an imported file name."""
diff --git a/buildscripts/jiraclient.py b/buildscripts/jiraclient.py
index 7d798a9a4b5..86240865ad8 100644
--- a/buildscripts/jiraclient.py
+++ b/buildscripts/jiraclient.py
@@ -7,13 +7,15 @@ import jira
class JiraClient(object):
"""A client for JIRA."""
+
CLOSE_TRANSITION_NAME = "Close Issue"
RESOLVE_TRANSITION_NAME = "Resolve Issue"
FIXED_RESOLUTION_NAME = "Fixed"
WONT_FIX_RESOLUTION_NAME = "Won't Fix"
- def __init__(self, server, username=None, password=None, access_token=None,
- access_token_secret=None, consumer_key=None, key_cert=None):
+ def __init__( # pylint: disable=too-many-arguments
+ self, server, username=None, password=None, access_token=None, access_token_secret=None,
+ consumer_key=None, key_cert=None):
"""Initialize the JiraClient with the server URL and user credentials."""
opts = {"server": server, "verify": True}
basic_auth = None
diff --git a/buildscripts/test_failures.py b/buildscripts/lifecycle_test_failures.py
index 644eadb65e2..a0803a78ed5 100755
--- a/buildscripts/test_failures.py
+++ b/buildscripts/lifecycle_test_failures.py
@@ -1,7 +1,5 @@
#!/usr/bin/env python
-"""
-Utility for computing test failure rates from the Evergreen API.
-"""
+"""Utility for computing test failure rates from the Evergreen API."""
from __future__ import absolute_import
from __future__ import division
@@ -21,7 +19,7 @@ import warnings
try:
from urlparse import urlparse
except ImportError:
- from urllib.parse import urlparse
+ from urllib.parse import urlparse # type: ignore
import requests
import requests.exceptions
@@ -47,18 +45,17 @@ _ReportEntry = collections.namedtuple("_ReportEntry", [
class Wildcard(object):
- """
- A class for representing there are multiple values associated with a particular component.
- """
+ """Class for representing there are multiple values associated with a particular component."""
def __init__(self, kind):
+ """Initialize Wildcard."""
self._kind = kind
def __eq__(self, other):
if not isinstance(other, Wildcard):
return NotImplemented
- return self._kind == other._kind
+ return self._kind == other._kind # pylint: disable=protected-access
def __ne__(self, other):
return not self == other
@@ -71,9 +68,7 @@ class Wildcard(object):
class ReportEntry(_ReportEntry):
- """
- Holds information about Evergreen test executions.
- """
+ """Information about Evergreen test executions."""
_MULTIPLE_TESTS = Wildcard("tests")
_MULTIPLE_TASKS = Wildcard("tasks")
@@ -85,8 +80,7 @@ class ReportEntry(_ReportEntry):
@property
def fail_rate(self):
- """
- Returns the fraction of test failures to total number of test executions.
+ """Get the fraction of test failures to total number of test executions.
If a test hasn't been run at all, then we still say it has a failure rate of 0% for
convenience when applying thresholds.
@@ -97,9 +91,9 @@ class ReportEntry(_ReportEntry):
return self.num_fail / (self.num_pass + self.num_fail)
def period_start_date(self, start_date, period_size):
- """
- Returns a datetime.date() instance corresponding to the beginning of the time period
- containing 'self.start_date'.
+ """Return a datetime.date() instance for the period start date.
+
+ The result corresponds to the beginning of the time period containing 'self.start_date'.
"""
if not isinstance(start_date, datetime.date):
@@ -118,9 +112,10 @@ class ReportEntry(_ReportEntry):
return self.start_date - datetime.timedelta(days=start_day_offset)
def week_start_date(self, start_day_of_week):
- """
- Returns a datetime.date() instance corresponding to the beginning of the week containing
- 'self.start_date'. The first day of the week can be specified as the strings "Sunday" or
+ """Return a datetime.date() instance of the week's start date.
+
+ The result corresponds to the beginning of the week containing 'self.start_date'.
+ The first day of the week can be specified as the strings "Sunday" or
"Monday", as well as an arbitrary datetime.date() instance.
"""
@@ -144,9 +139,9 @@ class ReportEntry(_ReportEntry):
@classmethod
def sum(cls, entries):
- """
- Returns a single ReportEntry() instance corresponding to all test executions represented by
- 'entries'.
+ """Return a single ReportEntry() instance.
+
+ The result corresponds to all test executions represented by 'entries'.
"""
test = set()
@@ -179,9 +174,7 @@ class ReportEntry(_ReportEntry):
class Report(object):
- """
- A class for generating summarizations about Evergreen test executions.
- """
+ """Class for generating summarizations about Evergreen test executions."""
TEST = ("test", )
TEST_TASK = ("test", "task")
@@ -196,9 +189,7 @@ class Report(object):
FIRST_DAY = "first-day"
def __init__(self, entries):
- """
- Initializes the Report instance.
- """
+ """Initialize the Report instance."""
if not isinstance(entries, list):
# It is possible that 'entries' is a generator function, so we convert it to a list in
@@ -215,16 +206,15 @@ class Report(object):
@property
def raw_data(self):
- """
- Returns a copy of the list of ReportEntry instances underlying the report.
- """
+ """Get a copy of the list of ReportEntry instances underlying the report."""
return self._entries[:]
- def summarize_by(self, components, time_period=None, start_day_of_week=FIRST_DAY):
- """
- Returns a list of ReportEntry instances grouped by
+ def summarize_by( # pylint: disable=too-many-branches,too-many-locals
+ self, components, time_period=None, start_day_of_week=FIRST_DAY):
+ """Return a list of ReportEntry instances grouped by the following.
+
+ Grouping:
'components' if 'time_period' is None,
'components' followed by Entry.start_date if 'time_period' is "daily",
@@ -272,9 +262,9 @@ class Report(object):
" instance"))
def key_func(entry):
- """
- Assigns a key for sorting and grouping ReportEntry instances based on the combination of
- options summarize_by() was called with.
+ """Assign a key for sorting and grouping ReportEntry instances.
+
+ The result is based on the combination of options summarize_by() was called with.
"""
return [func(entry) for func in group_by]
@@ -303,18 +293,17 @@ class Report(object):
class Missing(object):
- """
- A class for representing the value associated with a particular component is unknown.
- """
+ """Class for representing the value associated with a particular component is unknown."""
def __init__(self, kind):
+ """Initialize Missing."""
self._kind = kind
def __eq__(self, other):
if not isinstance(other, Missing):
return NotImplemented
- return self._kind == other._kind
+ return self._kind == other._kind # pylint: disable=protected-access
def __ne__(self, other):
return not self == other
@@ -327,9 +316,7 @@ class Missing(object):
class TestHistory(object):
- """
- A class for interacting with the /test_history Evergreen API endpoint.
- """
+ """Class for interacting with the /test_history Evergreen API endpoint."""
DEFAULT_API_SERVER = "https://evergreen.mongodb.com"
DEFAULT_PROJECT = "mongodb-mongo-master"
@@ -345,11 +332,10 @@ class TestHistory(object):
_MISSING_DISTRO = Missing("distro")
- def __init__(self, api_server=DEFAULT_API_SERVER, project=DEFAULT_PROJECT, tests=None,
- tasks=None, variants=None, distros=None):
- """
- Initializes the TestHistory instance with the list of tests, tasks, variants, and distros
- specified.
+ def __init__( # pylint: disable=too-many-arguments
+ self, api_server=DEFAULT_API_SERVER, project=DEFAULT_PROJECT, tests=None, tasks=None,
+ variants=None, distros=None):
+ """Initialize the TestHistory instance with the list of tests, tasks, variants, and distros.
The list of tests specified are augmented to ensure that failures on both POSIX and Windows
platforms are returned by the Evergreen API.
@@ -374,9 +360,10 @@ class TestHistory(object):
def get_history_by_revision(self, start_revision, end_revision,
test_statuses=DEFAULT_TEST_STATUSES,
task_statuses=DEFAULT_TASK_STATUSES):
- """
- Returns a list of ReportEntry instances corresponding to each individual test execution
- between 'start_revision' and 'end_revision'.
+ """Return a list of ReportEntry instances.
+
+ The result corresponds to each individual test execution between 'start_revision' and
+ 'end_revision'.
Only tests with status 'test_statuses' are included in the result. Similarly, only tests
with status 'task_statuses' are included in the result. By default, both passing and failing
@@ -408,13 +395,14 @@ class TestHistory(object):
def get_history_by_date(self, start_date, end_date, test_statuses=DEFAULT_TEST_STATUSES,
task_statuses=DEFAULT_TASK_STATUSES):
- """
- Returns a list of ReportEntry instances corresponding to each individual test execution
- between 'start_date' and 'end_date'.
+ """Return a list of ReportEntry instances.
+
+ The result corresponds to each individual test execution between 'start_date' and
+ 'end_date'.
Only tests with status 'test_statuses' are included in the result. Similarly, only tests
- with status 'task_statuses' are included in the result. By default, both passing and failing
- test executions are returned.
+ with status 'task_statuses' are included in the result. By default, both passing and
+ failing test executions are returned.
"""
warnings.warn(
@@ -431,8 +419,8 @@ class TestHistory(object):
history_data = set()
# Since the API limits the results, with each invocation being distinct, we can simulate
- # pagination by making subsequent requests using "afterDate" and being careful to filter out
- # duplicate test results.
+ # pagination by making subsequent requests using "afterDate" and being careful to filter
+ # out duplicate test results.
while True:
params["afterDate"] = start_time
@@ -453,8 +441,7 @@ class TestHistory(object):
return list(history_data)
def _get_history(self, params):
- """
- Calls the test_history API endpoint with the given parameters and returns the JSON result.
+ """Call the test_history API endpoint with the given parameters and return the JSON result.
The API calls will be retried on HTTP and connection errors.
"""
@@ -496,9 +483,7 @@ class TestHistory(object):
raise JSONResponseError(err)
def _process_test_result(self, test_result):
- """
- Returns a ReportEntry() tuple representing the 'test_result' dictionary.
- """
+ """Return a ReportEntry() tuple representing the 'test_result' dictionary."""
# For individual test executions, we intentionally use the "start_time" of the test as both
# its 'start_date' and 'end_date' to avoid complicating how the test history is potentially
@@ -516,7 +501,8 @@ class TestHistory(object):
@staticmethod
def _normalize_test_file(test_file):
- """
+ """Return normalized test_file name.
+
If 'test_file' represents a Windows-style path, then it is converted to a POSIX-style path
with
@@ -536,8 +522,7 @@ class TestHistory(object):
return test_file
def _denormalize_test_file(self, test_file):
- """
- Returns a list containing 'test_file' as both a POSIX-style path and a Windows-style path.
+ """Return a list containing 'test_file' as both a POSIX-style and a Windows-style path.
The conversion process may involve replacing forward slashes (/) as the path separator
with backslashes (\\), as well as adding a ".exe" extension if 'test_file' has no file
@@ -555,9 +540,7 @@ class TestHistory(object):
return [test_file]
def _history_request_params(self, test_statuses, task_statuses):
- """
- Returns the query parameters for /test_history GET request as a dictionary.
- """
+ """Return the query parameters for /test_history GET request as a dictionary."""
return {
"distros": ",".join(self._distros),
@@ -571,8 +554,7 @@ class TestHistory(object):
def _parse_date(date_str):
- """
- Returns a datetime.date instance representing the specified yyyy-mm-dd date string.
+ """Return a datetime.date instance representing the specified yyyy-mm-dd date string.
Note that any time component of 'date_str', including the timezone, is ignored.
"""
@@ -584,16 +566,16 @@ def _parse_date(date_str):
class JSONResponseError(Exception):
"""An exception raised when failing to decode the JSON from an Evergreen response."""
- def __init__(self, cause):
- """Initializes the JSONResponseError with the exception raised by the requests library
- when decoding the response."""
+ def __init__(self, cause): # pylint: disable=super-init-not-called
+ """Initialize the JSONResponseError.
+
+ It is set with the exception raised by the requests library when decoding the response.
+ """
self.cause = cause
def main():
- """
- Utility computing test failure rates from the Evergreen API.
- """
+ """Execute computing test failure rates from the Evergreen API."""
parser = optparse.OptionParser(description=main.__doc__,
usage="Usage: %prog [options] [test1 test2 ...]")
@@ -695,7 +677,7 @@ def main():
def read_evg_config():
"""
- Attempts to parse the user's or system's Evergreen configuration from its known locations.
+ Attempt to parse the user's or system's Evergreen configuration from its known locations.
Returns None if the configuration file wasn't found anywhere.
"""
diff --git a/buildscripts/lint.py b/buildscripts/lint.py
index 68d0252b6ef..4a863aa3cc8 100644
--- a/buildscripts/lint.py
+++ b/buildscripts/lint.py
@@ -1,15 +1,23 @@
+"""Lint module."""
+
+from __future__ import print_function
+
import sys
import codecs
-import cpplint
-import utils
+import buildscripts.cpplint as cpplint
+import buildscripts.utils as utils
-class CheckForConfigH:
+class CheckForConfigH(object):
+ """CheckForConfigH class."""
+
def __init__(self):
+ """Initialize CheckForConfigH."""
self.found_configh = False
def __call__(self, filename, clean_lines, line_num, error):
+ """Check for a config file."""
if self.found_configh:
return
@@ -21,7 +29,8 @@ class CheckForConfigH:
'MONGO_CONFIG define used without prior inclusion of config.h.')
-def run_lint(paths, nudgeOn=False):
+def run_lint(paths, nudge_on=False):
+ """Run lint."""
# errors are as of 10/14
# idea is not to let it any new type of error
# as we knock one out, we should remove line
@@ -70,25 +79,26 @@ def run_lint(paths, nudgeOn=False):
nudge.append('-whitespace/tab') # errors found: 233
filters = later + never
- if not nudgeOn:
+ if not nudge_on:
filters = filters + nudge
- sourceFiles = []
- for x in paths:
- utils.getAllSourceFiles(sourceFiles, x)
+ source_files = []
+ for path in paths:
+ utils.get_all_source_files(source_files, path)
- args = ["--linelength=100", "--filter=" + ",".join(filters), "--counting=detailed"
- ] + sourceFiles
+ args = \
+ ["--linelength=100", "--filter=" + ",".join(filters), "--counting=detailed"] + source_files
filenames = cpplint.ParseArguments(args)
- def _ourIsTestFilename(fn):
- if fn.find("dbtests") >= 0:
+ def _our_is_test_filename(file_name):
+ if file_name.find("dbtests") >= 0:
return True
- if fn.endswith("_test.cpp"):
+ if file_name.endswith("_test.cpp"):
return True
return False
- cpplint._IsTestFilename = _ourIsTestFilename
+ # pylint: disable=protected-access
+ cpplint._IsTestFilename = _our_is_test_filename
# Change stderr to write with replacement characters so we don't die
# if we try to print something containing non-ASCII characters.
@@ -102,9 +112,12 @@ def run_lint(paths, nudgeOn=False):
cpplint._cpplint_state.PrintErrorCounts()
return cpplint._cpplint_state.error_count == 0
+ # pylint: enable=protected-access
-if __name__ == "__main__":
+def main():
+ """Execute Main program."""
+
paths = []
nudge = False
@@ -119,8 +132,12 @@ if __name__ == "__main__":
sys.exit(-1)
paths.append(arg)
- if len(paths) == 0:
+ if not paths:
paths.append("src/mongo/")
if not run_lint(paths, nudge):
sys.exit(-1)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/buildscripts/linter/base.py b/buildscripts/linter/base.py
index ae78d520664..f22f59e4f01 100644
--- a/buildscripts/linter/base.py
+++ b/buildscripts/linter/base.py
@@ -11,16 +11,18 @@ class LinterBase(object):
__metaclass__ = ABCMeta
- def __init__(self, cmd_name, required_version):
- # type: (str, str) -> None
+ def __init__(self, cmd_name, required_version, cmd_location=None):
+ # type: (str, str, Optional[str]) -> None
"""
Create a linter.
cmd_name - short friendly name
required_version - the required version string to check against
+ cmd_location - location of executable
"""
self.cmd_name = cmd_name
self.required_version = required_version
+ self.cmd_location = cmd_location
@abstractmethod
def get_lint_cmd_args(self, file_name):
diff --git a/buildscripts/linter/git.py b/buildscripts/linter/git.py
index edde6d0a494..b4a68986040 100644
--- a/buildscripts/linter/git.py
+++ b/buildscripts/linter/git.py
@@ -69,7 +69,7 @@ class Repo(_git.Repository):
Returns the full path to the file for clang-format to consume.
"""
- if candidates is not None and len(candidates) > 0:
+ if candidates is not None and len(candidates) > 0: # pylint: disable=len-as-condition
candidates = [self._get_local_dir(f) for f in candidates]
valid_files = list(
set(candidates).intersection(self.get_candidate_files(filter_function)))
@@ -150,7 +150,7 @@ def get_files_to_check(files, filter_function):
candidates_nested = [expand_file_string(f) for f in files]
candidates = list(itertools.chain.from_iterable(candidates_nested))
- if len(files) > 0 and len(candidates) == 0:
+ if files and not candidates:
raise ValueError("Globs '%s' did not find any files with glob." % (files))
repos = get_repos()
@@ -159,7 +159,7 @@ def get_files_to_check(files, filter_function):
itertools.chain.from_iterable(
[r.get_candidates(candidates, filter_function) for r in repos]))
- if len(files) > 0 and len(valid_files) == 0:
+ if files and not valid_files:
raise ValueError("Globs '%s' did not find any files with glob in git." % (files))
return valid_files
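The git.py edits replace explicit length comparisons with Python's truthiness idiom, in line with pylint's len-as-condition check: an empty sequence is already falsy, so the rewritten guards read the same as the old length tests. For example (values invented):

    files = ["src/*.cpp"]
    candidates = ["src/a.cpp"]

    # Equivalent to: if len(files) > 0 and len(candidates) == 0:
    if files and not candidates:
        raise ValueError("Globs '%s' did not find any files with glob." % (files, ))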
diff --git a/buildscripts/linter/mypy.py b/buildscripts/linter/mypy.py
index 9526b388c75..c720ae8f870 100644
--- a/buildscripts/linter/mypy.py
+++ b/buildscripts/linter/mypy.py
@@ -2,6 +2,7 @@
from __future__ import absolute_import
from __future__ import print_function
+import os
from typing import List
from . import base
@@ -13,7 +14,9 @@ class MypyLinter(base.LinterBase):
def __init__(self):
# type: () -> None
"""Create a mypy linter."""
- super(MypyLinter, self).__init__("mypy", "mypy 0.501")
+ # The user can override the location of mypy with the MYPY environment variable.
+
+ super(MypyLinter, self).__init__("mypy", "mypy 0.580", os.getenv("MYPY"))
def get_lint_version_cmd_args(self):
# type: () -> List[str]
@@ -23,17 +26,7 @@ class MypyLinter(base.LinterBase):
def get_lint_cmd_args(self, file_name):
# type: (str) -> List[str]
"""Get the command to run a linter."""
- # -py2 - Check Python 2 code for type annotations in comments
- # --disallow-untyped-defs - Error if any code is missing type annotations
- # --ignore-missing-imports - Do not error if imports are not found. This can be a problem
- # with standalone scripts and relative imports. This will limit effectiveness but avoids
- # mypy complaining about running code.
- # --follow-imports=silent - Do not error on imported files since all imported files may not
- # be mypy clean
- return [
- "--py2", "--disallow-untyped-defs", "--ignore-missing-imports",
- "--follow-imports=silent", file_name
- ]
+ return [file_name]
def ignore_interpreter(self):
# type: () -> bool
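
The flags removed from get_lint_cmd_args above do not simply vanish: judging by this change they are meant to come from mypy's own configuration rather than the command line. The old arguments amounted to running "mypy --py2 --disallow-untyped-defs --ignore-missing-imports --follow-imports=silent <file>"; a hypothetical mypy.ini expressing the same settings would look like:

    [mypy]
    # --py2: check Python 2 code
    python_version = 2.7
    # --disallow-untyped-defs: error if any code is missing type annotations
    disallow_untyped_defs = True
    # --ignore-missing-imports: do not error if imports are not found
    ignore_missing_imports = True
    # --follow-imports=silent: do not error on imported files
    follow_imports = silent
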
diff --git a/buildscripts/linter/pydocstyle.py b/buildscripts/linter/pydocstyle.py
index 9420d42354c..b259becfd1c 100644
--- a/buildscripts/linter/pydocstyle.py
+++ b/buildscripts/linter/pydocstyle.py
@@ -13,7 +13,7 @@ class PyDocstyleLinter(base.LinterBase):
def __init__(self):
# type: () -> None
"""Create a pydocstyle linter."""
- super(PyDocstyleLinter, self).__init__("pydocstyle", "1.1.1")
+ super(PyDocstyleLinter, self).__init__("pydocstyle", "2.1.1")
def get_lint_version_cmd_args(self):
# type: () -> List[str]
diff --git a/buildscripts/linter/pylint.py b/buildscripts/linter/pylint.py
index 056465501d8..12dbcfdf6d3 100644
--- a/buildscripts/linter/pylint.py
+++ b/buildscripts/linter/pylint.py
@@ -15,9 +15,7 @@ class PyLintLinter(base.LinterBase):
def __init__(self):
# type: () -> None
"""Create a pylint linter."""
- self._rc_file = os.path.join(
- os.path.normpath(git.get_base_dir()), "buildscripts", ".pylintrc")
- super(PyLintLinter, self).__init__("pylint", "pylint 1.6.5")
+ super(PyLintLinter, self).__init__("pylint", "pylint 1.8.3")
def get_lint_version_cmd_args(self):
# type: () -> List[str]
@@ -27,10 +25,4 @@ class PyLintLinter(base.LinterBase):
def get_lint_cmd_args(self, file_name):
# type: (str) -> List[str]
"""Get the command to run a linter."""
- # pylintrc only searches parent directories if it is a part of a module, and since our code
- # is split across different modules, and individual script files, we need to specify the
- # path to the rcfile.
- # See https://pylint.readthedocs.io/en/latest/user_guide/run.html
- return [
- "--rcfile=%s" % (self._rc_file), "--output-format", "msvs", "--reports=n", file_name
- ]
+ return ["--output-format=msvs", "--reports=n", file_name]
diff --git a/buildscripts/linter/runner.py b/buildscripts/linter/runner.py
index f3ff287ee95..67c69d25d02 100644
--- a/buildscripts/linter/runner.py
+++ b/buildscripts/linter/runner.py
@@ -88,7 +88,11 @@ def _find_linter(linter, config_dict):
if linter.ignore_interpreter():
# Some linters use a different interpreter than the current interpreter.
- cmd_str = os.path.join('/opt/mongodbtoolchain/v2/bin', linter.cmd_name)
+ # If the linter cmd_location is specified then use that location.
+ if linter.cmd_location:
+ cmd_str = linter.cmd_location
+ else:
+ cmd_str = os.path.join('/opt/mongodbtoolchain/v2/bin', linter.cmd_name)
cmd = [cmd_str]
else:
cmd = [sys.executable, cmd_str]
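
A sketch of the override path added above, assuming the MYPY variable is set before the linter object is constructed (the path is an example):

    import os

    # Hypothetical override; any absolute path to a mypy binary works.
    os.environ["MYPY"] = "/usr/local/bin/mypy"

    # Imported after setting the variable, since __init__ reads $MYPY.
    from buildscripts.linter.mypy import MypyLinter

    linter = MypyLinter()
    assert linter.cmd_location == "/usr/local/bin/mypy"
    # _find_linter() will now use cmd_location verbatim instead of the
    # /opt/mongodbtoolchain/v2/bin default shown above.
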
diff --git a/buildscripts/make_archive.py b/buildscripts/make_archive.py
index f3537cebc25..a2681a0efdc 100755
--- a/buildscripts/make_archive.py
+++ b/buildscripts/make_archive.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-'''Helper script for constructing an archive (zip or tar) from a list of files.
+"""Helper script for constructing an archive (zip or tar) from a list of files.
The output format (tar, tgz, zip) is determined from the file name, unless the user specifies
--format on the command line.
@@ -10,8 +10,8 @@ directory in the archive, perhaps mongodb-2.0.2/src/mongo.
Usage:
-make_archive.py -o <output-file> [--format (tar|tgz|zip)] \
- [--transform match1=replacement1 [--transform match2=replacement2 [...]]] \
+make_archive.py -o <output-file> [--format (tar|tgz|zip)] \\
+ [--transform match1=replacement1 [--transform match2=replacement2 [...]]] \\
<input file 1> [...]
If the input file names start with "@", the file is expected to contain a list of
@@ -23,7 +23,7 @@ match1, it is never compared against match2 or later. Matches are just python s
comparisons.
For a detailed usage example, see src/SConscript.client or src/mongo/SConscript.
-'''
+"""
import optparse
import os
@@ -36,6 +36,7 @@ from subprocess import (Popen, PIPE, STDOUT)
def main(argv):
+ """Execute Main program."""
args = []
for arg in argv[1:]:
if arg.startswith("@"):
@@ -49,23 +50,24 @@ def main(argv):
opts = parse_options(args)
if opts.archive_format in ('tar', 'tgz'):
make_tar_archive(opts)
- elif opts.archive_format in ('zip'):
+ elif opts.archive_format == 'zip':
make_zip_archive(opts)
else:
raise ValueError('Unsupported archive format "%s"' % opts.archive_format)
-def delete_directory(dir):
- '''Recursively deletes a directory and its contents.
- '''
+def delete_directory(directory):
+ """Recursively deletes a directory and its contents."""
try:
- shutil.rmtree(dir)
- except Exception:
+ shutil.rmtree(directory)
+ except Exception: # pylint: disable=broad-except
pass
def make_tar_archive(opts):
- '''Given the parsed options, generates the 'opt.output_filename'
+ """Generate tar archive.
+
+ Given the parsed options, generates the 'opt.output_filename'
tarball containing all the files in 'opt.input_filename' renamed
according to the mappings in 'opts.transformations'.
@@ -77,9 +79,9 @@ def make_tar_archive(opts):
required by 'opts.transformations'. Once the tarball has been
created, all temporary directory structures created for the
purposes of compressing, are removed.
- '''
+ """
tar_options = "cvf"
- if opts.archive_format is 'tgz':
+ if opts.archive_format == 'tgz':
tar_options += "z"
# clean and create a temp directory to copy files to
@@ -112,13 +114,15 @@ def make_tar_archive(opts):
def make_zip_archive(opts):
- '''Given the parsed options, generates the 'opt.output_filename'
+ """Generate the zip archive.
+
+ Given the parsed options, generates the 'opt.output_filename'
zipfile containing all the files in 'opt.input_filename' renamed
according to the mappings in 'opts.transformations'.
All files in 'opt.output_filename' are renamed before being
written into the zipfile.
- '''
+ """
archive = open_zip_archive_for_write(opts.output_filename)
try:
for input_filename in opts.input_filenames:
@@ -129,6 +133,7 @@ def make_zip_archive(opts):
def parse_options(args):
+ """Parse program options."""
parser = optparse.OptionParser()
parser.add_option('-o', dest='output_filename', default=None,
help='Name of the archive to output.', metavar='FILE')
@@ -166,30 +171,34 @@ def parse_options(args):
xform.replace(os.path.altsep or os.path.sep, os.path.sep).split('=', 1)
for xform in opts.transformations
]
- except Exception, e:
- parser.error(e)
+ except Exception, err: # pylint: disable=broad-except
+ parser.error(err)
return opts
def open_zip_archive_for_write(filename):
- '''Open a zip archive for writing and return it.
- '''
+ """Open a zip archive for writing and return it."""
# Infuriatingly, Zipfile calls the "add" method "write", but they're otherwise identical,
# for our purposes. WrappedZipFile is a minimal adapter class.
class WrappedZipFile(zipfile.ZipFile):
+ """WrappedZipFile class."""
+
def add(self, filename, arcname):
+ """Add filename to zip."""
return self.write(filename, arcname)
return WrappedZipFile(filename, 'w', zipfile.ZIP_DEFLATED)
def get_preferred_filename(input_filename, transformations):
- '''Does a prefix subsitution on 'input_filename' for the
+ """Return preferred filename.
+
+ Perform a prefix substitution on 'input_filename' for the
first matching transformation in 'transformations' and
- returns the substituted string
- '''
+ returns the substituted string.
+ """
for match, replace in transformations:
match_lower = match.lower()
input_filename_lower = input_filename.lower()
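
A small sketch of the prefix substitution get_preferred_filename() performs, per the docstring above; the mapping and path are hypothetical, matching is case-insensitive, and only the first matching rule applies:

    transformations = [("dist/", "mongodb-2.0.2/"), ("src/", "mongodb-2.0.2/src/")]
    get_preferred_filename("dist/bin/mongod", transformations)
    # -> "mongodb-2.0.2/bin/mongod" (first rule matched; the second is never tried)
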
diff --git a/buildscripts/make_vcxproj.py b/buildscripts/make_vcxproj.py
index c0d18fffd2f..730f4e20726 100644
--- a/buildscripts/make_vcxproj.py
+++ b/buildscripts/make_vcxproj.py
@@ -1,16 +1,17 @@
-# Generate vcxproj and vcxproj.filters files for browsing code in Visual Studio 2015.
-# To build mongodb, you must use scons. You can use this project to navigate code during debugging.
-#
-# HOW TO USE
-#
-# First, you need a compile_commands.json file, to generate run the following command:
-# scons compiledb
-#
-# Next, run the following command
-# python buildscripts/make_vcxproj.py FILE_NAME
-#
-# where FILE_NAME is the of the file to generate e.g., "mongod"
-#
+"""Generate vcxproj and vcxproj.filters files for browsing code in Visual Studio 2015.
+
+To build mongodb, you must use scons. You can use this project to navigate code during debugging.
+
+ HOW TO USE
+
+ First, you need a compile_commands.json file, to generate run the following command:
+ scons compiledb
+
+ Next, run the following command
+ python buildscripts/make_vcxproj.py FILE_NAME
+
+ where FILE_NAME is the name of the file to generate, e.g., "mongod"
+"""
import json
import os
@@ -35,7 +36,7 @@ VCXPROJ_FOOTER = r"""
def get_defines(args):
- """Parse a compiler argument list looking for defines"""
+ """Parse a compiler argument list looking for defines."""
ret = set()
for arg in args:
if arg.startswith('/D'):
@@ -44,7 +45,7 @@ def get_defines(args):
def get_includes(args):
- """Parse a compiler argument list looking for includes"""
+ """Parse a compiler argument list looking for includes."""
ret = set()
for arg in args:
if arg.startswith('/I'):
@@ -52,10 +53,11 @@ def get_includes(args):
return ret
-class ProjFileGenerator(object):
- """Generate a .vcxproj and .vcxprof.filters file"""
+class ProjFileGenerator(object): # pylint: disable=too-many-instance-attributes
+ """Generate a .vcxproj and .vcxprof.filters file."""
def __init__(self, target):
+ """Initialize ProjFileGenerator."""
# we handle DEBUG in the vcxproj header:
self.common_defines = set()
self.common_defines.add("DEBUG")
@@ -84,8 +86,8 @@ class ProjFileGenerator(object):
self.vcxproj.write(header_str)
common_defines = self.all_defines
- for c in self.compiles:
- common_defines = common_defines.intersection(c['defines'])
+ for comp in self.compiles:
+ common_defines = common_defines.intersection(comp['defines'])
self.vcxproj.write("<!-- common_defines -->\n")
self.vcxproj.write("<ItemDefinitionGroup><ClCompile><PreprocessorDefinitions>" +
@@ -95,7 +97,7 @@ class ProjFileGenerator(object):
self.vcxproj.write(" <ItemGroup>\n")
for command in self.compiles:
defines = command["defines"].difference(common_defines)
- if len(defines) > 0:
+ if defines:
self.vcxproj.write(
" <ClCompile Include=\"" + command["file"] + "\"><PreprocessorDefinitions>" +
';'.join(defines) + ";%(PreprocessorDefinitions)" +
@@ -118,12 +120,12 @@ class ProjFileGenerator(object):
self.filters.close()
def parse_line(self, line):
- """Parse a build line"""
+ """Parse a build line."""
if line.startswith("cl"):
self.__parse_cl_line(line[3:])
def __parse_cl_line(self, line):
- """Parse a compiler line"""
+ """Parse a compiler line."""
# Get the file we are compiling
file_name = re.search(r"/c ([\w\\.-]+) ", line).group(1)
@@ -146,16 +148,17 @@ class ProjFileGenerator(object):
self.compiles.append({"file": file_name, "defines": file_defines})
- def __is_header(self, name):
- """Is this a header file?"""
+ @staticmethod
+ def __is_header(name):
+ """Return True if this a header file."""
headers = [".h", ".hpp", ".hh", ".hxx"]
for header in headers:
if name.endswith(header):
return True
return False
- def __write_filters(self):
- """Generate the vcxproj.filters file"""
+ def __write_filters(self): # pylint: disable=too-many-branches
+ """Generate the vcxproj.filters file."""
# 1. get a list of directories for all the files
# 2. get all the headers in each of these dirs
# 3. Output these lists of files to vcxproj and vcxproj.headers
@@ -191,7 +194,7 @@ class ProjFileGenerator(object):
for directory in dirs:
if os.path.exists(directory):
for file_name in os.listdir(directory):
- if "SConstruct" == file_name or "SConscript" in file_name:
+ if file_name == "SConstruct" or "SConscript" in file_name:
scons_files.add(directory + "\\" + file_name)
scons_files.add("SConstruct")
@@ -244,6 +247,7 @@ class ProjFileGenerator(object):
def main():
+ """Execute Main program."""
if len(sys.argv) != 2:
print r"Usage: python buildscripts\make_vcxproj.py FILE_NAME"
return
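
For reference, a sketch of what get_defines() and get_includes() extract from a cl.exe argument list, assuming each flag's two-character prefix is stripped into the returned set:

    args = ["/DNDEBUG", "/D_WIN32", "/Isrc", "/Ithird_party", "/c", "db.cpp"]
    get_defines(args)   # -> set(["NDEBUG", "_WIN32"])
    get_includes(args)  # -> set(["src", "third_party"])
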
diff --git a/buildscripts/moduleconfig.py b/buildscripts/moduleconfig.py
index e6f39b65d5e..29c2d99780c 100644
--- a/buildscripts/moduleconfig.py
+++ b/buildscripts/moduleconfig.py
@@ -26,7 +26,7 @@ MongoDB SConscript files do.
from __future__ import print_function
__all__ = ('discover_modules', 'discover_module_directories', 'configure_modules',
- 'register_module_test')
+ 'register_module_test') # pylint: disable=undefined-all-variable
import imp
import inspect
@@ -34,9 +34,9 @@ import os
def discover_modules(module_root, allowed_modules):
- """Scans module_root for subdirectories that look like MongoDB modules.
+ """Scan module_root for subdirectories that look like MongoDB modules.
- Returns a list of imported build.py module objects.
+ Return a list of imported build.py module objects.
"""
found_modules = []
@@ -74,9 +74,9 @@ def discover_modules(module_root, allowed_modules):
def discover_module_directories(module_root, allowed_modules):
- """Scans module_root for subdirectories that look like MongoDB modules.
+ """Scan module_root for subdirectories that look like MongoDB modules.
- Returns a list of directory names.
+ Return a list of directory names.
"""
if not os.path.isdir(module_root):
return []
@@ -105,23 +105,23 @@ def discover_module_directories(module_root, allowed_modules):
def configure_modules(modules, conf):
- """ Run the configure() function in the build.py python modules for each module in "modules"
- (as created by discover_modules).
+ """Run the configure() function in the build.py python modules for each module in "modules".
+
+ The modules were created by discover_modules.
The configure() function should prepare the Mongo build system for building the module.
"""
for module in modules:
name = module.name
print("configuring module: %s" % (name))
-
- root = os.path.dirname(module.__file__)
module.configure(conf, conf.env)
def get_module_sconscripts(modules):
+ """Return all modules' sconscripts."""
sconscripts = []
- for m in modules:
- module_dir_path = __get_src_relative_path(os.path.join(os.path.dirname(m.__file__)))
+ for mod in modules:
+ module_dir_path = __get_src_relative_path(os.path.join(os.path.dirname(mod.__file__)))
sconscripts.append(os.path.join(module_dir_path, 'SConscript'))
return sconscripts
@@ -142,8 +142,9 @@ def __get_src_relative_path(path):
def __get_module_path(module_frame_depth):
- """Return the path to the MongoDB module whose build.py is executing "module_frame_depth" frames
- above this function, relative to the "src" directory.
+ """Return the path to the MongoDB module whose build.py is executing "module_frame_depth" frames.
+
+ This is above this function, relative to the "src" directory.
"""
module_filename = inspect.stack()[module_frame_depth + 1][1]
return os.path.dirname(__get_src_relative_path(module_filename))
@@ -185,8 +186,9 @@ def get_current_module_build_path():
def get_current_module_libdep_name(libdep_rel_path):
- """Return a $BUILD_DIR relative path to a "libdep_rel_path", where "libdep_rel_path"
- is specified relative to the MongoDB module's build.py file.
+ """Return a $BUILD_DIR relative path to a "libdep_rel_path".
+
+ The "libdep_rel_path" is relative to the MongoDB module's build.py file.
May only meaningfully be called from within build.py
"""
diff --git a/buildscripts/mongosymb.py b/buildscripts/mongosymb.py
index a38c395978d..bf11ff872c5 100755
--- a/buildscripts/mongosymb.py
+++ b/buildscripts/mongosymb.py
@@ -24,9 +24,9 @@ import subprocess
import sys
-def symbolize_frames(trace_doc, dbg_path_resolver, symbolizer_path=None, dsym_hint=None):
- """Given a trace_doc in MongoDB stack dump format, returns a list of symbolized stack frames.
- """
+def symbolize_frames( # pylint: disable=too-many-locals
+ trace_doc, dbg_path_resolver, symbolizer_path=None, dsym_hint=None):
+ """Return a list of symbolized stack frames from a trace_doc in MongoDB stack dump format."""
if symbolizer_path is None:
symbolizer_path = os.environ.get("MONGOSYMB_SYMBOLIZER_PATH", "llvm-symbolizer")
@@ -34,8 +34,9 @@ def symbolize_frames(trace_doc, dbg_path_resolver, symbolizer_path=None, dsym_hi
dsym_hint = []
def make_base_addr_map(somap_list):
- """Makes a map from binary load address to description of library from the somap, which is
- a list of dictionaries describing individual loaded libraries.
+ """Return map from binary load address to description of library from the somap_list.
+
+ The somap_list is a list of dictionaries describing individual loaded libraries.
"""
return {so_entry["b"]: so_entry for so_entry in somap_list if so_entry.has_key("b")}
@@ -71,9 +72,9 @@ def symbolize_frames(trace_doc, dbg_path_resolver, symbolizer_path=None, dsym_hi
stderr=open("/dev/null"))
def extract_symbols(stdin):
- """Extracts symbol information from the output of llvm-symbolizer.
+ """Extract symbol information from the output of llvm-symbolizer.
- Returns a list of dictionaries, each of which has fn, file, column and line entries.
+ Return a list of dictionaries, each of which has fn, file, column and line entries.
The format of llvm-symbolizer output is that for every CODE line of input,
it outputs zero or more pairs of lines, and then a blank line. This way, if
@@ -109,47 +110,58 @@ def symbolize_frames(trace_doc, dbg_path_resolver, symbolizer_path=None, dsym_hi
return frames
-class path_dbg_file_resolver(object):
+class PathDbgFileResolver(object):
+ """PathDbgFileResolver class."""
+
def __init__(self, bin_path_guess):
+ """Initialize PathDbgFileResolver."""
self._bin_path_guess = bin_path_guess
def get_dbg_file(self, soinfo):
+ """Return dbg file name."""
return soinfo.get("path", self._bin_path_guess)
-class s3_buildid_dbg_file_resolver(object):
+class S3BuildidDbgFileResolver(object):
+ """S3BuildidDbgFileResolver class."""
+
def __init__(self, cache_dir, s3_bucket):
+ """Initialize S3BuildidDbgFileResolver."""
self._cache_dir = cache_dir
self._s3_bucket = s3_bucket
def get_dbg_file(self, soinfo):
- buildId = soinfo.get("buildId", None)
- if buildId is None:
+ """Return dbg file name."""
+ build_id = soinfo.get("buildId", None)
+ if build_id is None:
return None
- buildId = buildId.lower()
- buildIdPath = os.path.join(self._cache_dir, buildId + ".debug")
- if not os.path.exists(buildIdPath):
+ build_id = build_id.lower()
+ build_id_path = os.path.join(self._cache_dir, build_id + ".debug")
+ if not os.path.exists(build_id_path):
try:
- self._get_from_s3(buildId)
- except:
+ self._get_from_s3(build_id)
+ except Exception: # pylint: disable=broad-except
ex = sys.exc_info()[0]
- sys.stderr.write("Failed to find debug symbols for %s in s3: %s\n" % (buildId, ex))
+ sys.stderr.write("Failed to find debug symbols for %s in s3: %s\n" % (build_id, ex))
return None
- if not os.path.exists(buildIdPath):
+ if not os.path.exists(build_id_path):
return None
- return buildIdPath
+ return build_id_path
- def _get_from_s3(self, buildId):
+ def _get_from_s3(self, build_id):
+ """Download debug symbols from S3."""
subprocess.check_call(
- ['wget', 'https://s3.amazonaws.com/%s/%s.debug.gz' %
- (self._s3_bucket, buildId)], cwd=self._cache_dir)
- subprocess.check_call(['gunzip', buildId + ".debug.gz"], cwd=self._cache_dir)
+ ['wget',
+ 'https://s3.amazonaws.com/%s/%s.debug.gz' %
+ (self._s3_bucket, build_id)], cwd=self._cache_dir)
+ subprocess.check_call(['gunzip', build_id + ".debug.gz"], cwd=self._cache_dir)
-def classic_output(frames, outfile, **kwargs):
+def classic_output(frames, outfile, **kwargs): # pylint: disable=unused-argument
+ """Provide classic output."""
for frame in frames:
symbinfo = frame["symbinfo"]
- if len(symbinfo) > 0:
+ if symbinfo:
for sframe in symbinfo:
outfile.write(" %(file)s:%(line)s:%(column)s: %(fn)s\n" % sframe)
else:
@@ -157,13 +169,14 @@ def classic_output(frames, outfile, **kwargs):
def main(argv):
+ """Execute Main program."""
parser = optparse.OptionParser()
parser.add_option("--dsym-hint", action="append", dest="dsym_hint")
parser.add_option("--symbolizer-path", dest="symbolizer_path", default=None)
parser.add_option("--debug-file-resolver", dest="debug_file_resolver", default="path")
parser.add_option("--output-format", dest="output_format", default="classic")
(options, args) = parser.parse_args(argv)
- resolver_constructor = dict(path=path_dbg_file_resolver, s3=s3_buildid_dbg_file_resolver).get(
+ resolver_constructor = dict(path=PathDbgFileResolver, s3=S3BuildidDbgFileResolver).get(
options.debug_file_resolver, None)
if resolver_constructor is None:
sys.stderr.write("Invalid debug-file-resolver argument: %s\n" % options.debug_file_resolver)
diff --git a/buildscripts/msitrim.py b/buildscripts/msitrim.py
index 52736d8e869..2e6473f535d 100644
--- a/buildscripts/msitrim.py
+++ b/buildscripts/msitrim.py
@@ -1,17 +1,15 @@
-"""Script to fix up our MSI files """
+"""Script to fix up our MSI files."""
+
+from __future__ import print_function
import argparse
-import msilib
import shutil
-parser = argparse.ArgumentParser(description='Trim MSI.')
-parser.add_argument('file', type=argparse.FileType('r'), help='file to trim')
-parser.add_argument('out', type=argparse.FileType('w'), help='file to output to')
-
-args = parser.parse_args()
+import msilib
-def exec_delete(query):
+def exec_delete(db, query):
+ """Execute delete on db."""
view = db.OpenView(query)
view.Execute(None)
@@ -20,7 +18,8 @@ def exec_delete(query):
view.Close()
-def exec_update(query, column, value):
+def exec_update(db, query, column, value):
+ """Execute update on db."""
view = db.OpenView(query)
view.Execute(None)
@@ -30,23 +29,38 @@ def exec_update(query, column, value):
view.Close()
-print "Trimming MSI"
+def main():
+ """Execute Main program."""
+ parser = argparse.ArgumentParser(description='Trim MSI.')
+ parser.add_argument('file', type=argparse.FileType('r'), help='file to trim')
+ parser.add_argument('out', type=argparse.FileType('w'), help='file to output to')
+
+ args = parser.parse_args()
+ print("Trimming MSI")
+
+ db = msilib.OpenDatabase(args.file.name, msilib.MSIDBOPEN_DIRECT)
+
+ exec_delete(
+ db,
+ "select * from ControlEvent WHERE Dialog_ = 'LicenseAgreementDlg' AND Control_ = 'Next' AND Event = 'NewDialog' AND Argument = 'CustomizeDlg'"
+ )
+ exec_delete(
+ db,
+ "select * from ControlEvent WHERE Dialog_ = 'CustomizeDlg' AND Control_ = 'Back' AND Event = 'NewDialog' AND Argument = 'LicenseAgreementDlg'"
+ )
+ exec_delete(
+ db,
+ "select * from ControlEvent WHERE Dialog_ = 'CustomizeDlg' AND Control_ = 'Next' AND Event = 'NewDialog' AND Argument = 'VerifyReadyDlg'"
+ )
+ exec_delete(
+ db,
+ "select * from ControlEvent WHERE Dialog_ = 'VerifyReadyDlg' AND Control_ = 'Back' AND Event = 'NewDialog' AND Argument = 'CustomizeDlg'"
+ )
-db = msilib.OpenDatabase(args.file.name, msilib.MSIDBOPEN_DIRECT)
+ db.Commit()
-exec_delete(
- "select * from ControlEvent WHERE Dialog_ = 'LicenseAgreementDlg' AND Control_ = 'Next' AND Event = 'NewDialog' AND Argument = 'CustomizeDlg'"
-)
-exec_delete(
- "select * from ControlEvent WHERE Dialog_ = 'CustomizeDlg' AND Control_ = 'Back' AND Event = 'NewDialog' AND Argument = 'LicenseAgreementDlg'"
-)
-exec_delete(
- "select * from ControlEvent WHERE Dialog_ = 'CustomizeDlg' AND Control_ = 'Next' AND Event = 'NewDialog' AND Argument = 'VerifyReadyDlg'"
-)
-exec_delete(
- "select * from ControlEvent WHERE Dialog_ = 'VerifyReadyDlg' AND Control_ = 'Back' AND Event = 'NewDialog' AND Argument = 'CustomizeDlg'"
-)
+ shutil.copyfile(args.file.name, args.out.name)
-db.Commit()
-shutil.copyfile(args.file.name, args.out.name)
+if __name__ == "__main__":
+ main()
diff --git a/buildscripts/packager.py b/buildscripts/packager.py
index 6fe42425885..426545fe7a5 100755
--- a/buildscripts/packager.py
+++ b/buildscripts/packager.py
@@ -1,30 +1,33 @@
#!/usr/bin/env python
+"""Packager module.
-# This program makes Debian and RPM repositories for MongoDB, by
-# downloading our tarballs of statically linked executables and
-# insinuating them into Linux packages. It must be run on a
-# Debianoid, since Debian provides tools to make RPMs, but RPM-based
-# systems don't provide debian packaging crud.
-
-# Notes:
-#
-# * Almost anything that you want to be able to influence about how a
-# package construction must be embedded in some file that the
-# packaging tool uses for input (e.g., debian/rules, debian/control,
-# debian/changelog; or the RPM specfile), and the precise details are
-# arbitrary and silly. So this program generates all the relevant
-# inputs to the packaging tools.
-#
-# * Once a .deb or .rpm package is made, there's a separate layer of
-# tools that makes a "repository" for use by the apt/yum layers of
-# package tools. The layouts of these repositories are arbitrary and
-# silly, too.
-#
-# * Before you run the program on a new host, these are the
-# prerequisites:
-#
-# apt-get install dpkg-dev rpm debhelper fakeroot ia32-libs createrepo git-core
-# echo "Now put the dist gnupg signing keys in ~root/.gnupg"
+This program makes Debian and RPM repositories for MongoDB, by
+downloading our tarballs of statically linked executables and
+insinuating them into Linux packages. It must be run on a
+Debianoid, since Debian provides tools to make RPMs, but RPM-based
+systems don't provide debian packaging crud.
+
+Notes
+-----
+* Almost anything that you want to be able to influence about how a
+package construction must be embedded in some file that the
+packaging tool uses for input (e.g., debian/rules, debian/control,
+debian/changelog; or the RPM specfile), and the precise details are
+arbitrary and silly. So this program generates all the relevant
+inputs to the packaging tools.
+
+* Once a .deb or .rpm package is made, there's a separate layer of
+tools that makes a "repository" for use by the apt/yum layers of
+package tools. The layouts of these repositories are arbitrary and
+silly, too.
+
+* Before you run the program on a new host, these are the
+prerequisites:
+
+apt-get install dpkg-dev rpm debhelper fakeroot ia32-libs createrepo git-core
+echo "Now put the dist gnupg signing keys in ~root/.gnupg"
+
+"""
import argparse
import errno
@@ -45,7 +48,10 @@ DISTROS = ["suse", "debian", "redhat", "ubuntu", "amazon", "amazon2"]
class Spec(object):
+ """Spec class."""
+
def __init__(self, ver, gitspec=None, rel=None):
+ """Initialize Spec."""
self.ver = ver
self.gitspec = gitspec
self.rel = rel
@@ -54,44 +60,53 @@ class Spec(object):
# Patch builds version numbers are in the form: 3.5.5-64-g03945fa-patch-58debcdb3ff1223c9d00005b
#
def is_nightly(self):
+ """Return True if nightly."""
return bool(re.search("-$", self.version())) or bool(
- re.search("\d-\d+-g[0-9a-f]+$", self.version()))
+ re.search(r"\d-\d+-g[0-9a-f]+$", self.version()))
def is_patch(self):
- return bool(re.search("\d-\d+-g[0-9a-f]+-patch-[0-9a-f]+$", self.version()))
+ """Return True if patch."""
+ return bool(re.search(r"\d-\d+-g[0-9a-f]+-patch-[0-9a-f]+$", self.version()))
def is_rc(self):
- return bool(re.search("-rc\d+$", self.version()))
+ """Return True if rc."""
+ return bool(re.search(r"-rc\d+$", self.version()))
def is_pre_release(self):
+ """Return True if pre-release."""
return self.is_rc() or self.is_nightly()
def version(self):
+ """Return version."""
return self.ver
def patch_id(self):
+ """Return patch id."""
if self.is_patch():
return re.sub(r'.*-([0-9a-f]+$)', r'\1', self.version())
- else:
- return "none"
+ return "none"
def metadata_gitspec(self):
"""Git revision to use for spec+control+init+manpage files.
- The default is the release tag for the version being packaged."""
- if (self.gitspec):
+
+ The default is the release tag for the version being packaged.
+ """
+ if self.gitspec:
return self.gitspec
- else:
- return 'r' + self.version()
+ return 'r' + self.version()
def version_better_than(self, version_string):
+ """Return True if 'version_string' is greater than instance version."""
# FIXME: this is wrong, but I'm in a hurry.
# e.g., "1.8.2" < "1.8.10", "1.8.2" < "1.8.2-rc1"
return self.ver > version_string
def suffix(self):
+ """Return suffix."""
return "-org" if int(self.ver.split(".")[1]) % 2 == 0 else "-org-unstable"
def prelease(self):
+ """Return pre-release verison suffix."""
# NOTE: This is only called for RPM packages, and only after
# pversion() below has been called. If you want to change this format
# and want DEB packages to match, make sure to update pversion()
@@ -114,10 +129,10 @@ class Spec(object):
return "0.%s.latest" % (corenum)
elif self.is_patch():
return "0.%s.patch.%s" % (corenum, self.patch_id())
- else:
- return str(corenum)
+ return str(corenum)
def pversion(self, distro):
+ """Return the pversion."""
# Note: Debian packages have funny rules about dashes in
# version numbers, and RPM simply forbids dashes. pversion
# will be the package's version number (but we need to know
@@ -146,27 +161,36 @@ class Spec(object):
def branch(self):
"""Return the major and minor portions of the specified version.
+
For example, if the version is "2.5.5" the branch would be "2.5"
"""
return ".".join(self.ver.split(".")[0:2])
class Distro(object):
+ """Distro class."""
+
def __init__(self, string):
- self.n = string
+ """Initialize Distro."""
+ self.dname = string
def name(self):
- return self.n
+ """Return name."""
+ return self.dname
- def pkgbase(self):
+ @staticmethod
+ def pkgbase():
+ """Return pkgbase."""
return "mongodb"
def archname(self, arch):
"""Return the packaging system's architecture name.
+
Power and x86 have different names for apt/yum (ppc64le/ppc64el
- and x86_64/amd64)
+ and x86_64/amd64).
"""
- if re.search("^(debian|ubuntu)", self.n):
+ # pylint: disable=too-many-return-statements
+ if re.search("^(debian|ubuntu)", self.dname):
if arch == "ppc64le":
return "ppc64el"
elif arch == "s390x":
@@ -175,23 +199,23 @@ class Distro(object):
return "arm64"
elif arch.endswith("86"):
return "i386"
- else:
- return "amd64"
- elif re.search("^(suse|centos|redhat|fedora|amazon)", self.n):
+ return "amd64"
+ elif re.search("^(suse|centos|redhat|fedora|amazon)", self.dname):
if arch == "ppc64le":
return "ppc64le"
elif arch == "s390x":
return "s390x"
elif arch.endswith("86"):
return "i686"
- else:
- return "x86_64"
+ return "x86_64"
else:
raise Exception("BUG: unsupported platform?")
+ # pylint: enable=too-many-return-statements
+
+ def repodir(self, arch, build_os, spec): # noqa: D406,D407,D412,D413
+ """Return the directory where we'll place the package files for (distro, distro_version).
- def repodir(self, arch, build_os, spec):
- """Return the directory where we'll place the package files for
- (distro, distro_version) in that distro's preferred repository
+ This is in that distro's preferred repository
layout (as distinct from where that distro's packaging building
tools place the package files).
@@ -211,7 +235,6 @@ class Distro(object):
repo/zypper/suse/11/mongodb-org/2.5/x86_64
zypper/suse/11/mongodb-org/2.5/i386
-
"""
repo_directory = ""
@@ -221,16 +244,16 @@ class Distro(object):
else:
repo_directory = spec.branch()
- if re.search("^(debian|ubuntu)", self.n):
+ if re.search("^(debian|ubuntu)", self.dname):
return "repo/apt/%s/dists/%s/mongodb-org/%s/%s/binary-%s/" % (
- self.n, self.repo_os_version(build_os), repo_directory, self.repo_component(),
+ self.dname, self.repo_os_version(build_os), repo_directory, self.repo_component(),
self.archname(arch))
- elif re.search("(redhat|fedora|centos|amazon)", self.n):
- return "repo/yum/%s/%s/mongodb-org/%s/%s/RPMS/" % (self.n,
+ elif re.search("(redhat|fedora|centos|amazon)", self.dname):
+ return "repo/yum/%s/%s/mongodb-org/%s/%s/RPMS/" % (self.dname,
self.repo_os_version(build_os),
repo_directory, self.archname(arch))
- elif re.search("(suse)", self.n):
- return "repo/zypper/%s/%s/mongodb-org/%s/%s/RPMS/" % (self.n,
+ elif re.search("(suse)", self.dname):
+ return "repo/zypper/%s/%s/mongodb-org/%s/%s/RPMS/" % (self.dname,
self.repo_os_version(build_os),
repo_directory,
self.archname(arch))
@@ -238,28 +261,33 @@ class Distro(object):
raise Exception("BUG: unsupported platform?")
def repo_component(self):
- """Return the name of the section/component/pool we are publishing into -
- e.g. "multiverse" for Ubuntu, "main" for debian."""
- if self.n == 'ubuntu':
+ """Return the name of the section/component/pool we are publishing into.
+
+ Example, "multiverse" for Ubuntu, "main" for debian.
+ """
+ if self.dname == 'ubuntu':
return "multiverse"
- elif self.n == 'debian':
+ elif self.dname == 'debian':
return "main"
else:
- raise Exception("unsupported distro: %s" % self.n)
+ raise Exception("unsupported distro: %s" % self.dname)
+
+ def repo_os_version(self, build_os): # pylint: disable=too-many-branches
+ """Return an OS version suitable for package repo directory naming.
- def repo_os_version(self, build_os):
- """Return an OS version suitable for package repo directory
- naming - e.g. 5, 6 or 7 for redhat/centos, "precise," "wheezy," etc.
- for Ubuntu/Debian, 11 for suse, "2013.03" for amazon"""
- if self.n == 'suse':
+ For example, 5, 6 or 7 for redhat/centos, "precise," "wheezy," etc.
+ for Ubuntu/Debian, 11 for suse, "2013.03" for amazon.
+ """
+ # pylint: disable=too-many-return-statements
+ if self.dname == 'suse':
return re.sub(r'^suse(\d+)$', r'\1', build_os)
- if self.n == 'redhat':
+ if self.dname == 'redhat':
return re.sub(r'^rhel(\d).*$', r'\1', build_os)
- if self.n == 'amazon':
+ if self.dname == 'amazon':
return "2013.03"
- elif self.n == 'amazon2':
+ elif self.dname == 'amazon2':
return "2017.12"
- elif self.n == 'ubuntu':
+ elif self.dname == 'ubuntu':
if build_os == 'ubuntu1204':
return "precise"
elif build_os == 'ubuntu1404':
@@ -268,7 +296,7 @@ class Distro(object):
return "xenial"
else:
raise Exception("unsupported build_os: %s" % build_os)
- elif self.n == 'debian':
+ elif self.dname == 'debian':
if build_os == 'debian71':
return 'wheezy'
elif build_os == 'debian81':
@@ -278,53 +306,62 @@ class Distro(object):
else:
raise Exception("unsupported build_os: %s" % build_os)
else:
- raise Exception("unsupported distro: %s" % self.n)
+ raise Exception("unsupported distro: %s" % self.dname)
+ # pylint: enable=too-many-return-statements
def make_pkg(self, build_os, arch, spec, srcdir):
- if re.search("^(debian|ubuntu)", self.n):
+ """Return the package."""
+ if re.search("^(debian|ubuntu)", self.dname):
return make_deb(self, build_os, arch, spec, srcdir)
- elif re.search("^(suse|centos|redhat|fedora|amazon)", self.n):
+ elif re.search("^(suse|centos|redhat|fedora|amazon)", self.dname):
return make_rpm(self, build_os, arch, spec, srcdir)
else:
raise Exception("BUG: unsupported platform?")
def build_os(self, arch):
- """Return the build os label in the binary package to download (e.g. "rhel55" for redhat,
- "ubuntu1204" for ubuntu, "debian71" for debian, "suse11" for suse, etc.)"""
+ """Return the build os label in the binary package to download.
+
+ Example, "rhel55" for redhat, "ubuntu1204" for ubuntu, "debian71" for debian,
+ "suse11" for suse, etc.
+ """
# Community builds only support amd64
if arch not in ['x86_64', 'ppc64le', 's390x', 'arm64']:
raise Exception("BUG: unsupported architecture (%s)" % arch)
- if re.search("(suse)", self.n):
+ if re.search("(suse)", self.dname):
return ["suse11", "suse12"]
- elif re.search("(redhat|fedora|centos)", self.n):
+ elif re.search("(redhat|fedora|centos)", self.dname):
return ["rhel70", "rhel71", "rhel72", "rhel62", "rhel55"]
- elif self.n in ['amazon', 'amazon2']:
- return [self.n]
- elif self.n == 'ubuntu':
+ elif self.dname in ['amazon', 'amazon2']:
+ return [self.dname]
+ elif self.dname == 'ubuntu':
return [
"ubuntu1204",
"ubuntu1404",
"ubuntu1604",
]
- elif self.n == 'debian':
+ elif self.dname == 'debian':
return ["debian71", "debian81", "debian92"]
else:
raise Exception("BUG: unsupported platform?")
def release_dist(self, build_os):
- """Return the release distribution to use in the rpm - "el5" for rhel 5.x,
- "el6" for rhel 6.x, return anything else unchanged"""
+ """Return the release distribution to use in the rpm.
+
+ "el5" for rhel 5.x,
+ "el6" for rhel 6.x,
+ return anything else unchanged.
+ """
- if self.n == 'amazon':
+ if self.dname == 'amazon':
return 'amzn1'
- elif self.n == 'amazon2':
+ elif self.dname == 'amazon2':
return 'amzn2'
- else:
- return re.sub(r'^rh(el\d).*$', r'\1', build_os)
+ return re.sub(r'^rh(el\d).*$', r'\1', build_os)
def get_args(distros, arch_choices):
+ """Return the program arguments."""
distro_choices = []
for distro in distros:
@@ -354,7 +391,8 @@ def get_args(distros, arch_choices):
return args
-def main(argv):
+def main():
+ """Execute Main program."""
distros = [Distro(distro) for distro in DISTROS]
@@ -386,26 +424,25 @@ def main(argv):
shutil.copyfile(args.tarball, filename)
repo = make_package(distro, build_os, arch, spec, srcdir)
- make_repo(repo, distro, build_os, spec)
+ make_repo(repo, distro, build_os)
finally:
os.chdir(oldcwd)
def crossproduct(*seqs):
- """A generator for iterating all the tuples consisting of elements
- of seqs."""
- l = len(seqs)
- if l == 0:
+ """Provide a generator for iterating all the tuples consisting of elements of seqs."""
+ num_seqs = len(seqs)
+ if num_seqs == 0:
pass
- elif l == 1:
- for i in seqs[0]:
- yield [i]
+ elif num_seqs == 1:
+ for idx in seqs[0]:
+ yield [idx]
else:
for lst in crossproduct(*seqs[:-1]):
- for i in seqs[-1]:
+ for idx in seqs[-1]:
lst2 = list(lst)
- lst2.append(i)
+ lst2.append(idx)
yield lst2
@@ -414,7 +451,7 @@ def sysassert(argv):
print "In %s, running %s" % (os.getcwd(), " ".join(argv))
sys.stdout.flush()
sys.stderr.flush()
- assert (subprocess.Popen(argv).wait() == 0)
+ assert subprocess.Popen(argv).wait() == 0
def backtick(argv):
@@ -426,12 +463,12 @@ def backtick(argv):
def tarfile(build_os, arch, spec):
- """Return the location where we store the downloaded tarball for
- this package"""
+ """Return the location where we store the downloaded tarball for this package."""
return "dl/mongodb-linux-%s-%s-%s.tar.gz" % (spec.version(), build_os, arch)
def setupdir(distro, build_os, arch, spec):
+ """Return the setup directory name."""
# The setupdir will be a directory containing all inputs to the
# distro's packaging tools (e.g., package metadata files, init
# scripts, etc), along with the already-built binaries). In case
@@ -466,9 +503,10 @@ def unpack_binaries_into(build_os, arch, spec, where):
def make_package(distro, build_os, arch, spec, srcdir):
- """Construct the package for (arch, distro, spec), getting
- packaging files from srcdir and any user-specified suffix from
- suffixes"""
+ """Construct the package for (arch, distro, spec).
+
+ Get the packaging files from srcdir and any user-specified suffix from suffixes.
+ """
sdir = setupdir(distro, build_os, arch, spec)
ensure_dir(sdir)
@@ -494,9 +532,10 @@ def make_package(distro, build_os, arch, spec, srcdir):
return distro.make_pkg(build_os, arch, spec, srcdir)
-def make_repo(repodir, distro, build_os, spec):
+def make_repo(repodir, distro, build_os):
+ """Make the repo."""
if re.search("(debian|ubuntu)", repodir):
- make_deb_repo(repodir, distro, build_os, spec)
+ make_deb_repo(repodir, distro, build_os)
elif re.search("(suse|centos|redhat|fedora|amazon)", repodir):
make_rpm_repo(repodir)
else:
@@ -504,6 +543,7 @@ def make_repo(repodir, distro, build_os, spec):
def make_deb(distro, build_os, arch, spec, srcdir):
+ """Make the Debian script."""
# I can't remember the details anymore, but the initscript/upstart
# job files' names must match the package name in some way; and
# see also the --name flag to dh_installinit in the generated
@@ -559,15 +599,16 @@ def make_deb(distro, build_os, arch, spec, srcdir):
sysassert(["dpkg-buildpackage", "-uc", "-us", "-a" + distro_arch])
finally:
os.chdir(oldcwd)
- r = distro.repodir(arch, build_os, spec)
- ensure_dir(r)
+ repo_dir = distro.repodir(arch, build_os, spec)
+ ensure_dir(repo_dir)
# FIXME: see if shutil.copyfile or something can do this without
# much pain.
- sysassert(["sh", "-c", "cp -v \"%s/../\"*.deb \"%s\"" % (sdir, r)])
- return r
+ sysassert(["sh", "-c", "cp -v \"%s/../\"*.deb \"%s\"" % (sdir, repo_dir)])
+ return repo_dir
-def make_deb_repo(repo, distro, build_os, spec):
+def make_deb_repo(repo, distro, build_os):
+ """Make the Debian repo."""
# Note: the Debian repository Packages files must be generated
# very carefully in order to be usable.
oldpwd = os.getcwd()
@@ -575,19 +616,19 @@ def make_deb_repo(repo, distro, build_os, spec):
try:
dirs = set(
[os.path.dirname(deb)[2:] for deb in backtick(["find", ".", "-name", "*.deb"]).split()])
- for d in dirs:
- s = backtick(["dpkg-scanpackages", d, "/dev/null"])
- with open(d + "/Packages", "w") as f:
- f.write(s)
- b = backtick(["gzip", "-9c", d + "/Packages"])
- with open(d + "/Packages.gz", "wb") as f:
- f.write(b)
+ for directory in dirs:
+ st = backtick(["dpkg-scanpackages", directory, "/dev/null"])
+ with open(directory + "/Packages", "w") as fh:
+ fh.write(st)
+ bt = backtick(["gzip", "-9c", directory + "/Packages"])
+ with open(directory + "/Packages.gz", "wb") as fh:
+ fh.write(bt)
finally:
os.chdir(oldpwd)
# Notes: the Release{,.gpg} files must live in a special place,
# and must be created after all the Packages.gz files have been
# done.
- s = """Origin: mongodb
+ s1 = """Origin: mongodb
Label: mongodb
Suite: %s
Codename: %s/mongodb-org
@@ -603,14 +644,15 @@ Description: MongoDB packages
os.chdir(repo + "../../")
s2 = backtick(["apt-ftparchive", "release", "."])
try:
- with open("Release", 'w') as f:
- f.write(s)
- f.write(s2)
+ with open("Release", 'w') as fh:
+ fh.write(s1)
+ fh.write(s2)
finally:
os.chdir(oldpwd)
-def move_repos_into_place(src, dst):
+def move_repos_into_place(src, dst): # pylint: disable=too-many-branches
+ """Move the repos into place."""
# Find all the stuff in src/*, move it to a freshly-created
# directory beside dst, then play some games with symlinks so that
# dst names the new stuff and dst+".old" names the previous
@@ -633,8 +675,8 @@ def move_repos_into_place(src, dst):
i = i + 1
# Put the stuff in our new directory.
- for r in os.listdir(src):
- sysassert(["cp", "-rv", src + "/" + r, dname])
+ for src_file in os.listdir(src):
+ sysassert(["cp", "-rv", src + "/" + src_file, dname])
# Make a symlink to the new directory; the symlink will be renamed
# to dst shortly.
@@ -675,30 +717,31 @@ def move_repos_into_place(src, dst):
def write_debian_changelog(path, spec, srcdir):
+ """Write the debian changelog."""
oldcwd = os.getcwd()
os.chdir(srcdir)
preamble = ""
try:
- s = preamble + backtick(
+ sb = preamble + backtick(
["sh", "-c",
"git archive %s debian/changelog | tar xOf -" % spec.metadata_gitspec()])
finally:
os.chdir(oldcwd)
- lines = s.split("\n")
+ lines = sb.split("\n")
# If the first line starts with "mongodb", it's not a revision
# preamble, and so frob the version number.
lines[0] = re.sub("^mongodb \\(.*\\)", "mongodb (%s)" % (spec.pversion(Distro("debian"))),
lines[0])
# Rewrite every changelog entry starting in mongodb<space>
- lines = [re.sub("^mongodb ", "mongodb%s " % (spec.suffix()), l) for l in lines]
- lines = [re.sub("^ --", " --", l) for l in lines]
- s = "\n".join(lines)
- with open(path, 'w') as f:
- f.write(s)
+ lines = [re.sub("^mongodb ", "mongodb%s " % (spec.suffix()), line) for line in lines]
+ lines = [re.sub("^ --", " --", line) for line in lines]
+ sb = "\n".join(lines)
+ with open(path, 'w') as fh:
+ fh.write(sb)
-def make_rpm(distro, build_os, arch, spec, srcdir):
- # Create the specfile.
+def make_rpm(distro, build_os, arch, spec, srcdir): # pylint: disable=too-many-locals
+ """Create the RPM specfile."""
suffix = spec.suffix()
sdir = setupdir(distro, build_os, arch, spec)
@@ -757,7 +800,7 @@ def make_rpm(distro, build_os, arch, spec, srcdir):
macropath = os.getcwd() + "/macros"
write_rpm_macros_file(macropath, topdir, distro.release_dist(build_os))
- if len(macrofiles) > 0:
+ if macrofiles:
macrofiles = macrofiles[0] + ":" + macropath
rcfile = os.getcwd() + "/rpmrc"
write_rpmrc_file(rcfile, macrofiles)
@@ -801,15 +844,16 @@ def make_rpm(distro, build_os, arch, spec, srcdir):
])
sysassert(["rpmbuild", "-ba", "--target", distro_arch] + flags +
["%s/SPECS/mongodb%s.spec" % (topdir, suffix)])
- r = distro.repodir(arch, build_os, spec)
- ensure_dir(r)
+ repo_dir = distro.repodir(arch, build_os, spec)
+ ensure_dir(repo_dir)
# FIXME: see if some combination of shutil.copy<hoohah> and glob
# can do this without shelling out.
- sysassert(["sh", "-c", "cp -v \"%s/RPMS/%s/\"*.rpm \"%s\"" % (topdir, distro_arch, r)])
- return r
+ sysassert(["sh", "-c", "cp -v \"%s/RPMS/%s/\"*.rpm \"%s\"" % (topdir, distro_arch, repo_dir)])
+ return repo_dir
def make_rpm_repo(repo):
+ """Make the RPM repo."""
oldpwd = os.getcwd()
os.chdir(repo + "../")
try:
@@ -819,20 +863,21 @@ def make_rpm_repo(repo):
def write_rpmrc_file(path, string):
- with open(path, 'w') as f:
- f.write(string)
+ """Write the RPM rc file."""
+ with open(path, 'w') as fh:
+ fh.write(string)
def write_rpm_macros_file(path, topdir, release_dist):
- with open(path, 'w') as f:
- f.write("%%_topdir %s\n" % topdir)
- f.write("%%dist .%s\n" % release_dist)
- f.write("%_use_internal_dependency_generator 0\n")
+ """Write the RPM macros file."""
+ with open(path, 'w') as fh:
+ fh.write("%%_topdir %s\n" % topdir)
+ fh.write("%%dist .%s\n" % release_dist)
+ fh.write("%_use_internal_dependency_generator 0\n")
def ensure_dir(filename):
- """Make sure that the directory that's the dirname part of
- filename exists, and return filename."""
+ """Ensure that the dirname directory of filename exists, and return filename."""
dirpart = os.path.dirname(filename)
try:
os.makedirs(dirpart)
@@ -846,12 +891,12 @@ def ensure_dir(filename):
def is_valid_file(parser, filename):
- """Check if file exists, and return the filename"""
+ """Check if file exists, and return the filename."""
if not os.path.exists(filename):
parser.error("The file %s does not exist!" % filename)
- else:
- return filename
+ return None
+ return filename
if __name__ == "__main__":
- main(sys.argv)
+ main()
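
A quick sketch of what the rewritten crossproduct() yields:

    list(crossproduct(["debian", "redhat"], ["x86_64", "arm64"]))
    # -> [['debian', 'x86_64'], ['debian', 'arm64'],
    #     ['redhat', 'x86_64'], ['redhat', 'arm64']]
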
diff --git a/buildscripts/packager-enterprise.py b/buildscripts/packager_enterprise.py
index a51e2dab115..0ef782862d9 100755
--- a/buildscripts/packager-enterprise.py
+++ b/buildscripts/packager_enterprise.py
@@ -1,4 +1,5 @@
#!/usr/bin/env python
+"""Packager Enterprise module."""
# This program makes Debian and RPM repositories for MongoDB, by
# downloading our tarballs of statically linked executables and
@@ -26,20 +27,16 @@
# apt-get install dpkg-dev rpm debhelper fakeroot ia32-libs createrepo git-core libsnmp15
# echo "Now put the dist gnupg signing keys in ~root/.gnupg"
-import argparse
import errno
-import getopt
from glob import glob
-import packager
import os
import re
import shutil
-import stat
-import subprocess
import sys
import tempfile
import time
-import urlparse
+
+import packager # pylint: disable=relative-import
# The MongoDB names for the architectures we support.
ARCH_CHOICES = ["x86_64", "ppc64le", "s390x", "arm64"]
@@ -49,20 +46,26 @@ DISTROS = ["suse", "debian", "redhat", "ubuntu", "amazon", "amazon2"]
class EnterpriseSpec(packager.Spec):
+ """EnterpriseSpec class."""
+
def suffix(self):
+ """Suffix."""
return "-enterprise" if int(self.ver.split(".")[1]) % 2 == 0 else "-enterprise-unstable"
class EnterpriseDistro(packager.Distro):
- def repodir(self, arch, build_os, spec):
- """Return the directory where we'll place the package files for
- (distro, distro_version) in that distro's preferred repository
+ """EnterpriseDistro class."""
+
+ def repodir(self, arch, build_os, spec): # noqa: D406,D407,D412,D413
+ """Return the directory where we'll place the package files.
+
+ This is for (distro, distro_version) in that distro's preferred repository
layout (as distinct from where that distro's packaging building
tools place the package files).
- Packages will go into repos corresponding to the major release
- series (2.5, 2.6, 2.7, 2.8, etc.) except for RC's and nightlies
- which will go into special separate "testing" directories
+ Packages will go into repos corresponding to the major release
+ series (2.5, 2.6, 2.7, 2.8, etc.) except for RC's and nightlies
+ which will go into special separate "testing" directories
Examples:
@@ -86,7 +89,6 @@ class EnterpriseDistro(packager.Distro):
repo/zypper/suse/11/mongodb-enterprise/testing/x86_64
repo/zypper/suse/11/mongodb-enterprise/testing/i386
-
"""
repo_directory = ""
@@ -96,52 +98,53 @@ class EnterpriseDistro(packager.Distro):
else:
repo_directory = spec.branch()
- if re.search("^(debian|ubuntu)", self.n):
+ if re.search("^(debian|ubuntu)", self.dname):
return "repo/apt/%s/dists/%s/mongodb-enterprise/%s/%s/binary-%s/" % (
- self.n, self.repo_os_version(build_os), repo_directory, self.repo_component(),
+ self.dname, self.repo_os_version(build_os), repo_directory, self.repo_component(),
self.archname(arch))
- elif re.search("(redhat|fedora|centos|amazon)", self.n):
+ elif re.search("(redhat|fedora|centos|amazon)", self.dname):
return "repo/yum/%s/%s/mongodb-enterprise/%s/%s/RPMS/" % (
- self.n, self.repo_os_version(build_os), repo_directory, self.archname(arch))
- elif re.search("(suse)", self.n):
+ self.dname, self.repo_os_version(build_os), repo_directory, self.archname(arch))
+ elif re.search("(suse)", self.dname):
return "repo/zypper/%s/%s/mongodb-enterprise/%s/%s/RPMS/" % (
- self.n, self.repo_os_version(build_os), repo_directory, self.archname(arch))
+ self.dname, self.repo_os_version(build_os), repo_directory, self.archname(arch))
else:
raise Exception("BUG: unsupported platform?")
- def build_os(self, arch):
- """Return the build os label in the binary package to download ("rhel57", "rhel62", "rhel67" and "rhel70"
- for redhat, the others are delegated to the super class
+ def build_os(self, arch): # pylint: disable=too-many-branches
+ """Return the build os label in the binary package to download.
+
+ The labels "rhel57", "rhel62", "rhel67" and "rhel70" are for redhat,
+ the others are delegated to the super class.
"""
+ # pylint: disable=too-many-return-statements
if arch == "ppc64le":
- if self.n == 'ubuntu':
+ if self.dname == 'ubuntu':
return ["ubuntu1604"]
- if self.n == 'redhat':
+ if self.dname == 'redhat':
return ["rhel71"]
- else:
- return []
+ return []
if arch == "s390x":
- if self.n == 'redhat':
+ if self.dname == 'redhat':
return ["rhel67", "rhel72"]
- if self.n == 'suse':
+ if self.dname == 'suse':
return ["suse11", "suse12"]
- if self.n == 'ubuntu':
+ if self.dname == 'ubuntu':
return ["ubuntu1604"]
- else:
- return []
+ return []
if arch == "arm64":
- if self.n == 'ubuntu':
+ if self.dname == 'ubuntu':
return ["ubuntu1604"]
- else:
- return []
+ return []
- if re.search("(redhat|fedora|centos)", self.n):
+ if re.search("(redhat|fedora|centos)", self.dname):
return ["rhel70", "rhel62", "rhel57"]
- else:
- return super(EnterpriseDistro, self).build_os(arch)
+ return super(EnterpriseDistro, self).build_os(arch)
+ # pylint: enable=too-many-return-statements
-def main(argv):
+def main():
+ """Execute Main program."""
distros = [EnterpriseDistro(distro) for distro in DISTROS]
@@ -175,7 +178,7 @@ def main(argv):
shutil.copyfile(args.tarball, filename)
repo = make_package(distro, build_os, arch, spec, srcdir)
- make_repo(repo, distro, build_os, spec)
+ make_repo(repo, distro, build_os)
made_pkg = True
@@ -187,15 +190,15 @@ def main(argv):
def tarfile(build_os, arch, spec):
- """Return the location where we store the downloaded tarball for
- this package"""
+ """Return the location where we store the downloaded tarball for this package."""
return "dl/mongodb-linux-%s-enterprise-%s-%s.tar.gz" % (spec.version(), build_os, arch)
def setupdir(distro, build_os, arch, spec):
+ """Return the setup directory name."""
# The setupdir will be a directory containing all inputs to the
# distro's packaging tools (e.g., package metadata files, init
- # scripts, etc), along with the already-built binaries). In case
+ # scripts, etc, along with the already-built binaries). In case
# the following format string is unclear, an example setupdir
# would be dst/x86_64/debian-sysvinit/wheezy/mongodb-org-unstable/
# or dst/x86_64/redhat/rhel57/mongodb-org-unstable/
@@ -226,9 +229,10 @@ def unpack_binaries_into(build_os, arch, spec, where):
def make_package(distro, build_os, arch, spec, srcdir):
- """Construct the package for (arch, distro, spec), getting
- packaging files from srcdir and any user-specified suffix from
- suffixes"""
+ """Construct the package for (arch, distro, spec).
+
+ Get the packaging files from srcdir and any user-specified suffix from suffixes.
+ """
sdir = setupdir(distro, build_os, arch, spec)
packager.ensure_dir(sdir)
@@ -254,16 +258,18 @@ def make_package(distro, build_os, arch, spec, srcdir):
return distro.make_pkg(build_os, arch, spec, srcdir)
-def make_repo(repodir, distro, build_os, spec):
+def make_repo(repodir, distro, build_os):
+ """Make the repo."""
if re.search("(debian|ubuntu)", repodir):
- make_deb_repo(repodir, distro, build_os, spec)
+ make_deb_repo(repodir, distro, build_os)
elif re.search("(suse|centos|redhat|fedora|amazon)", repodir):
packager.make_rpm_repo(repodir)
else:
raise Exception("BUG: unsupported platform?")
-def make_deb_repo(repo, distro, build_os, spec):
+def make_deb_repo(repo, distro, build_os):
+ """Make the Debian repo."""
# Note: the Debian repository Packages files must be generated
# very carefully in order to be usable.
oldpwd = os.getcwd()
@@ -273,19 +279,19 @@ def make_deb_repo(repo, distro, build_os, spec):
os.path.dirname(deb)[2:]
for deb in packager.backtick(["find", ".", "-name", "*.deb"]).split()
])
- for d in dirs:
- s = packager.backtick(["dpkg-scanpackages", d, "/dev/null"])
- with open(d + "/Packages", "w") as f:
- f.write(s)
- b = packager.backtick(["gzip", "-9c", d + "/Packages"])
- with open(d + "/Packages.gz", "wb") as f:
- f.write(b)
+ for directory in dirs:
+ st = packager.backtick(["dpkg-scanpackages", directory, "/dev/null"])
+ with open(directory + "/Packages", "w") as fh:
+ fh.write(st)
+ bt = packager.backtick(["gzip", "-9c", directory + "/Packages"])
+ with open(directory + "/Packages.gz", "wb") as fh:
+ fh.write(bt)
finally:
os.chdir(oldpwd)
# Notes: the Release{,.gpg} files must live in a special place,
# and must be created after all the Packages.gz files have been
# done.
- s = """Origin: mongodb
+ s1 = """Origin: mongodb
Label: mongodb
Suite: %s
Codename: %s/mongodb-enterprise
@@ -301,24 +307,25 @@ Description: MongoDB packages
os.chdir(repo + "../../")
s2 = packager.backtick(["apt-ftparchive", "release", "."])
try:
- with open("Release", 'w') as f:
- f.write(s)
- f.write(s2)
+ with open("Release", 'w') as fh:
+ fh.write(s1)
+ fh.write(s2)
finally:
os.chdir(oldpwd)
-def move_repos_into_place(src, dst):
+def move_repos_into_place(src, dst): # pylint: disable=too-many-branches
+ """Move the repos into place."""
# Find all the stuff in src/*, move it to a freshly-created
# directory beside dst, then play some games with symlinks so that
# dst names the new stuff and dst+".old" names the previous
# one. This feels like a lot of hooey for something so trivial.
# First, make a crispy fresh new directory to put the stuff in.
- i = 0
+ idx = 0
while True:
date_suffix = time.strftime("%Y-%m-%d")
- dname = dst + ".%s.%d" % (date_suffix, i)
+ dname = dst + ".%s.%d" % (date_suffix, idx)
try:
os.mkdir(dname)
break
@@ -328,17 +335,17 @@ def move_repos_into_place(src, dst):
pass
else:
raise exc
- i = i + 1
+ idx = idx + 1
# Put the stuff in our new directory.
- for r in os.listdir(src):
- packager.sysassert(["cp", "-rv", src + "/" + r, dname])
+ for src_file in os.listdir(src):
+ packager.sysassert(["cp", "-rv", src + "/" + src_file, dname])
# Make a symlink to the new directory; the symlink will be renamed
# to dst shortly.
- i = 0
+ idx = 0
while True:
- tmpnam = dst + ".TMP.%d" % i
+ tmpnam = dst + ".TMP.%d" % idx
try:
os.symlink(dname, tmpnam)
break
@@ -348,15 +355,15 @@ def move_repos_into_place(src, dst):
pass
else:
raise exc
- i = i + 1
+ idx = idx + 1
# Make a symlink to the old directory; this symlink will be
# renamed shortly, too.
oldnam = None
if os.path.exists(dst):
- i = 0
+ idx = 0
while True:
- oldnam = dst + ".old.%d" % i
+ oldnam = dst + ".old.%d" % idx
try:
os.symlink(os.readlink(dst), oldnam)
break
@@ -373,4 +380,4 @@ def move_repos_into_place(src, dst):
if __name__ == "__main__":
- main(sys.argv)
+ main()
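The symlink games described in move_repos_into_place() reduce to one POSIX idiom: build the new name under a uniquely numbered temporary symlink, then rename() it over the old one. A minimal sketch of that idiom, assuming a POSIX filesystem (atomic_symlink_swap is a hypothetical helper, not part of this patch):

    import errno
    import os

    def atomic_symlink_swap(target_dir, link_name):
        # Create a uniquely numbered temporary symlink to target_dir, then
        # rename it over link_name; rename() of a symlink is atomic on POSIX.
        idx = 0
        while True:
            tmp_link = "%s.TMP.%d" % (link_name, idx)
            try:
                os.symlink(target_dir, tmp_link)
                break
            except OSError as exc:
                if exc.errno != errno.EEXIST:
                    raise
                idx += 1
        os.rename(tmp_link, link_name)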
diff --git a/buildscripts/promote_silent_failures.py b/buildscripts/promote_silent_failures.py
index 81bdb72e694..cf1ddab7c7a 100644
--- a/buildscripts/promote_silent_failures.py
+++ b/buildscripts/promote_silent_failures.py
@@ -1,6 +1,5 @@
#!/usr/bin/env python
-"""
-Converts silent test failures into non-silent failures.
+"""Convert silent test failures into non-silent failures.
For any test file with at least 2 executions in the report.json file that has a "silentfail"
status, this script will change the output report to have a "fail" status instead.
@@ -22,11 +21,13 @@ if __name__ == "__main__" and __package__ is None:
def read_json_file(json_file):
+ """Return contents of a JSON file."""
with open(json_file) as json_data:
return json.load(json_data)
def main():
+ """Execute Main program."""
usage = "usage: %prog [options] report.json"
parser = optparse.OptionParser(usage=usage)
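The transformation the module docstring describes can be sketched in a few lines; the report layout assumed here ("results" entries with "test_file" and "status" keys) is inferred from the surrounding scripts, not confirmed by this hunk:

    import collections
    import json

    def promote_silent_failures(report_path):
        # Assumed layout: {"results": [{"test_file": ..., "status": ...}, ...]}
        with open(report_path) as fh:
            report = json.load(fh)
        counts = collections.Counter(res["test_file"] for res in report["results"])
        for res in report["results"]:
            # Promote "silentfail" to "fail" for tests executed at least twice.
            if counts[res["test_file"]] >= 2 and res.get("status") == "silentfail":
                res["status"] = "fail"
        return report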
diff --git a/buildscripts/prune_check.py b/buildscripts/prune_check.py
index 570715c4c1f..1579c396fda 100644
--- a/buildscripts/prune_check.py
+++ b/buildscripts/prune_check.py
@@ -1,26 +1,28 @@
#!/usr/bin/env python2
-""" This program stamps the shared scons directory with a timestamp so we can
- determine the last prune time and run the prune script on a schedule.
- It is meant to be invoked from the shell:
+"""Prune check program.
- if python prune_check.py; then
- echo 'Pruning'
- else
- echo 'Less than 24 hours, waiting ...'
- fi
+This program stamps the shared scons directory with a timestamp so we can
+determine the last prune time and run the prune script on a schedule.
+It is meant to be invoked from the shell:
- The script can be invoked with optional arguments for mount point and 'seconds
- since last prune' (default is 86400 - 24 hours). Use -h to see options and defaults.
+if python prune_check.py; then
+ echo 'Pruning'
+else
+ echo 'Less than 24 hours, waiting ...'
+fi
- python prune_check.py -m '/mount_point' -p 86400
+The script can be invoked with optional arguments for mount point and 'seconds
+since last prune' (default is 86400 - 24 hours). Use -h to see options and defaults.
- To write the latest timestamp to a directory
+python prune_check.py -m '/mount_point' -p 86400
- python prune_check.py -w
+To write the latest timestamp to a directory
- If it is time to prune (ie. more than 24 hours since the last timestamp),
- the script exits with a 0 return code.
- Otherwise the script returns exit code 1.
+python prune_check.py -w
+
+If it is time to prune (ie. more than 24 hours since the last timestamp),
+the script exits with a 0 return code.
+Otherwise the script returns exit code 1.
"""
import argparse
@@ -33,23 +35,23 @@ DATE_TIME_STR = "%Y-%m-%d %H:%M:%S"
def get_prune_file_path(mount_point):
- """ Get the shared scons directory for this AMI """
- with open('/etc/mongodb-build-system-id', 'r') as f:
- uuid = f.read().strip()
+ """Get the shared scons directory for this AMI."""
+ with open('/etc/mongodb-build-system-id', 'r') as fh:
+ uuid = fh.read().strip()
return os.path.join(mount_point, uuid, 'info', 'last_prune_time')
def write_last_prune_time(last_prune_time, prune_file_path):
- """ Write the last prune timestamp in a 'last_prune_time' file """
- with open(prune_file_path, 'w') as f:
- f.write(last_prune_time.strftime(DATE_TIME_STR) + '\n')
+ """Write the last prune timestamp in a 'last_prune_time' file."""
+ with open(prune_file_path, 'w') as fh:
+ fh.write(last_prune_time.strftime(DATE_TIME_STR) + '\n')
def retrieve_last_prune_time(prune_file_path):
- """ Get the last prune time from the 'last_prune_time' file """
+ """Get the last prune time from the 'last_prune_time' file."""
if os.path.isfile(prune_file_path):
- with open(prune_file_path, 'r') as f:
- last_prune_time_str = f.read().strip()
+ with open(prune_file_path, 'r') as fh:
+ last_prune_time_str = fh.read().strip()
last_prune_time = datetime.strptime(last_prune_time_str, DATE_TIME_STR)
else:
last_prune_time = datetime.utcnow()
@@ -59,8 +61,9 @@ def retrieve_last_prune_time(prune_file_path):
def check_last_prune_time(args):
- """ Returns exit code 0 if time to run again, else returns exit code 1
- This is meant to be called from the shell
+ """Return exit code 0 if time to run again, else return exit code 1.
+
+ This is meant to be called from the shell
"""
seconds_since_last_prune = args.prune_seconds
@@ -87,7 +90,7 @@ def check_last_prune_time(args):
def get_command_line_args():
- """ Get the command line arguments """
+ """Get the command line arguments."""
parser = argparse.ArgumentParser()
parser.add_argument('-m', '--mount_point', type=str, required=False,
help="The base mount where efs is mounted. Default is '/efs'",
@@ -102,6 +105,7 @@ def get_command_line_args():
def main():
+ """Execute Main program."""
args = get_command_line_args()
mount_point = args.mount_point
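The exit-code contract in the module docstring (0 means "prune now", 1 means "wait") is easy to reproduce; a minimal sketch assuming the same timestamp format, with a hardcoded example timestamp standing in for the stored file contents:

    import sys
    from datetime import datetime

    DATE_TIME_STR = "%Y-%m-%d %H:%M:%S"

    def seconds_since(last_prune_time_str):
        # Parse the stored timestamp and return the elapsed time in seconds.
        last_prune_time = datetime.strptime(last_prune_time_str, DATE_TIME_STR)
        return (datetime.utcnow() - last_prune_time).total_seconds()

    if __name__ == "__main__":
        # Exit 0 (time to prune) after 24 hours, otherwise exit 1 (wait).
        sys.exit(0 if seconds_since("2018-03-26 00:00:00") > 86400 else 1)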
diff --git a/buildscripts/pylinters.py b/buildscripts/pylinters.py
index 617406b2fb3..9ddce0a2c30 100755
--- a/buildscripts/pylinters.py
+++ b/buildscripts/pylinters.py
@@ -48,7 +48,7 @@ def get_py_linter(linter_filter):
linter_candidates = [linter for linter in _LINTERS if linter.cmd_name in linter_list]
- if len(linter_candidates) == 0:
+ if not linter_candidates:
raise ValueError("No linters found for filter '%s'" % (linter_filter))
return linter_candidates
@@ -56,18 +56,12 @@ def get_py_linter(linter_filter):
def is_interesting_file(file_name):
# type: (str) -> bool
- """"Return true if this file should be checked."""
+ """Return true if this file should be checked."""
return file_name.endswith(".py") and (file_name.startswith("buildscripts/idl")
or file_name.startswith("buildscripts/linter")
or file_name.startswith("buildscripts/pylinters.py"))
-def _get_build_dir():
- # type: () -> str
- """Get the location of the scons' build directory in case we need to download clang-format."""
- return os.path.join(git.get_base_dir(), "build")
-
-
def _lint_files(linters, config_dict, file_names):
# type: (str, Dict[str, str], List[str]) -> None
"""Lint a list of files with clang-format."""
@@ -123,7 +117,7 @@ def _fix_files(linters, config_dict, file_names):
# Get a list of linters which return a valid command for get_fix_cmd()
fix_list = [fixer for fixer in linter_list if fixer.get_fix_cmd_args("ignore")]
- if len(fix_list) == 0:
+ if not fix_list:
raise ValueError("Cannot find any linters '%s' that support fixing." % (linters))
lint_runner = runner.LintRunner()
@@ -152,7 +146,7 @@ def fix_func(linters, config_dict, file_names):
def main():
# type: () -> None
- """Main entry point."""
+ """Execute Main entry point."""
parser = argparse.ArgumentParser(description='PyLinter frontend.')
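The len(...) == 0 rewrites in this file (and in resmoke.py below) apply the PEP 8 truthiness idiom that pylint's len-as-condition check recommends; for example:

    linter_candidates = []

    # Preferred: empty sequences are falsy (PEP 8).
    if not linter_candidates:
        print("No linters found")

    # Equivalent, but flagged by pylint's len-as-condition check.
    if len(linter_candidates) == 0:
        print("No linters found")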
diff --git a/buildscripts/remote_operations.py b/buildscripts/remote_operations.py
index b75af20a049..a1f95f117e8 100755
--- a/buildscripts/remote_operations.py
+++ b/buildscripts/remote_operations.py
@@ -21,7 +21,7 @@ if os.name == "posix" and sys.version_info[0] == 2:
warnings.warn(("Falling back to using the subprocess module because subprocess32 isn't"
" available. When using the subprocess module, a child process may trigger"
" an invalid free(). See SERVER-22219 for more details."), RuntimeWarning)
- import subprocess
+ import subprocess # type: ignore
else:
import subprocess
@@ -42,7 +42,7 @@ _SSH_CONNECTION_ERRORS = [
def posix_path(path):
- """ Returns posix path, used on Windows since scp requires posix style paths. """
+ """Return posix path, used on Windows since scp requires posix style paths."""
# If path is already quoted, we need to remove the quotes before calling
path_quote = "\'" if path.startswith("\'") else ""
path_quote = "\"" if path.startswith("\"") else path_quote
@@ -54,11 +54,13 @@ def posix_path(path):
return "{quote}{path}{quote}".format(quote=path_quote, path=new_path)
-class RemoteOperations(object):
+class RemoteOperations(object): # pylint: disable=too-many-instance-attributes
"""Class to support remote operations."""
- def __init__(self, user_host, ssh_connection_options=None, ssh_options=None, scp_options=None,
- retries=0, retry_sleep=0, debug=False, shell_binary="/bin/bash", use_shell=False):
+ def __init__( # pylint: disable=too-many-arguments
+ self, user_host, ssh_connection_options=None, ssh_options=None, scp_options=None,
+ retries=0, retry_sleep=0, debug=False, shell_binary="/bin/bash", use_shell=False):
+ """Initialize RemoteOperations."""
self.user_host = user_host
self.ssh_connection_options = ssh_connection_options if ssh_connection_options else ""
@@ -85,7 +87,7 @@ class RemoteOperations(object):
return process.poll(), buff_stdout
def _remote_access(self):
- """ This will check if a remote session is possible. """
+ """Check if a remote session is possible."""
cmd = "ssh {} {} {} date".format(self.ssh_connection_options, self.ssh_options,
self.user_host)
attempt_num = 0
@@ -108,19 +110,20 @@ class RemoteOperations(object):
return self._call(cmd)
def access_established(self):
- """ Returns True if initial access was establsished. """
+ """Return True if initial access was establsished."""
return not self._access_code
def access_info(self):
- """ Returns return code and output buffer from initial access attempt(s). """
+ """Return the return code and output buffer from initial access attempt(s)."""
return self._access_code, self._access_buff
- def operation(self, operation_type, operation_param, operation_dir=None):
- """ Main entry for remote operations. Returns (code, output).
+ def operation( # pylint: disable=too-many-branches
+ self, operation_type, operation_param, operation_dir=None):
+ """Execute Main entry for remote operations. Returns (code, output).
- 'operation_type' supports remote shell and copy operations.
- 'operation_param' can either be a list or string of commands or files.
- 'operation_dir' is '.' if unspecified for 'copy_*'.
+ 'operation_type' supports remote shell and copy operations.
+ 'operation_param' can either be a list or string of commands or files.
+ 'operation_dir' is '.' if unspecified for 'copy_*'.
"""
if not self.access_established():
@@ -195,23 +198,23 @@ class RemoteOperations(object):
return final_ret, buff
def shell(self, operation_param, operation_dir=None):
- """ Helper for remote shell operations. """
+ """Provide helper for remote shell operations."""
return self.operation(operation_type="shell", operation_param=operation_param,
operation_dir=operation_dir)
def copy_to(self, operation_param, operation_dir=None):
- """ Helper for remote copy_to operations. """
+ """Provide helper for remote copy_to operations."""
return self.operation(operation_type="copy_to", operation_param=operation_param,
operation_dir=operation_dir)
def copy_from(self, operation_param, operation_dir=None):
- """ Helper for remote copy_from operations. """
+ """Provide helper for remote copy_from operations."""
return self.operation(operation_type="copy_from", operation_param=operation_param,
operation_dir=operation_dir)
-def main():
- """ Main program. """
+def main(): # pylint: disable=too-many-branches,too-many-statements
+ """Execute Main program."""
parser = optparse.OptionParser(description=__doc__)
control_options = optparse.OptionGroup(parser, "Control options")
@@ -336,10 +339,10 @@ def main():
user_host=options.user_host, ssh_connection_options=ssh_connection_options,
ssh_options=ssh_options, scp_options=scp_options, retries=options.retries,
retry_sleep=options.retry_sleep, debug=options.debug)
- ret_code, buffer = remote_op.operation(options.operation, operation_param, operation_dir)
+ ret_code, buff = remote_op.operation(options.operation, operation_param, operation_dir)
if options.verbose:
print("Return code: {} for command {}".format(ret_code, sys.argv))
- print(buffer)
+ print(buff)
sys.exit(ret_code)
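A hypothetical usage sketch of the class as declared above; the host string and paths are invented for illustration:

    from buildscripts.remote_operations import RemoteOperations

    remote_op = RemoteOperations(user_host="ec2-user@10.0.0.5", retries=3, retry_sleep=10)
    if remote_op.access_established():
        # Run a shell command remotely, then copy a file back to the local '.'.
        ret_code, buff = remote_op.shell("ls -ld /data/db")
        print("Return code: {} output: {}".format(ret_code, buff))
        ret_code, buff = remote_op.copy_from("/data/db/mongod.log", operation_dir=".")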
diff --git a/buildscripts/requirements.txt b/buildscripts/requirements.txt
index 2218745ec09..8164624bcc4 100644
--- a/buildscripts/requirements.txt
+++ b/buildscripts/requirements.txt
@@ -7,11 +7,11 @@ pyyaml == 3.11
unittest-xml-reporting == 2.1.0
# Linters
yapf == 0.21.0
-mypy == 0.501 ; python_version > "3"
+mypy == 0.580 ; python_version > "3"
# typing in Python 2 for mypy
typing == 3.6.1; python_version < "3"
-pylint == 1.6.5
-pydocstyle == 1.1.1
+pylint == 1.8.3
+pydocstyle == 2.1.1
# resmoke.py
-r resmokelib/requirements.txt
# generate_error_codes.py
diff --git a/buildscripts/resmoke.py b/buildscripts/resmoke.py
index 798e11e2c46..a9f079d02b1 100755
--- a/buildscripts/resmoke.py
+++ b/buildscripts/resmoke.py
@@ -1,7 +1,5 @@
#!/usr/bin/env python
-"""
-Command line utility for executing MongoDB tests of all kinds.
-"""
+"""Command line utility for executing MongoDB tests of all kinds."""
from __future__ import absolute_import
@@ -14,14 +12,13 @@ import time
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-from buildscripts import resmokelib
+from buildscripts import resmokelib # pylint: disable=wrong-import-position
-def _execute_suite(suite):
- """
- Executes the test suite, failing fast if requested.
+def _execute_suite(suite): # pylint: disable=too-many-branches,too-many-return-statements
+ """Execute the test suite, failing fast if requested.
- Returns true if the execution of the suite was interrupted by the
+ Return true if the execution of the suite was interrupted by the
user, and false otherwise.
"""
@@ -36,7 +33,7 @@ def _execute_suite(suite):
if resmokelib.config.DRY_RUN == "tests":
sb = []
sb.append("Tests that would be run in suite %s:" % suite.get_display_name())
- if len(suite.tests) > 0:
+ if suite.tests:
for test in suite.tests:
sb.append(test)
else:
@@ -44,7 +41,7 @@ def _execute_suite(suite):
logger.info("\n".join(sb))
sb = []
sb.append("Tests that would be excluded from suite %s:" % suite.get_display_name())
- if len(suite.excluded) > 0:
+ if suite.excluded:
for test in suite.excluded:
sb.append(test)
else:
@@ -56,7 +53,7 @@ def _execute_suite(suite):
suite.return_code = 0
return False
- if len(suite.tests) == 0:
+ if not suite.tests:
logger.info("Skipping %ss, no tests to run", suite.test_kind)
# Set a successful return code on the test suite because we want to output the tests
@@ -85,7 +82,7 @@ def _execute_suite(suite):
except IOError:
suite.return_code = 74 # Exit code for IOError on POSIX systems.
return True
- except:
+ except: # pylint: disable=bare-except
logger.exception("Encountered an error when running %ss of suite %s.", suite.test_kind,
suite.get_display_name())
suite.return_code = 2
@@ -93,6 +90,7 @@ def _execute_suite(suite):
finally:
if archive:
archive.exit()
+ return True
def _log_summary(logger, suites, time_taken):
@@ -107,8 +105,7 @@ def _summarize_suite(suite):
def _dump_suite_config(suite, logging_config):
- """
- Returns a string that represents the YAML configuration of a suite.
+ """Return a string that represents the YAML configuration of a suite.
TODO: include the "options" key in the result
"""
@@ -126,9 +123,9 @@ def _dump_suite_config(suite, logging_config):
def find_suites_by_test(suites):
- """
- Looks up what other resmoke suites run the tests specified in the suites
- parameter. Returns a dict keyed by test name, value is array of suite names.
+ """Look up what other resmoke suites run the tests specified in the suites parameter.
+
+ Return a dict keyed by test name, value is array of suite names.
"""
memberships = {}
@@ -146,14 +143,10 @@ def _list_suites_and_exit(logger, exit_code=0):
class Main(object):
- """
- A class for executing potentially multiple resmoke.py test suites.
- """
+ """A class for executing potentially multiple resmoke.py test suites."""
def __init__(self):
- """
- Initializes the Main instance by parsing the command line arguments.
- """
+ """Initialize the Main instance by parsing the command line arguments."""
self.__start_time = time.time()
@@ -162,17 +155,13 @@ class Main(object):
self.__args = args
def _get_suites(self):
- """
- Returns a list of resmokelib.testing.suite.Suite instances to execute.
- """
+ """Return a list of resmokelib.testing.suite.Suite instances to execute."""
return resmokelib.suitesconfig.get_suites(
suite_files=self.__values.suite_files.split(","), test_files=self.__args)
def run(self):
- """
- Executes the list of resmokelib.testing.suite.Suite instances returned by _get_suites().
- """
+ """Execute the list of resmokelib.testing.suite.Suite instances."""
logging_config = resmokelib.parser.get_logging_config(self.__values)
resmokelib.logging.loggers.configure_loggers(logging_config)
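The disable comment above exists because the sys.path fix-up has to run before the package import; the pattern, as resmoke.py itself uses it, looks like:

    import os
    import sys

    # Make the repository root importable when the script is run directly ...
    if __name__ == "__main__" and __package__ is None:
        sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

    # ... which forces a non-top-of-file import that pylint must be told about.
    from buildscripts import resmokelib  # pylint: disable=wrong-import-position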
diff --git a/buildscripts/resmokeconfig/__init__.py b/buildscripts/resmokeconfig/__init__.py
index 37f5a889956..b3462adcc2c 100644
--- a/buildscripts/resmokeconfig/__init__.py
+++ b/buildscripts/resmokeconfig/__init__.py
@@ -1,3 +1,4 @@
+"""Resmokeconfig module."""
from __future__ import absolute_import
from .suites import NAMED_SUITES
diff --git a/buildscripts/resmokeconfig/loggers/__init__.py b/buildscripts/resmokeconfig/loggers/__init__.py
index 1cecd4d110e..5342639c567 100644
--- a/buildscripts/resmokeconfig/loggers/__init__.py
+++ b/buildscripts/resmokeconfig/loggers/__init__.py
@@ -1,7 +1,4 @@
-"""
-Defines a mapping of shortened names for logger configuration files to
-their full path.
-"""
+"""Defines a mapping of shortened names for logger configuration files to their full path."""
from __future__ import absolute_import
@@ -10,11 +7,9 @@ import os.path
def _get_named_loggers():
- """
- Explores this directory for any YAML configuration files.
+ """Explore this directory for any YAML configuration files.
- Returns a mapping of basenames without the file extension to their
- full path.
+ Returns a mapping of basenames without the file extension to their full path.
"""
dirname = os.path.dirname(__file__)
diff --git a/buildscripts/resmokeconfig/suites/__init__.py b/buildscripts/resmokeconfig/suites/__init__.py
index 4cb601ba9d9..87d378616bc 100644
--- a/buildscripts/resmokeconfig/suites/__init__.py
+++ b/buildscripts/resmokeconfig/suites/__init__.py
@@ -1,7 +1,4 @@
-"""
-Defines a mapping of shortened names for suite configuration files to
-their full path.
-"""
+"""Defines a mapping of shortened names for suite configuration files to their full path."""
from __future__ import absolute_import
@@ -10,11 +7,9 @@ import os.path
def _get_named_suites():
- """
- Explores this directory for any YAML configuration files.
+ """Explore this directory for any YAML configuration files.
- Returns a mapping of basenames without the file extension to their
- full path.
+ Returns a mapping of basenames without the file extension to their full path.
"""
dirname = os.path.dirname(__file__)
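Both _get_named_loggers() and _get_named_suites() describe the same directory scan; a sketch of that mapping, assuming .yml extensions (the real helpers may accept more):

    import glob
    import os.path

    def _get_named_configs(dirname):
        # Map basenames without extension to full paths,
        # e.g. {"core": "buildscripts/resmokeconfig/suites/core.yml"}.
        named = {}
        for path in glob.glob(os.path.join(dirname, "*.yml")):
            short_name = os.path.splitext(os.path.basename(path))[0]
            named[short_name] = path
        return named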
diff --git a/buildscripts/resmokelib/config.py b/buildscripts/resmokelib/config.py
index 4c8f0a4e720..66753c389db 100644
--- a/buildscripts/resmokelib/config.py
+++ b/buildscripts/resmokelib/config.py
@@ -1,6 +1,4 @@
-"""
-Configuration options for resmoke.py.
-"""
+"""Configuration options for resmoke.py."""
from __future__ import absolute_import
@@ -113,19 +111,16 @@ _SuiteOptions = collections.namedtuple("_SuiteOptions", [
class SuiteOptions(_SuiteOptions):
- """
- A class for representing top-level options to resmoke.py that can also be set at the
- suite-level.
- """
+ """Represent top-level options to resmoke.py that can also be set at the suite-level."""
INHERIT = object()
ALL_INHERITED = None
@classmethod
def combine(cls, *suite_options_list):
- """
- Returns a SuiteOptions instance representing the combination of all SuiteOptions in
- 'suite_options_list'.
+ """Return SuiteOptions instance.
+
+ This object represents the combination of all SuiteOptions in 'suite_options_list'.
"""
combined_options = cls.ALL_INHERITED._asdict()
@@ -158,8 +153,9 @@ class SuiteOptions(_SuiteOptions):
return cls(**combined_options)
def resolve(self):
- """
- Returns a SuiteOptions instance representing the options overridden at the suite-level and
+ """Return a SuiteOptions instance.
+
+ This represents the options overridden at the suite-level and
the inherited options from the top-level.
"""
@@ -183,8 +179,8 @@ class SuiteOptions(_SuiteOptions):
return SuiteOptions(**options)
-SuiteOptions.ALL_INHERITED = SuiteOptions(**dict(
- zip(SuiteOptions._fields, itertools.repeat(SuiteOptions.INHERIT))))
+SuiteOptions.ALL_INHERITED = SuiteOptions( # type: ignore
+ **dict(zip(SuiteOptions._fields, itertools.repeat(SuiteOptions.INHERIT))))
##
# Variables that are set by the user at the command line or with --options.
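The INHERIT sentinel plus namedtuple pattern that SuiteOptions.combine() and resolve() describe can be shown in miniature; the field names here are invented:

    import collections

    _Options = collections.namedtuple("_Options", ["fail_fast", "num_repeats"])

    class Options(_Options):
        INHERIT = object()  # sentinel: defer this field to the outer level

        def resolve_against(self, parent):
            # Replace every INHERIT field with the parent's value.
            resolved = dict(
                (field, getattr(parent, field) if value is Options.INHERIT else value)
                for field, value in self._asdict().items())
            return Options(**resolved)

    top = Options(fail_fast=False, num_repeats=1)
    suite = Options(fail_fast=Options.INHERIT, num_repeats=5)
    print(suite.resolve_against(top))  # Options(fail_fast=False, num_repeats=5)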
diff --git a/buildscripts/resmokelib/core/__init__.py b/buildscripts/resmokelib/core/__init__.py
index 29a19a52500..78a8b4924b8 100644
--- a/buildscripts/resmokelib/core/__init__.py
+++ b/buildscripts/resmokelib/core/__init__.py
@@ -1,3 +1,4 @@
+"""Resmokelib core module."""
from __future__ import absolute_import
from . import process
diff --git a/buildscripts/resmokelib/core/network.py b/buildscripts/resmokelib/core/network.py
index eda2c95417e..f42e6a86d85 100644
--- a/buildscripts/resmokelib/core/network.py
+++ b/buildscripts/resmokelib/core/network.py
@@ -1,7 +1,4 @@
-"""
-Class used to allocate ports for use by various mongod and mongos
-processes involved in running the tests.
-"""
+"""Class used to allocate ports for mongod and mongos processes involved in running the tests."""
from __future__ import absolute_import
@@ -14,16 +11,14 @@ from .. import errors
def _check_port(func):
- """
- A decorator that verifies the port returned by the wrapped function
- is in the valid range.
+ """Provide decorator that verifies the port returned by the wrapped function is in range.
- Returns the port if it is valid, and raises a PortAllocationError
- otherwise.
+ Returns the port if it is valid, and raises a PortAllocationError otherwise.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
+ """Provide wrapper function."""
port = func(*args, **kwargs)
if port < 0:
@@ -39,8 +34,7 @@ def _check_port(func):
class PortAllocator(object):
- """
- This class is responsible for allocating ranges of ports.
+ """Class responsible for allocating ranges of ports.
It reserves a range of ports for each job with the first part of
that range used for the fixture started by that job, and the second
@@ -62,13 +56,12 @@ class PortAllocator(object):
_NUM_USED_PORTS_LOCK = threading.Lock()
# Used to keep track of how many ports a fixture has allocated.
- _NUM_USED_PORTS = collections.defaultdict(int)
+ _NUM_USED_PORTS = collections.defaultdict(int) # type: ignore
@classmethod
@_check_port
def next_fixture_port(cls, job_num):
- """
- Returns the next port for a fixture to use.
+ """Return the next port for a fixture to use.
Raises a PortAllocationError if the fixture has requested more
ports than are reserved per job, or if the next port is not a
@@ -91,9 +84,7 @@ class PortAllocator(object):
@classmethod
@_check_port
def min_test_port(cls, job_num):
- """
- For the given job, returns the lowest port that is reserved for
- use by tests.
+ """Return the lowest port that is reserved for use by tests, for specified job.
Raises a PortAllocationError if that port is higher than the
maximum port.
@@ -103,9 +94,7 @@ class PortAllocator(object):
@classmethod
@_check_port
def max_test_port(cls, job_num):
- """
- For the given job, returns the highest port that is reserved
- for use by tests.
+ """Return the highest port that is reserved for use by tests, for specified job.
Raises a PortAllocationError if that port is higher than the
maximum port.
@@ -115,8 +104,7 @@ class PortAllocator(object):
@classmethod
def reset(cls):
- """
- Resets the internal state of the PortAllocator.
+ """Reset the internal state of the PortAllocator.
This method is intended to be called each time resmoke.py starts
a new test suite.
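A self-contained sketch of the decorator contract that _check_port documents, with an invented MAX_PORT bound:

    import functools

    MAX_PORT = 65535

    class PortAllocationError(Exception):
        """Raised when a computed port falls outside the valid range."""

    def check_port(func):
        # Validate the port returned by the wrapped function.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            port = func(*args, **kwargs)
            if port < 0 or port > MAX_PORT:
                raise PortAllocationError("Invalid port: %d" % port)
            return port
        return wrapper

    @check_port
    def next_port(base, offset):
        return base + offset

    print(next_port(20000, 10))  # 20010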
diff --git a/buildscripts/resmokelib/core/pipe.py b/buildscripts/resmokelib/core/pipe.py
index bb080721b2d..5aba2ed9a81 100644
--- a/buildscripts/resmokelib/core/pipe.py
+++ b/buildscripts/resmokelib/core/pipe.py
@@ -1,6 +1,7 @@
"""
-Helper class to read output of a subprocess. Used to avoid deadlocks
-from the pipe buffer filling up and blocking the subprocess while it's
+Helper class to read output of a subprocess.
+
+Used to avoid deadlocks from the pipe buffer filling up and blocking the subprocess while it's
being waited on.
"""
@@ -9,11 +10,8 @@ from __future__ import absolute_import
import threading
-class LoggerPipe(threading.Thread):
- """
- Asynchronously reads the output of a subprocess and sends it to a
- logger.
- """
+class LoggerPipe(threading.Thread): # pylint: disable=too-many-instance-attributes
+ """Asynchronously reads the output of a subprocess and sends it to a logger."""
# The start() and join() methods are not intended to be called directly on the LoggerPipe
# instance. Since we override them for that effect, the super's version are preserved here.
@@ -21,10 +19,7 @@ class LoggerPipe(threading.Thread):
__join = threading.Thread.join
def __init__(self, logger, level, pipe_out):
- """
- Initializes the LoggerPipe with the specified logger, logging
- level to use, and pipe to read from.
- """
+ """Initialize the LoggerPipe with the specified arguments."""
threading.Thread.__init__(self)
# Main thread should not call join() when exiting
@@ -43,12 +38,11 @@ class LoggerPipe(threading.Thread):
LoggerPipe.__start(self)
def start(self):
+ """Start not implemented."""
raise NotImplementedError("start should not be called directly")
def run(self):
- """
- Reads the output from 'pipe_out' and logs each line to 'logger'.
- """
+ """Read the output from 'pipe_out' and logs each line to 'logger'."""
with self.__lock:
self.__started = True
@@ -70,14 +64,17 @@ class LoggerPipe(threading.Thread):
self.__condition.notify_all()
def join(self, timeout=None):
+ """Join not implemented."""
raise NotImplementedError("join should not be called directly")
def wait_until_started(self):
+ """Wait until started."""
with self.__lock:
while not self.__started:
self.__condition.wait()
def wait_until_finished(self):
+ """Wait until finished."""
with self.__lock:
while not self.__finished:
self.__condition.wait()
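The deadlock the module docstring warns about, and the background-reader cure, in sketch form (drain_pipe is hypothetical; LoggerPipe layers locking and lifecycle control on top of this idea):

    import logging
    import subprocess
    import threading

    def drain_pipe(pipe_out, logger, level=logging.INFO):
        # Read on a separate thread so the child never blocks on a full
        # pipe buffer while the parent is blocked in wait().
        def reader():
            for line in iter(pipe_out.readline, b""):
                logger.log(level, line.rstrip().decode("utf-8", "replace"))
            pipe_out.close()
        thread = threading.Thread(target=reader)
        thread.daemon = True
        thread.start()
        return thread

    logging.basicConfig(level=logging.INFO)
    proc = subprocess.Popen(["echo", "hello"], stdout=subprocess.PIPE)
    reader_thread = drain_pipe(proc.stdout, logging.getLogger("child"))
    proc.wait()
    reader_thread.join()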
diff --git a/buildscripts/resmokelib/core/process.py b/buildscripts/resmokelib/core/process.py
index 42f9454bd91..4b94b2dd016 100644
--- a/buildscripts/resmokelib/core/process.py
+++ b/buildscripts/resmokelib/core/process.py
@@ -1,5 +1,4 @@
-"""
-A more reliable way to create and destroy processes.
+"""A more reliable way to create and destroy processes.
Uses job objects when running on Windows to ensure that all created
processes are terminated.
@@ -30,12 +29,12 @@ if os.name == "posix" and sys.version_info[0] == 2:
warnings.warn(("Falling back to using the subprocess module because subprocess32 isn't"
" available. When using the subprocess module, a child process may trigger"
" an invalid free(). See SERVER-22219 for more details."), RuntimeWarning)
- import subprocess
+ import subprocess # type: ignore
else:
import subprocess
-from . import pipe
-from .. import utils
+from . import pipe # pylint: disable=wrong-import-position
+from .. import utils # pylint: disable=wrong-import-position
# Attempt to avoid race conditions (e.g. hangs caused by a file descriptor being left open) when
# starting subprocesses concurrently from multiple threads by guarding calls to subprocess.Popen()
@@ -87,15 +86,12 @@ if sys.platform == "win32":
class Process(object):
- """
- Wrapper around subprocess.Popen class.
- """
+ """Wrapper around subprocess.Popen class."""
+
+ # pylint: disable=protected-access
def __init__(self, logger, args, env=None, env_vars=None):
- """
- Initializes the process with the specified logger, arguments,
- and environment.
- """
+ """Initialize the process with the specified logger, arguments, and environment."""
# Ensure that executable files that don't already have an
# extension on Windows have a ".exe" extension.
@@ -115,10 +111,7 @@ class Process(object):
self._stderr_pipe = None
def start(self):
- """
- Starts the process and the logger pipes for its stdout and
- stderr.
- """
+ """Start the process and the logger pipes for its stdout and stderr."""
creation_flags = 0
if sys.platform == "win32" and _JOB_OBJECT is not None:
@@ -158,12 +151,12 @@ class Process(object):
if return_code == win32con.STILL_ACTIVE:
raise
- def stop(self, kill=False):
+ def stop(self, kill=False): # pylint: disable=too-many-branches
"""Terminate the process."""
if sys.platform == "win32":
# Attempt to cleanly shutdown mongod.
- if not kill and len(self.args) > 0 and self.args[0].find("mongod") != -1:
+ if not kill and self.args and self.args[0].find("mongod") != -1:
mongo_signal_handle = None
try:
mongo_signal_handle = win32event.OpenEvent(
@@ -218,13 +211,11 @@ class Process(object):
raise
def poll(self):
+ """Poll."""
return self._process.poll()
def wait(self):
- """
- Waits until the process has terminated and all output has been
- consumed by the logger pipes.
- """
+ """Wait until process has terminated and all output has been consumed by the logger pipes."""
return_code = self._process.wait()
@@ -236,9 +227,7 @@ class Process(object):
return return_code
def as_command(self):
- """
- Returns an equivalent command line invocation of the process.
- """
+ """Return an equivalent command line invocation of the process."""
default_env = os.environ
env_diff = self.env.copy()
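The subprocess32 fallback that both this file and remote_operations.py guard looks roughly like the following; the try/except half is inferred from the warning branch shown in the hunks:

    import os
    import sys
    import warnings

    if os.name == "posix" and sys.version_info[0] == 2:
        try:
            import subprocess32 as subprocess
        except ImportError:
            warnings.warn("Falling back to subprocess; see SERVER-22219.", RuntimeWarning)
            import subprocess  # type: ignore
    else:
        import subprocess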
diff --git a/buildscripts/resmokelib/core/programs.py b/buildscripts/resmokelib/core/programs.py
index b868aa8a1ba..db7a98a52ce 100644
--- a/buildscripts/resmokelib/core/programs.py
+++ b/buildscripts/resmokelib/core/programs.py
@@ -1,5 +1,4 @@
-"""
-Utility functions to create MongoDB processes.
+"""Utility functions to create MongoDB processes.
Handles all the nitty-gritty parameter conversion.
"""
@@ -16,11 +15,9 @@ from .. import config
from .. import utils
-def mongod_program(logger, executable=None, process_kwargs=None, **kwargs):
- """
- Returns a Process instance that starts a mongod executable with
- arguments constructed from 'kwargs'.
- """
+def mongod_program( # pylint: disable=too-many-branches
+ logger, executable=None, process_kwargs=None, **kwargs):
+ """Return a Process instance that starts mongod arguments constructed from 'kwargs'."""
executable = utils.default_if_none(executable, config.DEFAULT_MONGOD_EXECUTABLE)
args = [executable]
@@ -117,10 +114,7 @@ def mongod_program(logger, executable=None, process_kwargs=None, **kwargs):
def mongos_program(logger, executable=None, process_kwargs=None, **kwargs):
- """
- Returns a Process instance that starts a mongos executable with
- arguments constructed from 'kwargs'.
- """
+ """Return a Process instance that starts a mongos with arguments constructed from 'kwargs'."""
executable = utils.default_if_none(executable, config.DEFAULT_MONGOS_EXECUTABLE)
args = [executable]
@@ -143,11 +137,12 @@ def mongos_program(logger, executable=None, process_kwargs=None, **kwargs):
return _process.Process(logger, args, **process_kwargs)
-def mongo_shell_program(logger, executable=None, connection_string=None, filename=None,
- process_kwargs=None, **kwargs):
- """
- Returns a Process instance that starts a mongo shell with the given connection string and
- arguments constructed from 'kwargs'.
+def mongo_shell_program( # pylint: disable=too-many-branches,too-many-locals,too-many-statements
+ logger, executable=None, connection_string=None, filename=None, process_kwargs=None,
+ **kwargs):
+ """Return a Process instance that starts a mongo shell.
+
+ The shell is started with the given connection string and arguments constructed from 'kwargs'.
"""
connection_string = utils.default_if_none(config.SHELL_CONN_STRING, connection_string)
@@ -256,8 +251,7 @@ def mongo_shell_program(logger, executable=None, connection_string=None, filenam
def _format_shell_vars(sb, path, value):
- """
- Formats 'value' in a way that can be passed to --eval.
+ """Format 'value' in a way that can be passed to --eval.
If 'value' is a dictionary, then it is unrolled into the creation of
a new JSON object with properties assigned for each key of the
@@ -277,10 +271,7 @@ def _format_shell_vars(sb, path, value):
def dbtest_program(logger, executable=None, suites=None, process_kwargs=None, **kwargs):
- """
- Returns a Process instance that starts a dbtest executable with
- arguments constructed from 'kwargs'.
- """
+ """Return a Process instance that starts a dbtest with arguments constructed from 'kwargs'."""
executable = utils.default_if_none(executable, config.DEFAULT_DBTEST_EXECUTABLE)
args = [executable]
@@ -295,10 +286,11 @@ def dbtest_program(logger, executable=None, suites=None, process_kwargs=None, **
def generic_program(logger, args, process_kwargs=None, **kwargs):
- """
- Returns a Process instance that starts an arbitrary executable with
- arguments constructed from 'kwargs'. The args parameter is an array
- of strings containing the command to execute.
+ """Return a Process instance that starts an arbitrary executable.
+
+ The executable arguments are constructed from 'kwargs'.
+
+ The args parameter is an array of strings containing the command to execute.
"""
if not utils.is_string_list(args):
@@ -311,9 +303,9 @@ def generic_program(logger, args, process_kwargs=None, **kwargs):
def _format_test_data_set_parameters(set_parameters):
- """
- Converts key-value pairs from 'set_parameters' into the comma
- delimited list format expected by the parser in servers.js.
+ """Convert key-value pairs from 'set_parameters' into a comma delimited list format.
+
+ The format is used by the parser in servers.js.
WARNING: the parsing logic in servers.js is very primitive.
Non-scalar options such as logComponentVerbosity will not work
@@ -332,9 +324,9 @@ def _format_test_data_set_parameters(set_parameters):
def _apply_set_parameters(args, set_parameter):
- """
- Converts key-value pairs from 'kwargs' into --setParameter key=value
- arguments to an executable and appends them to 'args'.
+ """Convert key-value pairs from 'kwargs' into --setParameter key=value arguments.
+
+ This result is appended to 'args'.
"""
for param_name in set_parameter:
@@ -347,10 +339,9 @@ def _apply_set_parameters(args, set_parameter):
def _apply_kwargs(args, kwargs):
- """
- Converts key-value pairs from 'kwargs' into --key value arguments
- to an executable and appends them to 'args'.
+ """Convert key-value pairs from 'kwargs' into --key value arguments.
+ This result is appended to 'args'.
A --flag without a value is represented with the empty string.
"""
@@ -363,9 +354,7 @@ def _apply_kwargs(args, kwargs):
def _set_keyfile_permissions(opts):
- """
- Change the permissions of keyfiles in 'opts' to 600, i.e. only the
- user can read and write the file.
+ """Change the permissions of keyfiles in 'opts' to 600, (only user can read and write the file).
This necessary to avoid having the mongod/mongos fail to start up
because "permissions on the keyfiles are too open".
diff --git a/buildscripts/resmokelib/errors.py b/buildscripts/resmokelib/errors.py
index 6ec329c14b5..8f49a567a79 100644
--- a/buildscripts/resmokelib/errors.py
+++ b/buildscripts/resmokelib/errors.py
@@ -1,59 +1,47 @@
-"""
-Exceptions raised by resmoke.py.
-"""
+"""Exceptions raised by resmoke.py."""
-class ResmokeError(Exception):
- """
- Base class for all resmoke.py exceptions.
- """
+class ResmokeError(Exception): # noqa: D204
+ """Base class for all resmoke.py exceptions."""
pass
-class SuiteNotFound(ResmokeError):
- """
- A suite that isn't recognized was specified.
- """
+class SuiteNotFound(ResmokeError): # noqa: D204
+ """A suite that isn't recognized was specified."""
pass
-class StopExecution(ResmokeError):
- """
- Exception that is raised when resmoke.py should stop executing tests
- if failing fast is enabled.
- """
+class StopExecution(ResmokeError): # noqa: D204
+ """Exception raised when resmoke.py should stop executing tests if failing fast is enabled."""
pass
-class UserInterrupt(StopExecution):
- """
- Exception that is raised when a user signals resmoke.py to
- unconditionally stop executing tests.
- """
+class UserInterrupt(StopExecution): # noqa: D204
+ """Exception raised when a user signals resmoke.py to unconditionally stop executing tests."""
pass
-class TestFailure(ResmokeError):
- """
- Exception that is raised by a hook in the after_test method if it
- determines the the previous test should be marked as a failure.
+class TestFailure(ResmokeError): # noqa: D204
+ """Exception raised by a hook in the after_test method.
+
+ Raised if it determines that the previous test should be marked as a failure.
"""
pass
-class ServerFailure(TestFailure):
- """
- Exception that is raised by a hook in the after_test method if it
- detects that the fixture did not exit cleanly and should be marked
+class ServerFailure(TestFailure): # noqa: D204
+ """Exception raised by a hook in the after_test method.
+
+ Raised if it detects that the fixture did not exit cleanly and should be marked
as a failure.
"""
pass
-class PortAllocationError(ResmokeError):
- """
- Exception that is raised by the PortAllocator if a port is requested
- outside of the range of valid ports, or if a fixture requests more
- ports than were reserved for that job.
+class PortAllocationError(ResmokeError): # noqa: D204
+ """Exception that is raised by the PortAllocator.
+
+ Raised if a port is requested outside of the range of valid ports, or if a
+ fixture requests more ports than were reserved for that job.
"""
pass
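The recurring # noqa: D204 above silences pydocstyle's rule that a class docstring be followed by one blank line, which these compact exception classes deliberately violate:

    class ExampleError(Exception):  # noqa: D204
        """Compact exception; D204 would otherwise demand a blank line here."""
        pass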
diff --git a/buildscripts/resmokelib/logging/__init__.py b/buildscripts/resmokelib/logging/__init__.py
index 816a62004d8..d0b4a48ac57 100644
--- a/buildscripts/resmokelib/logging/__init__.py
+++ b/buildscripts/resmokelib/logging/__init__.py
@@ -1,6 +1,4 @@
-"""
-Extension to the logging package to support buildlogger.
-"""
+"""Extension to the logging package to support buildlogger."""
from __future__ import absolute_import
diff --git a/buildscripts/resmokelib/logging/buildlogger.py b/buildscripts/resmokelib/logging/buildlogger.py
index 56a5defc5a4..5fa52c85c69 100644
--- a/buildscripts/resmokelib/logging/buildlogger.py
+++ b/buildscripts/resmokelib/logging/buildlogger.py
@@ -1,6 +1,4 @@
-"""
-Defines handlers for communicating with a buildlogger server.
-"""
+"""Define handlers for communicating with a buildlogger server."""
from __future__ import absolute_import
@@ -27,19 +25,17 @@ BUILDLOGGER_FALLBACK = None
def _log_on_error(func):
- """
- A decorator that causes any exceptions to be logged by the
- "buildlogger" Logger instance.
+ """Provide decorator that causes exceptions to be logged by the "buildlogger" Logger instance.
- Returns the wrapped function's return value, or None if an error
- was encountered.
+ Return the wrapped function's return value, or None if an error was encountered.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
+ """Provide wrapper function."""
try:
return func(*args, **kwargs)
- except:
+ except: # pylint: disable=bare-except
BUILDLOGGER_FALLBACK.exception("Encountered an error.")
return None
@@ -50,9 +46,8 @@ class _LogsSplitter(object):
"""Class with static methods used to split list of log lines into smaller batches."""
@staticmethod
- def split_logs(log_lines, max_size):
- """
- Splits the log lines into batches of size less than or equal to max_size.
+ def split_logs(log_lines, max_size): # noqa: D406,D407,D411,D413
+ """Split the log lines into batches of size less than or equal to max_size.
Args:
log_lines: A list of log lines.
@@ -65,8 +60,8 @@ class _LogsSplitter(object):
return [log_lines]
def line_size(line):
- """
- Computes the encoded JSON size of a log line as part of an array.
+ """Compute the encoded JSON size of a log line as part of an array.
+
2 is added to each string size to account for the array representation of the logs,
as each line is preceded by a '[' or a space and followed by a ',' or a ']'.
"""
@@ -88,17 +83,11 @@ class _LogsSplitter(object):
class _BaseBuildloggerHandler(handlers.BufferedHandler):
- """
- Base class of the buildlogger handler for the global logs and the
- handler for the test logs.
- """
+ """Base class of the buildlogger handler for global logs and handler for test logs."""
def __init__(self, build_config, endpoint, capacity=_SEND_AFTER_LINES,
interval_secs=_SEND_AFTER_SECS):
- """
- Initializes the buildlogger handler with the build id and
- credentials.
- """
+ """Initialize the buildlogger handler with the build id and credentials."""
handlers.BufferedHandler.__init__(self, capacity, interval_secs)
@@ -111,9 +100,9 @@ class _BaseBuildloggerHandler(handlers.BufferedHandler):
self.max_size = None
def process_record(self, record):
- """
- Returns a tuple of the time the log record was created, and the
- message because the buildlogger expects the log messages
+ """Return a tuple of the time the log record was created, and the message.
+
+ This is necessary because the buildlogger expects the log messages to be
formatted in JSON as:
[ [ <log-time-1>, <log-message-1> ],
@@ -124,14 +113,11 @@ class _BaseBuildloggerHandler(handlers.BufferedHandler):
return (record.created, msg)
def post(self, *args, **kwargs):
- """
- Convenience method for subclasses to use when making POST requests.
- """
+ """Provide convenience method for subclasses to use when making POST requests."""
return self.http_handler.post(*args, **kwargs)
- def _append_logs(self, log_lines):
- """
- Sends a POST request to the handlers endpoint with the logs that have been captured.
+ def _append_logs(self, log_lines): # noqa: D406,D407,D413
+ """Send a POST request to the handlers endpoint with the logs that have been captured.
Returns:
The number of log lines that have been successfully sent.
@@ -145,10 +131,8 @@ class _BaseBuildloggerHandler(handlers.BufferedHandler):
break
return lines_sent
- def __append_logs_chunk(self, log_lines_chunk):
- """
- Sends a log lines chunk, handles 413 Request Entity Too Large errors and retries
- if necessary.
+ def __append_logs_chunk(self, log_lines_chunk): # noqa: D406,D407,D413
+ """Send log lines chunk, handle 413 Request Entity Too Large errors & retry, if necessary.
Returns:
The number of log lines that have been successfully sent.
@@ -173,14 +157,12 @@ class _BaseBuildloggerHandler(handlers.BufferedHandler):
return self._append_logs(log_lines_chunk)
BUILDLOGGER_FALLBACK.exception("Encountered an error.")
return 0
- except:
+ except: # pylint: disable=bare-except
BUILDLOGGER_FALLBACK.exception("Encountered an error.")
return 0
def _flush_buffer_with_lock(self, buf, close_called):
- """
- Ensures all logging output has been flushed to the buildlogger
- server.
+ """Ensure all logging output has been flushed to the buildlogger server.
If _append_logs() returns false, then the log messages are added
to a separate buffer and retried the next time flush() is
@@ -205,13 +187,12 @@ class _BaseBuildloggerHandler(handlers.BufferedHandler):
class BuildloggerTestHandler(_BaseBuildloggerHandler):
- """
- Buildlogger handler for the test logs.
- """
+ """Buildlogger handler for the test logs."""
- def __init__(self, build_config, build_id, test_id, capacity=_SEND_AFTER_LINES,
- interval_secs=_SEND_AFTER_SECS):
- """Initializes the buildlogger handler with the credentials, build id, and test id."""
+ def __init__( # pylint: disable=too-many-arguments
+ self, build_config, build_id, test_id, capacity=_SEND_AFTER_LINES,
+ interval_secs=_SEND_AFTER_SECS):
+ """Initialize the buildlogger handler with the credentials, build id, and test id."""
endpoint = APPEND_TEST_LOGS_ENDPOINT % {
"build_id": build_id,
"test_id": test_id,
@@ -220,19 +201,14 @@ class BuildloggerTestHandler(_BaseBuildloggerHandler):
@_log_on_error
def _finish_test(self, failed=False):
- """
- Sends a POST request to the APPEND_TEST_LOGS_ENDPOINT with the
- test status.
- """
+ """Send a POST request to the APPEND_TEST_LOGS_ENDPOINT with the test status."""
self.post(self.endpoint, headers={
"X-Sendlogs-Test-Done": "true",
"X-Sendlogs-Test-Failed": "true" if failed else "false",
})
def close(self):
- """
- Closes the buildlogger handler.
- """
+ """Close the buildlogger handler."""
_BaseBuildloggerHandler.close(self)
@@ -241,13 +217,11 @@ class BuildloggerTestHandler(_BaseBuildloggerHandler):
class BuildloggerGlobalHandler(_BaseBuildloggerHandler):
- """
- Buildlogger handler for the global logs.
- """
+ """Buildlogger handler for the global logs."""
def __init__(self, build_config, build_id, capacity=_SEND_AFTER_LINES,
interval_secs=_SEND_AFTER_SECS):
- """Initializes the buildlogger handler with the credentials and build id."""
+ """Initialize the buildlogger handler with the credentials and build id."""
endpoint = APPEND_GLOBAL_LOGS_ENDPOINT % {"build_id": build_id}
_BaseBuildloggerHandler.__init__(self, build_config, endpoint, capacity, interval_secs)
@@ -261,6 +235,7 @@ class BuildloggerServer(object):
@_log_on_error
def __init__(self):
+ """Initialize BuildloggerServer."""
tmp_globals = {}
self.config = {}
execfile(_BUILDLOGGER_CONFIG, tmp_globals, self.config)
@@ -277,9 +252,7 @@ class BuildloggerServer(object):
@_log_on_error
def new_build_id(self, suffix):
- """
- Returns a new build id for sending global logs to.
- """
+ """Return a new build id for sending global logs to."""
username = self.config["username"]
password = self.config["password"]
builder = "%s_%s" % (self.config["builder"], suffix)
@@ -298,9 +271,7 @@ class BuildloggerServer(object):
@_log_on_error
def new_test_id(self, build_id, test_filename, test_command):
- """
- Returns a new test id for sending test logs to.
- """
+ """Return a new test id for sending test logs to."""
handler = handlers.HTTPHandler(url_root=_config.BUILDLOGGER_URL,
username=self.config["username"],
password=self.config["password"])
@@ -317,19 +288,23 @@ class BuildloggerServer(object):
return response["id"]
def get_global_handler(self, build_id, handler_info):
+ """Return the global handler."""
return BuildloggerGlobalHandler(self.config, build_id, **handler_info)
def get_test_handler(self, build_id, test_id, handler_info):
+ """Return the test handler."""
return BuildloggerTestHandler(self.config, build_id, test_id, **handler_info)
@staticmethod
def get_build_log_url(build_id):
+ """Return the build log URL."""
base_url = _config.BUILDLOGGER_URL.rstrip("/")
endpoint = APPEND_GLOBAL_LOGS_ENDPOINT % {"build_id": build_id}
return "%s/%s" % (base_url, endpoint.strip("/"))
@staticmethod
def get_test_log_url(build_id, test_id):
+ """Return the test log URL."""
base_url = _config.BUILDLOGGER_URL.rstrip("/")
endpoint = APPEND_TEST_LOGS_ENDPOINT % {"build_id": build_id, "test_id": test_id}
return "%s/%s" % (base_url, endpoint.strip("/"))
diff --git a/buildscripts/resmokelib/logging/flush.py b/buildscripts/resmokelib/logging/flush.py
index 5b2b488e51a..f5c2b356468 100644
--- a/buildscripts/resmokelib/logging/flush.py
+++ b/buildscripts/resmokelib/logging/flush.py
@@ -1,6 +1,6 @@
-"""
-Manages a thread responsible for periodically calling flush() on
-logging.Handler instances used to send logs to buildlogger.
+"""Manage a thread responsible for periodically calling flush() on logging.Handler instances.
+
+These instances are used to send logs to buildlogger.
"""
from __future__ import absolute_import
@@ -16,11 +16,9 @@ _FLUSH_THREAD = None
def start_thread():
- """
- Starts the flush thread.
- """
+ """Start the flush thread."""
- global _FLUSH_THREAD
+ global _FLUSH_THREAD # pylint: disable=global-statement
with _FLUSH_THREAD_LOCK:
if _FLUSH_THREAD is not None:
raise ValueError("FlushThread has already been started")
@@ -30,9 +28,7 @@ def start_thread():
def stop_thread():
- """
- Signals the flush thread to stop and waits until it does.
- """
+ """Signal the flush thread to stop and wait until it does."""
with _FLUSH_THREAD_LOCK:
if _FLUSH_THREAD is None:
@@ -44,12 +40,9 @@ def stop_thread():
def flush_after(handler, delay):
- """
- Adds 'handler' to the queue so that it is flushed after 'delay'
- seconds by the flush thread.
+ """Add 'handler' to the queue so that it is flushed after 'delay' seconds by the flush thread.
- Returns the scheduled event which may be used for later cancellation
- (see cancel()).
+ Return the scheduled event which may be used for later cancellation (see cancel()).
"""
if not isinstance(handler, logging.Handler):
@@ -59,12 +52,9 @@ def flush_after(handler, delay):
def close_later(handler):
- """
- Adds 'handler' to the queue so that it is closed later by the flush
- thread.
+ """Add 'handler' to the queue so that it is closed later by the flush thread.
- Returns the scheduled event which may be used for later cancelation
- (see cancel()).
+ Return the scheduled event which may be used for later cancelation (see cancel()).
"""
if not isinstance(handler, logging.Handler):
@@ -78,36 +68,27 @@ def close_later(handler):
def cancel(event):
- """
- Attempts to cancel the specified event.
+ """Attempt to cancel the specified event.
- Returns true if the event was successfully canceled, and returns
- false otherwise.
+ Returns true if the event was successfully canceled, and returns false otherwise.
"""
return _FLUSH_THREAD.cancel_event(event)
class _FlushThread(threading.Thread):
- """
- Asynchronously flushes and closes logging handlers.
- """
+ """Asynchronously flush and close logging handlers."""
_TIMEOUT = 24 * 60 * 60 # =1 day (a long time to have tests run)
def __init__(self):
- """
- Initializes the flush thread.
- """
+ """Initialize the flush thread."""
threading.Thread.__init__(self, name="FlushThread")
# Do not wait to flush the logs if interrupted by the user.
self.daemon = True
def interruptible_sleep(secs):
- """
- Waits up to 'secs' seconds or for the
- 'self.__schedule_updated' event to be set.
- """
+ """Wait up to 'secs' seconds or for the 'self.__schedule_updated' event to be set."""
# Setting 'self.__schedule_updated' in submit() will cause the scheduler to return early
# from its 'delayfunc'. This makes it so that if a new event is scheduled with
@@ -121,9 +102,7 @@ class _FlushThread(threading.Thread):
self.__terminated = threading.Event()
def run(self):
- """
- Continuously flushes and closes logging handlers.
- """
+ """Continuously flush and close logging handlers."""
try:
while not (self.__should_stop.is_set() and self.__scheduler.empty()):
@@ -146,9 +125,9 @@ class _FlushThread(threading.Thread):
self.__terminated.set()
def signal_shutdown(self):
- """
- Indicates to the flush thread that it should exit once its
- current queue of logging handlers are flushed and closed.
+ """Indicate to the flush thread that it should exit.
+
+ This will happen once its current queue of logging handlers is flushed and closed.
"""
self.__should_stop.set()
@@ -158,21 +137,16 @@ class _FlushThread(threading.Thread):
self.__schedule_updated.set()
def await_shutdown(self):
- """
- Waits for the flush thread to finish processing its current
- queue of logging handlers.
- """
+ """Wait for the flush thread to finish processing its current queue of logging handlers."""
while not self.__terminated.is_set():
# Need to pass a timeout to wait() so that KeyboardInterrupt exceptions are propagated.
self.__terminated.wait(_FlushThread._TIMEOUT)
def submit(self, action, delay):
- """
- Schedules 'action' for 'delay' seconds from now.
+ """Schedule 'action' for 'delay' seconds from now.
- Returns the scheduled event which may be used for later
- cancelation (see cancel_event()).
+ Return the scheduled event which may be used for later cancelation (see cancel_event()).
"""
event = self.__scheduler.enter(delay, 0, action, ())
@@ -180,11 +154,9 @@ class _FlushThread(threading.Thread):
return event
def cancel_event(self, event):
- """
- Attempts to cancel the specified event.
+ """Attempt to cancel the specified event.
- Returns true if the event was successfully canceled, and returns
- false otherwise.
+ Return true if the event was successfully canceled, and return false otherwise.
"""
try:
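_FlushThread.submit() and cancel_event() wrap the standard library's sched module (note the self.__scheduler.enter(delay, 0, action, ()) call above); the same scheduling and cancellation in isolation:

    import sched
    import time

    scheduler = sched.scheduler(time.time, time.sleep)

    def flush_handler(name):
        print("flushing %s" % name)

    event_a = scheduler.enter(1, 0, flush_handler, ("handler-a",))
    event_b = scheduler.enter(2, 0, flush_handler, ("handler-b",))
    scheduler.cancel(event_b)  # comparable to _FlushThread.cancel_event()
    scheduler.run()            # only handler-a fires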
diff --git a/buildscripts/resmokelib/logging/formatters.py b/buildscripts/resmokelib/logging/formatters.py
index 058c6d512c8..450d5d29cd8 100644
--- a/buildscripts/resmokelib/logging/formatters.py
+++ b/buildscripts/resmokelib/logging/formatters.py
@@ -1,22 +1,20 @@
-"""
-Custom formatters for the logging handlers.
-"""
+"""Custom formatters for the logging handlers."""
from __future__ import absolute_import
import logging
-import sys
import time
class ISO8601Formatter(logging.Formatter):
- """
- An ISO 8601 compliant formatter for log messages. It formats the
- timezone as an hour/minute offset and uses a period as the
+ """An ISO 8601 compliant formatter for log messages.
+
+ It formats the timezone as an hour/minute offset and uses a period as the
millisecond separator in order to match the log messages of MongoDB.
"""
def formatTime(self, record, datefmt=None):
+ """Return formatted time."""
converted_time = self.converter(record.created)
if datefmt is not None:
@@ -28,10 +26,7 @@ class ISO8601Formatter(logging.Formatter):
@staticmethod
def _format_timezone_offset(converted_time):
- """
- Returns the timezone as an hour/minute offset in the form
- "+HHMM" or "-HHMM".
- """
+ """Return the timezone as an hour/minute offset in the form "+HHMM" or "-HHMM"."""
# Windows treats %z in the format string as %Z, so we compute the hour/minute offset
# manually.
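The manual offset computation referenced above can be done with calendar.timegm(); a sketch, assuming 'converted_time' is a local-time struct_time:

    import calendar
    import time

    def format_timezone_offset(converted_time):
        # %z is unreliable in strftime() on Windows, so compute the local
        # UTC offset manually from the struct_time.
        local_epoch = time.mktime(converted_time)
        offset_secs = calendar.timegm(converted_time) - calendar.timegm(
            time.gmtime(local_epoch))
        sign = "+" if offset_secs >= 0 else "-"
        offset_mins = abs(offset_secs) // 60
        return "%s%02d%02d" % (sign, offset_mins // 60, offset_mins % 60)

    print(format_timezone_offset(time.localtime()))  # e.g. "-0400"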
diff --git a/buildscripts/resmokelib/logging/handlers.py b/buildscripts/resmokelib/logging/handlers.py
index 28e35d25a09..982a2f38b6e 100644
--- a/buildscripts/resmokelib/logging/handlers.py
+++ b/buildscripts/resmokelib/logging/handlers.py
@@ -1,7 +1,4 @@
-"""
-Additional handlers that are used as the base classes of the buildlogger
-handler.
-"""
+"""Additional handlers that are used as the base classes of the buildlogger handler."""
from __future__ import absolute_import
@@ -27,17 +24,16 @@ _TIMEOUT_SECS = 10
class BufferedHandler(logging.Handler):
- """
- A handler class that buffers logging records in memory. Whenever
- each record is added to the buffer, a check is made to see if the
- buffer should be flushed. If it should, then flush() is expected to
- do what's needed.
+ """A handler class that buffers logging records in memory.
+
+ Whenever each record is added to the buffer, a check is made to see if the buffer
+ should be flushed. If it should, then flush() is expected to do what's needed.
"""
def __init__(self, capacity, interval_secs):
- """
- Initializes the handler with the buffer size and timeout after
- which the buffer is flushed regardless.
+ """Initialize the handler with the buffer size and timeout.
+
+ The timeout forces a flush regardless of how full the buffer is.
"""
logging.Handler.__init__(self)
@@ -68,18 +64,19 @@ class BufferedHandler(logging.Handler):
# close() serialize accesses to 'self.__emit_buffer' in a more granular way via
# 'self.__emit_lock'.
def createLock(self):
+ """Create lock."""
pass
def acquire(self):
+ """Acquire."""
pass
def release(self):
+ """Release."""
pass
- def process_record(self, record):
- """
- Applies a transformation to the record before it gets added to
- the buffer.
+ def process_record(self, record): # pylint: disable=no-self-use
+ """Apply a transformation to the record before it gets added to the buffer.
The default implementation returns 'record' unmodified.
"""
@@ -87,8 +84,7 @@ class BufferedHandler(logging.Handler):
return record
def emit(self, record):
- """
- Emits a record.
+ """Emit a record.
Append the record to the buffer after it has been transformed by
process_record(). If the length of the buffer is greater than or
@@ -117,9 +113,7 @@ class BufferedHandler(logging.Handler):
self.__flush_scheduled_by_emit = True
def flush(self):
- """
- Ensures all logging output has been flushed.
- """
+ """Ensure all logging output has been flushed."""
self.__flush(close_called=False)
@@ -132,9 +126,7 @@ class BufferedHandler(logging.Handler):
self.__flush_scheduled_by_emit = False
def __flush(self, close_called):
- """
- Ensures all logging output has been flushed.
- """
+ """Ensure all logging output has been flushed."""
with self.__emit_lock:
buf = self.__emit_buffer
@@ -147,18 +139,13 @@ class BufferedHandler(logging.Handler):
self._flush_buffer_with_lock(buf, close_called)
def _flush_buffer_with_lock(self, buf, close_called):
- """
- Ensures all logging output has been flushed.
- """
+ """Ensure all logging output has been flushed."""
raise NotImplementedError("_flush_buffer_with_lock must be implemented by BufferedHandler"
" subclasses")
def close(self):
- """
- Flushes the buffer and tidies up any resources used by this
- handler.
- """
+        """Flush the buffer and tidy up any resources used by this handler."""
with self.__emit_lock:
if self.__flush_event is not None:
@@ -170,15 +157,10 @@ class BufferedHandler(logging.Handler):
class HTTPHandler(object):
- """
- A class which sends data to a web server using POST requests.
- """
+ """A class which sends data to a web server using POST requests."""
def __init__(self, url_root, username, password):
- """
- Initializes the handler with the necessary authentication
- credentials.
- """
+ """Initialize the handler with the necessary authentication credentials."""
self.auth_handler = requests.auth.HTTPBasicAuth(username, password)
@@ -188,11 +170,9 @@ class HTTPHandler(object):
return "%s/%s/" % (self.url_root.rstrip("/"), endpoint.strip("/"))
def post(self, endpoint, data=None, headers=None, timeout_secs=_TIMEOUT_SECS):
- """
- Sends a POST request to the specified endpoint with the supplied
- data.
+ """Send a POST request to the specified endpoint with the supplied data.
- Returns the response, either as a string or a JSON object based
+ Return the response, either as a string or a JSON object based
on the content type.
"""
diff --git a/buildscripts/resmokelib/logging/loggers.py b/buildscripts/resmokelib/logging/loggers.py
index a53186b7aa3..d319928eb5f 100644
--- a/buildscripts/resmokelib/logging/loggers.py
+++ b/buildscripts/resmokelib/logging/loggers.py
@@ -1,6 +1,4 @@
-"""
-Module to hold the logger instances themselves.
-"""
+"""Module to hold the logger instances themselves."""
from __future__ import absolute_import
@@ -20,17 +18,21 @@ EXECUTOR_LOGGER = None
def _build_logger_server(logging_config):
- """Create and return a new BuildloggerServer if "buildlogger" is configured as
- one of the handler class in the configuration, return None otherwise.
+ """Create and return a new BuildloggerServer.
+
+    This occurs if "buildlogger" is configured as one of the handler classes in the
+    configuration; otherwise return None.
"""
for logger_name in (FIXTURE_LOGGER_NAME, TESTS_LOGGER_NAME):
logger_info = logging_config[logger_name]
for handler_info in logger_info["handlers"]:
if handler_info["class"] == "buildlogger":
return buildlogger.BuildloggerServer()
+ return None
def configure_loggers(logging_config):
+ """Configure the loggers."""
buildlogger.BUILDLOGGER_FALLBACK = BaseLogger("buildlogger")
# The 'buildlogger' prefix is not added to the fallback logger since the prefix of the original
# logger will be there as part of the logged message.
@@ -39,7 +41,7 @@ def configure_loggers(logging_config):
build_logger_server = _build_logger_server(logging_config)
fixture_logger = FixtureRootLogger(logging_config, build_logger_server)
tests_logger = TestsRootLogger(logging_config, build_logger_server)
- global EXECUTOR_LOGGER
+ global EXECUTOR_LOGGER # pylint: disable=global-statement
EXECUTOR_LOGGER = ExecutorRootLogger(logging_config, build_logger_server, fixture_logger,
tests_logger)
@@ -68,7 +70,7 @@ class BaseLogger(logging.Logger):
@property
def build_logger_server(self):
- """The configured BuildloggerServer instance, or None."""
+ """Get the configured BuildloggerServer instance, or None."""
if self._build_logger_server:
return self._build_logger_server
elif self.parent:
@@ -78,7 +80,7 @@ class BaseLogger(logging.Logger):
@property
def logging_config(self):
- """The logging configuration."""
+ """Get the logging configuration."""
if self._logging_config:
return self._logging_config
elif self.parent:
@@ -88,6 +90,7 @@ class BaseLogger(logging.Logger):
@staticmethod
def get_formatter(logger_info):
+ """Return formatter."""
log_format = logger_info.get("format", _DEFAULT_FORMAT)
return formatters.ISO8601Formatter(fmt=log_format)
@@ -107,7 +110,7 @@ class RootLogger(BaseLogger):
def _configure(self):
if self.name not in self.logging_config:
- raise ValueError("Logging configuration should contain the %s component", self.name)
+ raise ValueError("Logging configuration should contain the %s component" % self.name)
logger_info = self.logging_config[self.name]
formatter = self.get_formatter(logger_info)
@@ -158,6 +161,8 @@ class ExecutorRootLogger(RootLogger):
class JobLogger(BaseLogger):
+ """JobLogger class."""
+
def __init__(self, test_kind, job_num, parent, fixture_root_logger):
"""Initialize a JobLogger.
@@ -200,7 +205,10 @@ class JobLogger(BaseLogger):
class TestLogger(BaseLogger):
- def __init__(self, test_name, parent, build_id=None, test_id=None, url=None):
+ """TestLogger class."""
+
+ def __init__( # pylint: disable=too-many-arguments
+ self, test_name, parent, build_id=None, test_id=None, url=None):
"""Initialize a TestLogger.
:param test_name: the test name.
@@ -245,6 +253,8 @@ class FixtureRootLogger(RootLogger):
class FixtureLogger(BaseLogger):
+ """FixtureLogger class."""
+
def __init__(self, fixture_class, job_num, build_id, fixture_root_logger):
"""Initialize a FixtureLogger.
@@ -277,6 +287,8 @@ class FixtureLogger(BaseLogger):
class FixtureNodeLogger(BaseLogger):
+ """FixtureNodeLogger class."""
+
def __init__(self, fixture_class, job_num, node_name, fixture_logger):
"""Initialize a FixtureNodeLogger.
@@ -310,6 +322,8 @@ class TestsRootLogger(RootLogger):
class TestQueueLogger(BaseLogger):
+ """TestQueueLogger class."""
+
def __init__(self, test_kind, tests_root_logger):
"""Initialize a TestQueueLogger.
@@ -320,6 +334,8 @@ class TestQueueLogger(BaseLogger):
class HookLogger(BaseLogger):
+ """HookLogger class."""
+
def __init__(self, hook_class, fixture_logger, tests_root_logger):
"""Initialize a HookLogger.
@@ -337,9 +353,7 @@ class HookLogger(BaseLogger):
def _fallback_buildlogger_handler(include_logger_name=True):
- """
- Returns a handler that writes to stderr.
- """
+ """Return a handler that writes to stderr."""
if include_logger_name:
log_format = "[fallback] [%(name)s] %(message)s"
else:
@@ -353,10 +367,7 @@ def _fallback_buildlogger_handler(include_logger_name=True):
def _get_buildlogger_handler_info(logger_info):
- """
- Returns the buildlogger handler information if it exists, and None
- otherwise.
- """
+ """Return the buildlogger handler information if it exists, and None otherwise."""
for handler_info in logger_info["handlers"]:
handler_info = handler_info.copy()
if handler_info.pop("class") == "buildlogger":
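The ValueError hunk above is a behavioral fix, not just a style change: comma-style lazy formatting is a feature of the logging methods, so the exception message was never interpolated. A short sketch of the difference:

    import logging

    component = "executor"

    # Logging calls defer interpolation until the record is emitted:
    logging.error("Logging configuration should contain the %s component", component)

    # Exceptions do not; the message must be interpolated explicitly:
    try:
        raise ValueError("Logging configuration should contain the %s component" % component)
    except ValueError as err:
        print(err)  # the message now contains "executor", as intended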
diff --git a/buildscripts/resmokelib/parser.py b/buildscripts/resmokelib/parser.py
index 0aeb969688c..d9f40da3e90 100644
--- a/buildscripts/resmokelib/parser.py
+++ b/buildscripts/resmokelib/parser.py
@@ -1,6 +1,4 @@
-"""
-Parser for command line arguments.
-"""
+"""Parser for command line arguments."""
from __future__ import absolute_import
@@ -15,10 +13,8 @@ from . import utils
from .. import resmokeconfig
-def parse_command_line():
- """
- Parses the command line arguments passed to resmoke.py.
- """
+def parse_command_line(): # pylint: disable=too-many-statements
+ """Parse the command line arguments passed to resmoke.py."""
parser = optparse.OptionParser()
@@ -304,9 +300,7 @@ def parse_command_line():
def validate_options(parser, options, args):
- """
- Do preliminary validation on the options and error on any invalid options.
- """
+ """Do preliminary validation on the options and error on any invalid options."""
if options.shell_port is not None and options.shell_conn_string is not None:
parser.error("Cannot specify both `shellPort` and `shellConnString`")
@@ -318,9 +312,7 @@ def validate_options(parser, options, args):
def validate_benchmark_options():
- """
- Some options are incompatible with benchmark test suites, we error out early if any of
- these options are specified.
+ """Error out early if any options are incompatible with benchmark test suites.
:return: None
"""
@@ -338,10 +330,12 @@ def validate_benchmark_options():
def get_logging_config(values):
+    """Return the logging configuration."""
return _get_logging_config(values.logger_file)
-def update_config_vars(values):
+def update_config_vars(values): # pylint: disable=too-many-statements
+ """Update config vars."""
config = _config.DEFAULTS.copy()
# Override `config` with values from command line arguments.
@@ -436,10 +430,7 @@ def update_config_vars(values):
def _get_logging_config(pathname):
- """
- Attempts to read a YAML configuration from 'pathname' that describes
- how resmoke.py should log the tests and fixtures.
- """
+    """Read the YAML configuration from 'pathname' describing how to log tests and fixtures."""
# Named loggers are specified as the basename of the file, without the .yml extension.
if not utils.is_yaml_file(pathname) and not os.path.dirname(pathname):
@@ -454,17 +445,14 @@ def _get_logging_config(pathname):
def _expand_user(pathname):
- """
- Wrapper around os.path.expanduser() to do nothing when given None.
- """
+    """Wrap os.path.expanduser() to do nothing when given None."""
if pathname is None:
return None
return os.path.expanduser(pathname)
def _tags_from_list(tags_list):
- """
- Returns the list of tags from a list of tag parameter values.
+ """Return the list of tags from a list of tag parameter values.
Each parameter value in the list may be a list of comma separated tags, with empty strings
ignored.
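A sketch of the tag-flattening behavior described above, assuming the same comma-separated convention (the function body is illustrative):

    def tags_from_list(tags_list):
        tags = []
        for tag_param in tags_list:
            # Split each parameter value on commas and drop empty strings.
            tags.extend(tag for tag in tag_param.split(",") if tag != "")
        return tags

    print(tags_from_list(["a,b", "", "c,,d"]))  # ['a', 'b', 'c', 'd']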
diff --git a/buildscripts/resmokelib/reportfile.py b/buildscripts/resmokelib/reportfile.py
index 7dcf5623a6d..00841de2bc9 100644
--- a/buildscripts/resmokelib/reportfile.py
+++ b/buildscripts/resmokelib/reportfile.py
@@ -1,6 +1,4 @@
-"""
-Manages interactions with the report.json file.
-"""
+"""Manage interactions with the report.json file."""
from __future__ import absolute_import
@@ -11,10 +9,7 @@ from .testing import report as _report
def write(suites):
- """
- Writes the combined report of all executions if --reportFile was
- specified on the command line.
- """
+ """Write the combined report of all executions if --reportFile was specified."""
if config.REPORT_FILE is None:
return
diff --git a/buildscripts/resmokelib/selector.py b/buildscripts/resmokelib/selector.py
index d83cddd9142..3014012511d 100644
--- a/buildscripts/resmokelib/selector.py
+++ b/buildscripts/resmokelib/selector.py
@@ -1,5 +1,4 @@
-"""
-Test selection utility.
+"""Test selection utility.
Defines filtering rules for what tests to include in a suite depending
on whether they apply to C++ unit tests, dbtests, or JS tests.
@@ -32,15 +31,17 @@ class TestFileExplorer(object):
The file related code has been confined to this class for testability.
"""
- def is_glob_pattern(self, path):
- """Indicates if the provided path is a glob pattern.
+ @staticmethod
+ def is_glob_pattern(path):
+ """Indicate if the provided path is a glob pattern.
See buildscripts.resmokelib.utils.globstar.is_glob_pattern().
"""
return globstar.is_glob_pattern(path)
- def iglob(self, pattern):
- """Expands the given glob pattern with regard to the current working directory.
+ @staticmethod
+ def iglob(pattern): # noqa: D406,D407,D411,D413
+ """Expand the given glob pattern with regard to the current working directory.
See buildscripts.resmokelib.utils.globstar.iglob().
Returns:
@@ -48,8 +49,9 @@ class TestFileExplorer(object):
"""
return globstar.iglob(pattern)
- def jstest_tags(self, file_path):
- """Extracts the tags from a JavaScript test file.
+ @staticmethod
+ def jstest_tags(file_path): # noqa: D406,D407,D411,D413
+ """Extract the tags from a JavaScript test file.
See buildscripts.resmokelib.utils.jscomment.get_tags().
Returns:
@@ -57,8 +59,9 @@ class TestFileExplorer(object):
"""
return jscomment.get_tags(file_path)
- def read_root_file(self, root_file_path):
- """Reads a file containing the list of root test files.
+ @staticmethod
+ def read_root_file(root_file_path): # noqa: D406,D407,D411,D413
+ """Read a file containing the list of root test files.
Args:
root_file_path: the path to a file containing the path of each test on a separate line.
@@ -72,19 +75,21 @@ class TestFileExplorer(object):
tests.append(test_path)
return tests
- def fnmatchcase(self, name, pattern):
- """Indicates if the given name matches the given pattern.
+ @staticmethod
+ def fnmatchcase(name, pattern):
+ """Indicate if the given name matches the given pattern.
See buildscripts.resmokelib.utils.fnmatch.fnmatchcase().
"""
return fnmatch.fnmatchcase(name, pattern)
- def isfile(self, path):
- """Indicates if the given path corresponds to an existing file."""
+ @staticmethod
+ def isfile(path):
+ """Indicate if the given path corresponds to an existing file."""
return os.path.isfile(path)
def list_dbtests(self, dbtest_binary):
- """Lists the available dbtests suites."""
+ """List the available dbtests suites."""
returncode, stdout = self._run_program(dbtest_binary, ["--list"])
if returncode != 0:
@@ -92,8 +97,9 @@ class TestFileExplorer(object):
return stdout.splitlines()
- def _run_program(self, binary, args):
- """Runs a program.
+ @staticmethod
+ def _run_program(binary, args): # noqa: D406,D407,D411,D413
+ """Run a program.
Args:
binary: the binary to run.
@@ -108,9 +114,11 @@ class TestFileExplorer(object):
return program.returncode, stdout
- def parse_tag_file(self, test_kind):
- """
- Parses the tag file and return a dict of tagged tests, with the key the filename and the
+ @staticmethod
+ def parse_tag_file(test_kind):
+ """Parse the tag file and return a dict of tagged tests.
+
+        The resulting dict will have the filename as the key and as the
value a list of tags, i.e., {'file1.js': ['tag1', 'tag2'], 'file2.js': ['tag2', 'tag3']}.
"""
tagged_tests = collections.defaultdict(list)
@@ -141,7 +149,7 @@ class _TestList(object):
"""
def __init__(self, test_file_explorer, roots, tests_are_files=True):
- """Initializes the _TestList with a TestFileExplorer component and a list of root tests."""
+ """Initialize the _TestList with a TestFileExplorer component and a list of root tests."""
self._test_file_explorer = test_file_explorer
self._tests_are_files = tests_are_files
self._roots = self._expand_files(roots) if tests_are_files else roots
@@ -159,12 +167,12 @@ class _TestList(object):
return expanded_tests
def include_files(self, include_files, force=False):
- """Filters the test list so that it only includes files matching 'include_files'.
+ """Filter the test list so that it only includes files matching 'include_files'.
- Args:
- include_files: a list of paths or glob patterns that match the files to include.
- force: if True include the matching files that were previously excluded, otherwise
- only include files that match and were not previously excluded from this _TestList.
+ Args:
+ include_files: a list of paths or glob patterns that match the files to include.
+ force: if True include the matching files that were previously excluded, otherwise only
+ include files that match and were not previously excluded from this _TestList.
"""
if not self._tests_are_files:
raise TypeError("_TestList does not contain files.")
@@ -178,8 +186,8 @@ class _TestList(object):
if force:
self._filtered |= set(self._roots) & expanded_include_files
- def exclude_files(self, exclude_files):
- """Excludes from the test list the files that match elements from 'exclude_files'.
+ def exclude_files(self, exclude_files): # noqa: D406,D407,D411,D413
+ """Exclude from the test list the files that match elements from 'exclude_files'.
Args:
exclude_files: a list of paths or glob patterns that match the files to exclude.
@@ -201,7 +209,7 @@ class _TestList(object):
self._filtered.discard(path)
def match_tag_expression(self, tag_expression, get_tags):
- """Filters the test list to only include tests that match the tag expression.
+ """Filter the test list to only include tests that match the tag expression.
Args:
tag_expression: a callable object that takes a list of tags and indicate if the required
@@ -212,11 +220,10 @@ class _TestList(object):
self._filtered = {test for test in self._filtered if tag_expression(get_tags(test))}
def include_any_pattern(self, patterns):
- """
- Filters the test list to only include tests that match any of the given glob patterns.
- """
+ """Filter the test list to only include tests that match any provided glob patterns."""
def match(test):
+ """Return True if 'test' matches a pattern."""
for pattern in patterns:
if test == pattern or fnmatch.fnmatchcase(test, pattern):
return True
@@ -225,8 +232,7 @@ class _TestList(object):
self._filtered = {test for test in self._filtered if match(test)}
def get_tests(self):
- """
- Returns the test list as a list(str).
+ """Return the test list as a list(str).
The tests are returned in the same order as they are found in the root tests.
"""
@@ -287,7 +293,7 @@ class _MatchExpression(object):
def make_expression(conf):
- """Creates a tag matching expression from an expression configuration.
+ """Create a tag matching expression from an expression configuration.
The syntax for the expression configuration is:
- expr: str_expr | dict_expr
@@ -325,11 +331,10 @@ def _make_expression_list(configs):
class _SelectorConfig(object):
"""Base object to represent the configuration for test selection."""
- def __init__(self, root=None, roots=None, include_files=None, exclude_files=None,
- include_tags=None, exclude_tags=None, include_with_any_tags=None,
- exclude_with_any_tags=None):
- """
- Initializes the _SelectorConfig from the configuration elements.
+ def __init__( # pylint: disable=too-many-arguments
+ self, root=None, roots=None, include_files=None, exclude_files=None, include_tags=None,
+ exclude_tags=None, include_with_any_tags=None, exclude_with_any_tags=None):
+ """Initialize the _SelectorConfig from the configuration elements.
Args:
root: the path to a file containing the list of root tests. Incompatible with 'roots'.
@@ -367,10 +372,8 @@ class _SelectorConfig(object):
return set(list_b)
elif list_b is None:
return set(list_a)
- else:
- return set(list_a) | set(list_b)
- else:
- return None
+ return set(list_a) | set(list_b)
+ return None
@staticmethod
def __make_tags_expression(include_tags, exclude_tags, include_with_any_tags,
@@ -389,16 +392,14 @@ class _SelectorConfig(object):
if expressions:
return _AllOfExpression(expressions)
- else:
- return None
+ return None
class _Selector(object):
"""Selection algorithm to select tests matching a selector configuration."""
def __init__(self, test_file_explorer, tests_are_files=True):
- """
- Initializes the _Selector.
+ """Initialize the _Selector.
Args:
test_file_explorer: a TestFileExplorer instance.
@@ -406,7 +407,7 @@ class _Selector(object):
self._test_file_explorer = test_file_explorer
self._tests_are_files = tests_are_files
- def select(self, selector_config):
+ def select(self, selector_config): # noqa: D406,D407,D411,D413
"""Select the test files that match the given configuration.
Args:
@@ -434,17 +435,18 @@ class _Selector(object):
test_list.include_files(selector_config.include_files, force=True)
return test_list.get_tests()
- def get_tags(self, test_file):
- """Retrieves the tags associated with the give test file."""
+ @staticmethod
+ def get_tags(test_file): # pylint: disable=unused-argument
+        """Retrieve the tags associated with the given test file."""
return []
class _JSTestSelectorConfig(_SelectorConfig):
"""_SelectorConfig subclass for js_test tests."""
- def __init__(self, roots=None, include_files=None, exclude_files=None,
- include_with_any_tags=None, exclude_with_any_tags=None, include_tags=None,
- exclude_tags=None):
+ def __init__( # pylint: disable=too-many-arguments
+ self, roots=None, include_files=None, exclude_files=None, include_with_any_tags=None,
+ exclude_with_any_tags=None, include_tags=None, exclude_tags=None):
_SelectorConfig.__init__(self, roots=roots, include_files=include_files,
exclude_files=exclude_files,
include_with_any_tags=include_with_any_tags,
@@ -460,6 +462,7 @@ class _JSTestSelector(_Selector):
self._tags = self._test_file_explorer.parse_tag_file("js_test")
def get_tags(self, test_file):
+ """Return tags from test_file."""
file_tags = self._test_file_explorer.jstest_tags(test_file)
if test_file in self._tags:
return list(set(file_tags) | set(self._tags[test_file]))
@@ -471,6 +474,7 @@ class _CppTestSelectorConfig(_SelectorConfig):
def __init__(self, root=config.DEFAULT_INTEGRATION_TEST_LIST, roots=None, include_files=None,
exclude_files=None):
+ """Initialize _CppTestSelectorConfig."""
if roots:
# The 'roots' argument is only present when tests are specified on the command line
# and in that case they take precedence over the tests in the root file.
@@ -485,9 +489,11 @@ class _CppTestSelector(_Selector):
"""_Selector subclass for cpp_integration_test and cpp_unit_test tests."""
def __init__(self, test_file_explorer):
+ """Initialize _CppTestSelector."""
_Selector.__init__(self, test_file_explorer)
def select(self, selector_config):
+ """Return selected tests."""
if selector_config.roots:
# Tests have been specified on the command line. We use them without additional
# filtering.
@@ -500,6 +506,7 @@ class _DbTestSelectorConfig(_SelectorConfig):
"""_Selector config subclass for db_test tests."""
def __init__(self, binary=None, roots=None, include_suites=None):
+ """Initialize _DbTestSelectorConfig."""
_SelectorConfig.__init__(self, roots=roots)
self.include_suites = utils.default_if_none(include_suites, [])
@@ -517,9 +524,11 @@ class _DbTestSelector(_Selector):
"""_Selector subclass for db_test tests."""
def __init__(self, test_file_explorer):
+ """Initialize _DbTestSelector."""
_Selector.__init__(self, test_file_explorer, tests_are_files=False)
def select(self, selector_config):
+ """Return selected tests."""
if selector_config.roots:
roots = selector_config.roots
else:
@@ -550,6 +559,7 @@ class _JsonSchemaTestSelectorConfig(_SelectorConfig):
"""_SelectorConfig subclass for json_schema_test tests."""
def __init__(self, roots, include_files=None, exclude_files=None):
+ """Initialize _JsonSchemaTestSelectorConfig."""
_SelectorConfig.__init__(self, roots=roots, include_files=include_files,
exclude_files=exclude_files)
@@ -558,6 +568,7 @@ class _SleepTestCaseSelectorConfig(_SelectorConfig):
"""_SelectorConfig subclass for sleep_test tests."""
def __init__(self, roots):
+ """Initialize _SleepTestCaseSelectorConfig."""
_SelectorConfig.__init__(self, roots=roots)
@@ -565,6 +576,7 @@ class _SleepTestCaseSelector(_Selector):
"""_Selector subclass for sleep_test tests."""
def __init__(self, test_file_explorer):
+ """Initialize _SleepTestCaseSelector."""
_Selector.__init__(self, test_file_explorer, tests_are_files=False)
@@ -596,7 +608,7 @@ _SELECTOR_REGISTRY = {
def filter_tests(test_kind, selector_config, test_file_explorer=_DEFAULT_TEST_FILE_EXPLORER):
- """Filters the tests according to a specified configuration.
+ """Filter the tests according to a specified configuration.
Args:
test_kind: the test kind, one of 'cpp_integration_test', 'cpp_unit_test', 'db_test',
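The contract of match_tag_expression(), where a callable over a test's tag list decides inclusion, can be sketched as follows; the helper name and the sample data are assumptions:

    def make_any_of_expression(required_tags):
        required = set(required_tags)

        def expression(test_tags):
            # True when the test carries at least one required tag.
            return bool(required & set(test_tags))

        return expression

    get_tags = {"a.js": ["replication"], "b.js": ["auth"]}.get
    expr = make_any_of_expression(["replication", "sharding"])
    print([test for test in ["a.js", "b.js"] if expr(get_tags(test))])  # ['a.js']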
diff --git a/buildscripts/resmokelib/sighandler.py b/buildscripts/resmokelib/sighandler.py
index 5da9ae52ca1..c67d44eb759 100644
--- a/buildscripts/resmokelib/sighandler.py
+++ b/buildscripts/resmokelib/sighandler.py
@@ -1,6 +1,4 @@
-"""
-Utility to support asynchronously signaling the current process.
-"""
+"""Utility to support asynchronously signaling the current process."""
from __future__ import absolute_import
@@ -12,25 +10,23 @@ import threading
import time
import traceback
-_is_windows = (sys.platform == "win32")
-if _is_windows:
+_IS_WINDOWS = (sys.platform == "win32")
+if _IS_WINDOWS:
import win32api
import win32event
-from . import reportfile
-from . import testing
+from . import reportfile # pylint: disable=wrong-import-position
+from . import testing # pylint: disable=wrong-import-position
def register(logger, suites, start_time):
- """
- On Windows, set up an event object to wait for signal, otherwise, register a signal handler
- for the SIGUSR1 signal.
- """
+    """On Windows, register an event object to wait on; otherwise, register a SIGUSR1 handler."""
- def _handle_sigusr1(signum, frame):
- """
- Signal handler that will dump the stacks of all threads and
- then write out the report file and log suite summaries.
+ def _handle_sigusr1(signum, frame): # pylint: disable=unused-argument
+ """Signal handler for SIGUSR1.
+
+ The handler will dump the stacks of all threads and write out the report file and
+ log suite summaries.
"""
header_msg = "Dumping stacks due to SIGUSR1 signal"
@@ -38,9 +34,10 @@ def register(logger, suites, start_time):
_dump_and_log(header_msg)
def _handle_set_event(event_handle):
- """
- Windows event object handler that will dump the stacks of all threads and then write out
- the report file and log suite summaries.
+ """Event object handler for Windows.
+
+ The handler will dump the stacks of all threads and write out the report file and
+ log suite summaries.
"""
while True:
@@ -58,9 +55,7 @@ def register(logger, suites, start_time):
_dump_and_log(header_msg)
def _dump_and_log(header_msg):
- """
- Dumps the stacks of all threads, writes the report file, and logs the suite summaries.
- """
+ """Dump the stacks of all threads, write report file, and log suite summaries."""
_dump_stacks(logger, header_msg)
reportfile.write(suites)
@@ -68,7 +63,7 @@ def register(logger, suites, start_time):
# On Windows spawn a thread to wait on an event object for signal to dump stacks. For Cygwin
# platforms, we use a signal handler since it supports POSIX signals.
- if _is_windows:
+ if _IS_WINDOWS:
# Create unique event_name.
event_name = "Global\\Mongo_Python_" + str(os.getpid())
@@ -97,14 +92,12 @@ def register(logger, suites, start_time):
def _dump_stacks(logger, header_msg):
- """
- Signal handler that will dump the stacks of all threads.
- """
+    """Dump the stacks of all threads."""
sb = []
sb.append(header_msg)
- frames = sys._current_frames()
+ frames = sys._current_frames() # pylint: disable=protected-access
sb.append("Total threads: %d" % (len(frames)))
sb.append("")
diff --git a/buildscripts/resmokelib/suitesconfig.py b/buildscripts/resmokelib/suitesconfig.py
index 18c52683661..87bc1e1e9f5 100644
--- a/buildscripts/resmokelib/suitesconfig.py
+++ b/buildscripts/resmokelib/suitesconfig.py
@@ -14,9 +14,7 @@ from .. import resmokeconfig
def get_named_suites():
- """
- Returns the list of suites available to execute.
- """
+ """Return the list of suites available to execute."""
# Skip "with_*server" and "no_server" because they do not define any test files to run.
executor_only = {"with_server", "with_external_server", "no_server"}
@@ -26,8 +24,7 @@ def get_named_suites():
def create_test_membership_map(fail_on_missing_selector=False, test_kind=None):
- """
- Returns a dict keyed by test name containing all of the suites that will run that test.
+ """Return a dict keyed by test name containing all of the suites that will run that test.
If 'test_kind' is specified then only the mappings for that kind are returned.
Since this iterates through every available suite, it should only be run once.
@@ -59,7 +56,7 @@ def create_test_membership_map(fail_on_missing_selector=False, test_kind=None):
def get_suites(suite_files, test_files):
- """Retrieves the Suite instances based on suite configuration files and override parameters.
+ """Retrieve the Suite instances based on suite configuration files and override parameters.
Args:
suite_files: A list of file paths pointing to suite YAML configuration files. For the suites
@@ -93,10 +90,7 @@ def _make_suite_roots(files):
def _get_suite_config(pathname):
- """
- Attempts to read a YAML configuration from 'pathname' that describes
- what tests to run and how to run them.
- """
+ """Attempt to read YAML configuration from 'pathname' for the suite."""
return _get_yaml_config("suite", pathname)
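A sketch of the inversion create_test_membership_map() performs, turning suite-to-tests mappings into test-to-suites mappings (the suite contents here are made up for illustration):

    import collections

    suite_tests = {"core": ["a.js", "b.js"], "replica_sets": ["b.js"]}

    membership = collections.defaultdict(list)
    for suite_name, tests in suite_tests.items():
        for test in tests:
            membership[test].append(suite_name)

    # {'a.js': ['core'], 'b.js': ['core', 'replica_sets']} (order may vary)
    print(dict(membership))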
diff --git a/buildscripts/resmokelib/testing/__init__.py b/buildscripts/resmokelib/testing/__init__.py
index e4acff00521..eb58f41f7fe 100644
--- a/buildscripts/resmokelib/testing/__init__.py
+++ b/buildscripts/resmokelib/testing/__init__.py
@@ -1,7 +1,4 @@
-"""
-Extension to the unittest package to support buildlogger and parallel
-test execution.
-"""
+"""Extension to the unittest package to support buildlogger and parallel test execution."""
from __future__ import absolute_import
diff --git a/buildscripts/resmokelib/testing/executor.py b/buildscripts/resmokelib/testing/executor.py
index f66515ac8b0..3df4a0a5059 100644
--- a/buildscripts/resmokelib/testing/executor.py
+++ b/buildscripts/resmokelib/testing/executor.py
@@ -1,6 +1,4 @@
-"""
-Driver of the test execution framework.
-"""
+"""Driver of the test execution framework."""
from __future__ import absolute_import
@@ -21,8 +19,7 @@ from ..utils import queue as _queue
class TestSuiteExecutor(object):
- """
- Executes a test suite.
+ """Execute a test suite.
Responsible for setting up and tearing down the fixtures that the
tests execute against.
@@ -30,11 +27,10 @@ class TestSuiteExecutor(object):
_TIMEOUT = 24 * 60 * 60 # =1 day (a long time to have tests run)
- def __init__(self, exec_logger, suite, config=None, fixture=None, hooks=None,
- archive_instance=None, archive=None):
- """
- Initializes the TestSuiteExecutor with the test suite to run.
- """
+ def __init__( # pylint: disable=too-many-arguments
+ self, exec_logger, suite, config=None, fixture=None, hooks=None, archive_instance=None,
+ archive=None):
+ """Initialize the TestSuiteExecutor with the test suite to run."""
self.logger = exec_logger
if _config.SHELL_CONN_STRING is not None:
@@ -69,8 +65,7 @@ class TestSuiteExecutor(object):
self._jobs = [self._make_job(job_num) for job_num in xrange(jobs_to_start)]
def run(self):
- """
- Executes the test suite.
+ """Execute the test suite.
Any exceptions that occur during setting up or tearing down a
fixture are propagated.
@@ -128,9 +123,7 @@ class TestSuiteExecutor(object):
self._suite.return_code = return_code
def _setup_fixtures(self):
- """
- Sets up a fixture for each job.
- """
+ """Set up a fixture for each job."""
# We reset the internal state of the PortAllocator before calling job.fixture.setup() so
# that ports used by the fixture during a test suite run earlier can be reused during this
@@ -140,7 +133,7 @@ class TestSuiteExecutor(object):
for job in self._jobs:
try:
job.fixture.setup()
- except:
+ except: # pylint: disable=bare-except
self.logger.exception("Encountered an error while setting up %s.", job.fixture)
return False
@@ -148,16 +141,14 @@ class TestSuiteExecutor(object):
for job in self._jobs:
try:
job.fixture.await_ready()
- except:
+ except: # pylint: disable=bare-except
self.logger.exception("Encountered an error while waiting for %s to be ready",
job.fixture)
return False
return True
def _run_tests(self, test_queue, teardown_flag):
- """
- Starts a thread for each Job instance and blocks until all of
- the tests are run.
+ """Start a thread for each Job instance and block until all of the tests are run.
Returns a (combined report, user interrupted) pair, where the
report contains the status and timing information of tests run
@@ -170,12 +161,12 @@ class TestSuiteExecutor(object):
try:
# Run each Job instance in its own thread.
for job in self._jobs:
- t = threading.Thread(target=job, args=(test_queue, interrupt_flag),
- kwargs=dict(teardown_flag=teardown_flag))
+ thr = threading.Thread(target=job, args=(test_queue, interrupt_flag),
+ kwargs=dict(teardown_flag=teardown_flag))
# Do not wait for tests to finish executing if interrupted by the user.
- t.daemon = True
- t.start()
- threads.append(t)
+ thr.daemon = True
+ thr.start()
+ threads.append(thr)
# SERVER-24729 Need to stagger when jobs start to reduce I/O load if there
# are many of them. Both the 5 and the 10 are arbitrary.
# Currently only enabled on Evergreen.
@@ -192,8 +183,8 @@ class TestSuiteExecutor(object):
user_interrupted = True
else:
# Only wait for all the Job instances if not interrupted by the user.
- for t in threads:
- t.join()
+ for thr in threads:
+ thr.join()
reports = [job.report for job in self._jobs]
combined_report = _report.TestReport.combine(*reports)
@@ -204,8 +195,7 @@ class TestSuiteExecutor(object):
return (combined_report, user_interrupted)
def _teardown_fixtures(self):
- """
- Tears down all of the fixtures.
+ """Tear down all of the fixtures.
Returns true if all fixtures were torn down successfully, and
false otherwise.
@@ -217,15 +207,13 @@ class TestSuiteExecutor(object):
except errors.ServerFailure as err:
self.logger.warn("Teardown of %s was not successful: %s", job.fixture, err)
success = False
- except:
+ except: # pylint: disable=bare-except
self.logger.exception("Encountered an error while tearing down %s.", job.fixture)
success = False
return success
def _make_fixture(self, job_num, job_logger):
- """
- Creates a fixture for a job.
- """
+ """Create a fixture for a job."""
fixture_config = {}
fixture_class = fixtures.NOOP_FIXTURE_CLASS
@@ -238,10 +226,8 @@ class TestSuiteExecutor(object):
return fixtures.make_fixture(fixture_class, fixture_logger, job_num, **fixture_config)
- def _make_hooks(self, job_num, fixture):
- """
- Creates the hooks for the job's fixture.
- """
+ def _make_hooks(self, fixture):
+ """Create the hooks for the job's fixture."""
hooks = []
@@ -256,22 +242,18 @@ class TestSuiteExecutor(object):
return hooks
def _make_job(self, job_num):
- """
- Returns a Job instance with its own fixture, hooks, and test
- report.
- """
+ """Return a Job instance with its own fixture, hooks, and test report."""
job_logger = self.logger.new_job_logger(self._suite.test_kind, job_num)
fixture = self._make_fixture(job_num, job_logger)
- hooks = self._make_hooks(job_num, fixture)
+ hooks = self._make_hooks(fixture)
report = _report.TestReport(job_logger, self._suite.options)
return _job.Job(job_logger, fixture, hooks, report, self.archival, self._suite.options)
def _make_test_queue(self):
- """
- Returns a queue of TestCase instances.
+ """Return a queue of TestCase instances.
Use a multi-consumer queue instead of a unittest.TestSuite so
that the test cases can be dispatched to multiple threads.
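The dispatch pattern above, one daemon thread per Job draining a shared multi-consumer queue, reduces to roughly the following sketch (the names and the print call are illustrative):

    import threading

    try:
        import queue           # Python 3
    except ImportError:
        import Queue as queue  # Python 2, matching the codebase at this commit

    def run_jobs(tests, num_jobs):
        test_queue = queue.Queue()
        for test in tests:
            test_queue.put(test)

        def job(job_num):
            while True:
                try:
                    test = test_queue.get_nowait()
                except queue.Empty:
                    return
                print("job %d running %s" % (job_num, test))

        threads = []
        for job_num in range(num_jobs):
            thr = threading.Thread(target=job, args=(job_num,))
            thr.daemon = True  # do not block interpreter exit if interrupted
            thr.start()
            threads.append(thr)
        for thr in threads:
            thr.join()

    run_jobs(["a.js", "b.js", "c.js"], num_jobs=2)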
diff --git a/buildscripts/resmokelib/testing/fixtures/__init__.py b/buildscripts/resmokelib/testing/fixtures/__init__.py
index e59a05c9754..87662625f7f 100644
--- a/buildscripts/resmokelib/testing/fixtures/__init__.py
+++ b/buildscripts/resmokelib/testing/fixtures/__init__.py
@@ -1,6 +1,4 @@
-"""
-Fixtures for executing JSTests against.
-"""
+"""Fixtures for executing JSTests against."""
from __future__ import absolute_import
@@ -12,4 +10,4 @@ NOOP_FIXTURE_CLASS = _NoOpFixture.REGISTERED_NAME
# We dynamically load all modules in the fixtures/ package so that any Fixture classes declared
# within them are automatically registered.
-_autoloader.load_all_modules(name=__name__, path=__path__)
+_autoloader.load_all_modules(name=__name__, path=__path__) # type: ignore
diff --git a/buildscripts/resmokelib/testing/fixtures/interface.py b/buildscripts/resmokelib/testing/fixtures/interface.py
index 0d80907e680..9b4e69c1128 100644
--- a/buildscripts/resmokelib/testing/fixtures/interface.py
+++ b/buildscripts/resmokelib/testing/fixtures/interface.py
@@ -1,6 +1,4 @@
-"""
-Interface of the different fixtures for executing JSTests against.
-"""
+"""Interface of the different fixtures for executing JSTests against."""
from __future__ import absolute_import
@@ -16,13 +14,11 @@ from ... import logging
from ... import utils
from ...utils import registry
-_FIXTURES = {}
+_FIXTURES = {} # type: ignore
def make_fixture(class_name, *args, **kwargs):
- """
- Factory function for creating Fixture instances.
- """
+    """Provide a factory function for creating Fixture instances."""
if class_name not in _FIXTURES:
raise ValueError("Unknown fixture class '%s'" % class_name)
@@ -30,20 +26,16 @@ def make_fixture(class_name, *args, **kwargs):
class Fixture(object):
- """
- Base class for all fixtures.
- """
+ """Base class for all fixtures."""
- __metaclass__ = registry.make_registry_metaclass(_FIXTURES)
+ __metaclass__ = registry.make_registry_metaclass(_FIXTURES) # type: ignore
# We explicitly set the 'REGISTERED_NAME' attribute so that PyLint realizes that the attribute
# is defined for all subclasses of Fixture.
REGISTERED_NAME = "Fixture"
def __init__(self, logger, job_num, dbpath_prefix=None):
- """
- Initializes the fixture with a logger instance.
- """
+ """Initialize the fixture with a logger instance."""
if not isinstance(logger, logging.Logger):
raise TypeError("logger must be a Logger instance")
@@ -61,20 +53,15 @@ class Fixture(object):
self._dbpath_prefix = os.path.join(dbpath_prefix, "job{}".format(self.job_num))
def setup(self):
- """
- Creates the fixture.
- """
+ """Create the fixture."""
pass
def await_ready(self):
- """
- Blocks until the fixture can be used for testing.
- """
+ """Block until the fixture can be used for testing."""
pass
- def teardown(self, finished=False):
- """
- Destroys the fixture.
+ def teardown(self, finished=False): # noqa
+ """Destroy the fixture.
The fixture's logging handlers are closed if 'finished' is true,
which should happen when setup() won't be called again.
@@ -92,9 +79,8 @@ class Fixture(object):
# want the logs to eventually get flushed.
logging.flush.close_later(handler)
- def _do_teardown(self):
- """
- Destroys the fixture.
+ def _do_teardown(self): # noqa
+ """Destroy the fixture.
This method must be implemented by subclasses.
@@ -103,36 +89,32 @@ class Fixture(object):
"""
pass
- def is_running(self):
- """
- Returns true if the fixture is still operating and more tests
- can be run, and false otherwise.
- """
+ def is_running(self): # pylint: disable=no-self-use
+        """Return true if the fixture is still operating and more tests can be run."""
return True
def get_dbpath_prefix(self):
+ """Return dbpath prefix."""
return self._dbpath_prefix
def get_internal_connection_string(self):
- """
- Returns the connection string for this fixture. This is NOT a
- driver connection string, but a connection string of the format
+ """Return the connection string for this fixture.
+
+ This is NOT a driver connection string, but a connection string of the format
expected by the mongo::ConnectionString class.
"""
raise NotImplementedError("get_connection_string must be implemented by Fixture subclasses")
def get_driver_connection_url(self):
- """
- Return the mongodb connection string as defined here:
+ """Return the mongodb connection string as defined below.
+
https://docs.mongodb.com/manual/reference/connection-string/
"""
raise NotImplementedError(
"get_driver_connection_url must be implemented by Fixture subclasses")
def mongo_client(self, read_preference=pymongo.ReadPreference.PRIMARY, timeout_millis=30000):
- """
- Returns a pymongo.MongoClient connecting to this fixture with a read
- preference of 'read_preference'.
+        """Return a pymongo.MongoClient connecting to this fixture with the specified 'read_preference'.
The PyMongo driver will wait up to 'timeout_millis' milliseconds
before concluding that the server is unavailable.
@@ -154,30 +136,23 @@ class Fixture(object):
class ReplFixture(Fixture):
- """
- Base class for all fixtures that support replication.
- """
+ """Base class for all fixtures that support replication."""
- REGISTERED_NAME = registry.LEAVE_UNREGISTERED
+ REGISTERED_NAME = registry.LEAVE_UNREGISTERED # type: ignore
AWAIT_REPL_TIMEOUT_MINS = 5
def get_primary(self):
- """
- Returns the primary of a replica set, or the master of a
- master-slave deployment.
- """
+ """Return the primary of a replica set."""
raise NotImplementedError("get_primary must be implemented by ReplFixture subclasses")
def get_secondaries(self):
- """
- Returns a list containing the secondaries of a replica set, or
- the slave of a master-slave deployment.
- """
+ """Return a list containing the secondaries of a replica set."""
raise NotImplementedError("get_secondaries must be implemented by ReplFixture subclasses")
def retry_until_wtimeout(self, insert_fn):
- """
+        """Retry until wtimeout is reached.
+
Given a callback function representing an insert operation on
the primary, handle any connection failures, and keep retrying
the operation for up to 'AWAIT_REPL_TIMEOUT_MINS' minutes.
@@ -221,9 +196,11 @@ class NoOpFixture(Fixture):
REGISTERED_NAME = "NoOpFixture"
def get_internal_connection_string(self):
+ """Return the internal connection string."""
return None
def get_driver_connection_url(self):
+ """Return the driver connection URL."""
return None
@@ -231,7 +208,7 @@ class FixtureTeardownHandler(object):
"""A helper class used to teardown nodes inside a cluster and keep track of errors."""
def __init__(self, logger):
- """Initializes a FixtureTeardownHandler.
+ """Initialize a FixtureTeardownHandler.
Args:
logger: A logger to use to log teardown activity.
@@ -241,19 +218,18 @@ class FixtureTeardownHandler(object):
self._message = None
def was_successful(self):
- """Indicates whether the teardowns performed by this instance were all successful."""
+ """Indicate whether the teardowns performed by this instance were all successful."""
return self._success
def get_error_message(self):
- """
- Retrieves the combined error message for all the teardown failures or None if all the
- teardowns were successful.
+ """Retrieve the combined error message for all the teardown failures.
+
+ Return None if all the teardowns were successful.
"""
return self._message
- def teardown(self, fixture, name):
- """
- Tears down the given fixture and logs errors instead of raising a ServerFailure exception.
+ def teardown(self, fixture, name): # noqa: D406,D407,D411,D413
+ """Tear down the given fixture and log errors instead of raising a ServerFailure exception.
Args:
fixture: The fixture to tear down.
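The registry metaclass referenced above records each Fixture subclass under its REGISTERED_NAME so that make_fixture() can build instances by name. A condensed Python 2 sketch of the pattern (not the actual registry module):

    FIXTURES = {}

    def make_registry_metaclass(registry):
        class RegistryMeta(type):
            def __new__(mcs, name, bases, attrs):
                cls = type.__new__(mcs, name, bases, attrs)
                registry[attrs.get("REGISTERED_NAME", name)] = cls
                return cls
        return RegistryMeta

    class Fixture(object):
        # Python 2 metaclass syntax, as used by the codebase at this commit.
        __metaclass__ = make_registry_metaclass(FIXTURES)
        REGISTERED_NAME = "Fixture"

    class NoOpFixture(Fixture):
        REGISTERED_NAME = "NoOpFixture"

    def make_fixture(class_name, *args, **kwargs):
        if class_name not in FIXTURES:
            raise ValueError("Unknown fixture class '%s'" % class_name)
        return FIXTURES[class_name](*args, **kwargs)

    print(make_fixture("NoOpFixture"))  # a NoOpFixture instance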
diff --git a/buildscripts/resmokelib/testing/fixtures/replicaset.py b/buildscripts/resmokelib/testing/fixtures/replicaset.py
index a554c6a7044..95f3b067cd2 100644
--- a/buildscripts/resmokelib/testing/fixtures/replicaset.py
+++ b/buildscripts/resmokelib/testing/fixtures/replicaset.py
@@ -1,6 +1,4 @@
-"""
-Replica set fixture for executing JSTests against.
-"""
+"""Replica set fixture for executing JSTests against."""
from __future__ import absolute_import
@@ -17,19 +15,19 @@ from ... import errors
from ... import utils
-class ReplicaSetFixture(interface.ReplFixture):
- """
- Fixture which provides JSTests with a replica set to run against.
- """
+class ReplicaSetFixture(interface.ReplFixture): # pylint: disable=too-many-instance-attributes
+ """Fixture which provides JSTests with a replica set to run against."""
# Error response codes copied from mongo/base/error_codes.err.
_NODE_NOT_FOUND = 74
- def __init__(self, logger, job_num, mongod_executable=None, mongod_options=None,
- dbpath_prefix=None, preserve_dbpath=False, num_nodes=2,
- start_initial_sync_node=False, write_concern_majority_journal_default=None,
- auth_options=None, replset_config_options=None, voting_secondaries=None,
- all_nodes_electable=False, use_replica_set_connection_string=None):
+ def __init__( # pylint: disable=too-many-arguments
+ self, logger, job_num, mongod_executable=None, mongod_options=None, dbpath_prefix=None,
+ preserve_dbpath=False, num_nodes=2, start_initial_sync_node=False,
+ write_concern_majority_journal_default=None, auth_options=None,
+ replset_config_options=None, voting_secondaries=None, all_nodes_electable=False,
+ use_replica_set_connection_string=None):
+ """Initialize ReplicaSetFixture."""
interface.ReplFixture.__init__(self, logger, job_num, dbpath_prefix=dbpath_prefix)
@@ -71,7 +69,8 @@ class ReplicaSetFixture(interface.ReplFixture):
self.initial_sync_node = None
self.initial_sync_node_idx = -1
- def setup(self):
+ def setup(self): # pylint: disable=too-many-branches,too-many-statements
+ """Set up the replica set."""
self.replset_name = self.mongod_options.get("replSet", "rs")
if not self.nodes:
@@ -113,7 +112,7 @@ class ReplicaSetFixture(interface.ReplFixture):
"hidden": 1, "votes": 0
})
- config = {"_id": self.replset_name}
+ repl_config = {"_id": self.replset_name}
client = self.nodes[0].mongo_client()
if self.auth_options is not None:
@@ -127,33 +126,33 @@ class ReplicaSetFixture(interface.ReplFixture):
return
if self.write_concern_majority_journal_default is not None:
- config[
+ repl_config[
"writeConcernMajorityJournalDefault"] = self.write_concern_majority_journal_default
else:
server_status = client.admin.command({"serverStatus": 1})
cmd_line_opts = client.admin.command({"getCmdLineOpts": 1})
if not (server_status["storageEngine"]["persistent"] and cmd_line_opts["parsed"].get(
"storage", {}).get("journal", {}).get("enabled", True)):
- config["writeConcernMajorityJournalDefault"] = False
+ repl_config["writeConcernMajorityJournalDefault"] = False
if self.replset_config_options.get("configsvr", False):
- config["configsvr"] = True
+ repl_config["configsvr"] = True
if self.replset_config_options.get("settings"):
replset_settings = self.replset_config_options["settings"]
- config["settings"] = replset_settings
+ repl_config["settings"] = replset_settings
# If secondaries vote, all nodes are not electable, and no election timeout was specified,
# increase the election timeout to 24 hours to prevent elections.
if self.voting_secondaries and not self.all_nodes_electable:
- config.setdefault("settings", {})
- if "electionTimeoutMillis" not in config["settings"]:
- config["settings"]["electionTimeoutMillis"] = 24 * 60 * 60 * 1000
+ repl_config.setdefault("settings", {})
+ if "electionTimeoutMillis" not in repl_config["settings"]:
+ repl_config["settings"]["electionTimeoutMillis"] = 24 * 60 * 60 * 1000
# Start up a single node replica set then reconfigure to the correct size (if the config
# contains more than 1 node), so the primary is elected more quickly.
- config["members"] = [members[0]]
- self.logger.info("Issuing replSetInitiate command: %s", config)
- self._configure_repl_set(client, {"replSetInitiate": config})
+ repl_config["members"] = [members[0]]
+ self.logger.info("Issuing replSetInitiate command: %s", repl_config)
+ self._configure_repl_set(client, {"replSetInitiate": repl_config})
self._await_primary()
if self.nodes[1:]:
@@ -161,10 +160,10 @@ class ReplicaSetFixture(interface.ReplFixture):
# command.
for node in self.nodes[1:]:
node.await_ready()
- config["version"] = 2
- config["members"] = members
- self.logger.info("Issuing replSetReconfig command: %s", config)
- self._configure_repl_set(client, {"replSetReconfig": config})
+ repl_config["version"] = 2
+ repl_config["members"] = members
+ self.logger.info("Issuing replSetReconfig command: %s", repl_config)
+ self._configure_repl_set(client, {"replSetReconfig": repl_config})
self._await_secondaries()
def _configure_repl_set(self, client, cmd_obj):
@@ -194,6 +193,7 @@ class ReplicaSetFixture(interface.ReplFixture):
time.sleep(5) # Wait a little bit before trying again.
def await_ready(self):
+        """Wait for the replica set to be ready."""
self._await_primary()
self._await_secondaries()
@@ -254,6 +254,7 @@ class ReplicaSetFixture(interface.ReplFixture):
raise errors.ServerFailure(teardown_handler.get_error_message())
def is_running(self):
+ """Return True if all nodes in the replica set are running."""
running = all(node.is_running() for node in self.nodes)
if self.initial_sync_node:
@@ -261,7 +262,8 @@ class ReplicaSetFixture(interface.ReplFixture):
return running
- def get_primary(self, timeout_secs=30):
+ def get_primary(self, timeout_secs=30): # pylint: disable=arguments-differ
+ """Return the primary from a replica set."""
if not self.all_nodes_electable:
# The primary is always the first element of the 'nodes' list because all other members
# of the replica set are configured with priority=0.
@@ -299,17 +301,16 @@ class ReplicaSetFixture(interface.ReplFixture):
raise errors.ServerFailure(msg)
def get_secondaries(self):
+ """Return a list of secondaries from the replica set."""
primary = self.get_primary()
return [node for node in self.nodes if node.port != primary.port]
def get_initial_sync_node(self):
+        """Return the initial sync node from the replica set."""
return self.initial_sync_node
def _new_mongod(self, index, replset_name):
- """
- Returns a standalone.MongoDFixture configured to be used as a
- replica-set member of 'replset_name'.
- """
+        """Return a standalone.MongoDFixture configured to be used as a replica-set member."""
mongod_logger = self._get_logger_for_mongod(index)
mongod_options = self.mongod_options.copy()
@@ -321,9 +322,9 @@ class ReplicaSetFixture(interface.ReplFixture):
mongod_options=mongod_options, preserve_dbpath=self.preserve_dbpath)
def _get_logger_for_mongod(self, index):
- """
- Returns a new logging.Logger instance for use as the primary, secondary, or initial
- sync member of a replica-set.
+ """Return a new logging.Logger instance.
+
+ The instance is used as the primary, secondary, or initial sync member of a replica-set.
"""
if index == self.initial_sync_node_idx:
@@ -339,6 +340,7 @@ class ReplicaSetFixture(interface.ReplFixture):
return self.logger.new_fixture_node_logger(node_name)
def get_internal_connection_string(self):
+ """Return the internal connection string."""
if self.replset_name is None:
raise ValueError("Must call setup() before calling get_internal_connection_string()")
@@ -348,6 +350,7 @@ class ReplicaSetFixture(interface.ReplFixture):
return self.replset_name + "/" + ",".join(conn_strs)
def get_driver_connection_url(self):
+ """Return the driver connection URL."""
if self.replset_name is None:
raise ValueError("Must call setup() before calling get_driver_connection_url()")
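The initiate-then-reconfigure flow above, reduced to a hedged pymongo sketch; the hosts, ports, and replica-set name are assumptions, and each mongod is assumed to be running with --replSet rs:

    from pymongo import MongoClient

    client = MongoClient("localhost", 20000)
    members = [{"_id": i, "host": "localhost:%d" % (20000 + i)} for i in range(3)]

    # Start with a single member so the primary is elected quickly.
    repl_config = {"_id": "rs", "members": [members[0]]}
    client.admin.command({"replSetInitiate": repl_config})

    # Then grow the set to its full size with a reconfig.
    repl_config["version"] = 2
    repl_config["members"] = members
    client.admin.command({"replSetReconfig": repl_config})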
diff --git a/buildscripts/resmokelib/testing/fixtures/shardedcluster.py b/buildscripts/resmokelib/testing/fixtures/shardedcluster.py
index 5e94b133708..5e5662fbd7f 100644
--- a/buildscripts/resmokelib/testing/fixtures/shardedcluster.py
+++ b/buildscripts/resmokelib/testing/fixtures/shardedcluster.py
@@ -1,6 +1,4 @@
-"""
-Sharded cluster fixture for executing JSTests against.
-"""
+"""Sharded cluster fixture for executing JSTests against."""
from __future__ import absolute_import
@@ -20,24 +18,19 @@ from ... import utils
from ...utils import registry
-class ShardedClusterFixture(interface.Fixture):
- """
- Fixture which provides JSTests with a sharded cluster to run
- against.
- """
+class ShardedClusterFixture(interface.Fixture): # pylint: disable=too-many-instance-attributes
+ """Fixture which provides JSTests with a sharded cluster to run against."""
_CONFIGSVR_REPLSET_NAME = "config-rs"
_SHARD_REPLSET_NAME_PREFIX = "shard-rs"
- def __init__(self, logger, job_num, mongos_executable=None, mongos_options=None,
- mongod_executable=None, mongod_options=None, dbpath_prefix=None,
- preserve_dbpath=False, num_shards=1, num_rs_nodes_per_shard=None,
- separate_configsvr=True, enable_sharding=None, enable_balancer=True,
- auth_options=None, configsvr_options=None, shard_options=None):
- """
- Initializes ShardedClusterFixture with the different options to
- the mongod and mongos processes.
- """
+ def __init__( # pylint: disable=too-many-arguments,too-many-locals
+ self, logger, job_num, mongos_executable=None, mongos_options=None,
+ mongod_executable=None, mongod_options=None, dbpath_prefix=None, preserve_dbpath=False,
+ num_shards=1, num_rs_nodes_per_shard=None, separate_configsvr=True,
+ enable_sharding=None, enable_balancer=True, auth_options=None, configsvr_options=None,
+ shard_options=None):
+ """Initialize ShardedClusterFixture with different options for the cluster processes."""
interface.Fixture.__init__(self, logger, job_num, dbpath_prefix=dbpath_prefix)
@@ -65,6 +58,7 @@ class ShardedClusterFixture(interface.Fixture):
self.shards = []
def setup(self):
+ """Set up the sharded cluster."""
if self.separate_configsvr:
if self.configsvr is None:
self.configsvr = self._new_configsvr()
@@ -87,6 +81,7 @@ class ShardedClusterFixture(interface.Fixture):
shard.setup()
def await_ready(self):
+ """Block until the fixture can be used for testing."""
# Wait for the config server
if self.configsvr is not None:
self.configsvr.await_ready()
@@ -130,9 +125,7 @@ class ShardedClusterFixture(interface.Fixture):
primary.admin.command({"refreshLogicalSessionCacheNow": 1})
def _do_teardown(self):
- """
- Shuts down the sharded cluster.
- """
+ """Shut down the sharded cluster."""
self.logger.info("Stopping all members of the sharded cluster...")
running_at_start = self.is_running()
@@ -158,28 +151,24 @@ class ShardedClusterFixture(interface.Fixture):
raise errors.ServerFailure(teardown_handler.get_error_message())
def is_running(self):
- """
- Returns true if the config server, all shards, and the mongos
- are all still operating, and false otherwise.
- """
+        """Return true if all nodes in the cluster are still operating."""
return (self.configsvr is not None and self.configsvr.is_running()
and all(shard.is_running() for shard in self.shards) and self.mongos is not None
and self.mongos.is_running())
def get_internal_connection_string(self):
+ """Return the internal connection string."""
if self.mongos is None:
raise ValueError("Must call setup() before calling get_internal_connection_string()")
return self.mongos.get_internal_connection_string()
def get_driver_connection_url(self):
+ """Return the driver connection URL."""
return "mongodb://" + self.get_internal_connection_string()
def _new_configsvr(self):
- """
- Returns a replicaset.ReplicaSetFixture configured to be used as
- the config server of a sharded cluster.
- """
+ """Return a replicaset.ReplicaSetFixture configured as the config server."""
mongod_logger = self.logger.new_fixture_node_logger("configsvr")
@@ -207,10 +196,7 @@ class ShardedClusterFixture(interface.Fixture):
**configsvr_options)
def _new_rs_shard(self, index, num_rs_nodes_per_shard):
- """
- Returns a replicaset.ReplicaSetFixture configured to be used as a
- shard in a sharded cluster.
- """
+ """Return a replicaset.ReplicaSetFixture configured as a shard in a sharded cluster."""
mongod_logger = self.logger.new_fixture_node_logger("shard{}".format(index))
@@ -236,10 +222,7 @@ class ShardedClusterFixture(interface.Fixture):
replset_config_options=replset_config_options, **shard_options)
def _new_standalone_shard(self, index):
- """
- Returns a standalone.MongoDFixture configured to be used as a
- shard in a sharded cluster.
- """
+ """Return a standalone.MongoDFixture configured as a shard in a sharded cluster."""
mongod_logger = self.logger.new_fixture_node_logger("shard{}".format(index))
@@ -258,10 +241,7 @@ class ShardedClusterFixture(interface.Fixture):
mongod_options=mongod_options, preserve_dbpath=preserve_dbpath, **shard_options)
def _new_mongos(self):
- """
- Returns a _MongoSFixture configured to be used as the mongos for
- a sharded cluster.
- """
+ """Return a _MongoSFixture configured to be used as the mongos for a sharded cluster."""
mongos_logger = self.logger.new_fixture_node_logger("mongos")
@@ -277,11 +257,9 @@ class ShardedClusterFixture(interface.Fixture):
def _add_shard(self, client, shard):
"""
- Add the specified program as a shard by executing the addShard
- command.
+ Add the specified program as a shard by executing the addShard command.
- See https://docs.mongodb.org/manual/reference/command/addShard
- for more details.
+ See https://docs.mongodb.org/manual/reference/command/addShard for more details.
"""
connection_string = shard.get_internal_connection_string()
@@ -290,13 +268,12 @@ class ShardedClusterFixture(interface.Fixture):
class _MongoSFixture(interface.Fixture):
- """
- Fixture which provides JSTests with a mongos to connect to.
- """
+ """Fixture which provides JSTests with a mongos to connect to."""
- REGISTERED_NAME = registry.LEAVE_UNREGISTERED
+ REGISTERED_NAME = registry.LEAVE_UNREGISTERED # type: ignore
def __init__(self, logger, job_num, mongos_executable=None, mongos_options=None):
+ """Initialize _MongoSFixture."""
interface.Fixture.__init__(self, logger, job_num)
@@ -309,6 +286,7 @@ class _MongoSFixture(interface.Fixture):
self.port = None
def setup(self):
+        """Set up the mongos."""
if "port" not in self.mongos_options:
self.mongos_options["port"] = core.network.PortAllocator.next_fixture_port(self.job_num)
self.port = self.mongos_options["port"]
@@ -327,6 +305,7 @@ class _MongoSFixture(interface.Fixture):
self.mongos = mongos
def await_ready(self):
+ """Block until the fixture can be used for testing."""
deadline = time.time() + standalone.MongoDFixture.AWAIT_READY_TIMEOUT_SECS
# Wait until the mongos is accepting connections. The retry logic is necessary to support
@@ -383,13 +362,16 @@ class _MongoSFixture(interface.Fixture):
self.port, self.mongos.pid, exit_code))
def is_running(self):
+        """Return true if the mongos is still operating."""
return self.mongos is not None and self.mongos.poll() is None
def get_internal_connection_string(self):
+ """Return the internal connection string."""
if self.mongos is None:
raise ValueError("Must call setup() before calling get_internal_connection_string()")
return "localhost:%d" % self.port
def get_driver_connection_url(self):
+ """Return the driver connection URL."""
return "mongodb://" + self.get_internal_connection_string()
diff --git a/buildscripts/resmokelib/testing/fixtures/standalone.py b/buildscripts/resmokelib/testing/fixtures/standalone.py
index 0d761478cd8..3cae2e7884f 100644
--- a/buildscripts/resmokelib/testing/fixtures/standalone.py
+++ b/buildscripts/resmokelib/testing/fixtures/standalone.py
@@ -1,6 +1,4 @@
-"""
-Standalone mongod fixture for executing JSTests against.
-"""
+"""Standalone mongod fixture for executing JSTests against."""
from __future__ import absolute_import
@@ -20,15 +18,14 @@ from ... import utils
class MongoDFixture(interface.Fixture):
- """
- Fixture which provides JSTests with a standalone mongod to run
- against.
- """
+ """Fixture which provides JSTests with a standalone mongod to run against."""
AWAIT_READY_TIMEOUT_SECS = 300
- def __init__(self, logger, job_num, mongod_executable=None, mongod_options=None,
- dbpath_prefix=None, preserve_dbpath=False):
+ def __init__( # pylint: disable=too-many-arguments
+ self, logger, job_num, mongod_executable=None, mongod_options=None, dbpath_prefix=None,
+ preserve_dbpath=False):
+ """Initialize MongoDFixture with different options for the mongod process."""
interface.Fixture.__init__(self, logger, job_num, dbpath_prefix=dbpath_prefix)
@@ -51,6 +48,7 @@ class MongoDFixture(interface.Fixture):
self.port = None
def setup(self):
+ """Set up the mongod."""
if not self.preserve_dbpath:
shutil.rmtree(self._dbpath, ignore_errors=True)
@@ -78,6 +76,7 @@ class MongoDFixture(interface.Fixture):
self.mongod = mongod
def await_ready(self):
+ """Block until the fixture can be used for testing."""
deadline = time.time() + MongoDFixture.AWAIT_READY_TIMEOUT_SECS
# Wait until the mongod is accepting connections. The retry logic is necessary to support
@@ -134,17 +133,20 @@ class MongoDFixture(interface.Fixture):
self.port, self.mongod.pid, exit_code))
def is_running(self):
+ """Return true if the mongod is still operating."""
return self.mongod is not None and self.mongod.poll() is None
def get_dbpath_prefix(self):
- """ Returns the _dbpath, as this is the root of the data directory. """
+ """Return the _dbpath, as this is the root of the data directory."""
return self._dbpath
def get_internal_connection_string(self):
+ """Return the internal connection string."""
if self.mongod is None:
raise ValueError("Must call setup() before calling get_internal_connection_string()")
return "localhost:%d" % self.port
def get_driver_connection_url(self):
+ """Return the driver connection URL."""
return "mongodb://" + self.get_internal_connection_string()
diff --git a/buildscripts/resmokelib/testing/fixtures/yesfixture.py b/buildscripts/resmokelib/testing/fixtures/yesfixture.py
index 6ba62c58e79..618ba8a48cc 100644
--- a/buildscripts/resmokelib/testing/fixtures/yesfixture.py
+++ b/buildscripts/resmokelib/testing/fixtures/yesfixture.py
@@ -1,6 +1,4 @@
-"""
-Fixture for generating lots of log messages.
-"""
+"""Fixture for generating lots of log messages."""
from __future__ import absolute_import
@@ -10,18 +8,18 @@ from . import interface
from ...core import programs
-class YesFixture(interface.Fixture):
- """
- Fixture which spawns potentially several 'yes' executables to generate lots of log messages.
- """
+class YesFixture(interface.Fixture): # pylint: disable=abstract-method
+ """Fixture which spawns several 'yes' executables to generate lots of log messages."""
def __init__(self, logger, job_num, num_instances=1, message_length=100):
+ """Initialize YesFixture."""
interface.Fixture.__init__(self, logger, job_num)
self.__processes = [None] * num_instances
self.__message = "y" * message_length
def setup(self):
+ """Start the yes processes."""
for (i, process) in enumerate(self.__processes):
process = self._make_process(i)
@@ -65,4 +63,5 @@ class YesFixture(interface.Fixture):
return success
def is_running(self):
+ """Return true if the yes processes are running."""
return all(process is not None and process.poll() is None for process in self.__processes)
diff --git a/buildscripts/resmokelib/testing/hook_test_archival.py b/buildscripts/resmokelib/testing/hook_test_archival.py
index 315247261d6..4189dd0ac43 100644
--- a/buildscripts/resmokelib/testing/hook_test_archival.py
+++ b/buildscripts/resmokelib/testing/hook_test_archival.py
@@ -1,6 +1,4 @@
-"""
-Enables supports for archiving tests or hooks.
-"""
+"""Enable support for archiving tests or hooks."""
from __future__ import absolute_import
@@ -13,11 +11,10 @@ from ..utils import globstar
class HookTestArchival(object):
- """
- Archives hooks and tests to S3.
- """
+ """Archive hooks and tests to S3."""
def __init__(self, suite, hooks, archive_instance, archive_config):
+ """Initialize HookTestArchival."""
self.archive_instance = archive_instance
archive_config = utils.default_if_none(archive_config, {})
@@ -45,11 +42,11 @@ class HookTestArchival(object):
self._lock = threading.Lock()
def _should_archive(self, success):
- """ Return True if failed test or 'on_success' is True. """
+ """Return True if failed test or 'on_success' is True."""
return not success or self.on_success
def _archive_hook(self, logger, hook, test, success):
- """ Helper to archive hooks. """
+ """Provide helper to archive hooks."""
hook_match = hook.REGISTERED_NAME in self.hooks
if not hook_match or not self._should_archive(success):
return
@@ -58,7 +55,7 @@ class HookTestArchival(object):
self._archive_hook_or_test(logger, test_name, test)
def _archive_test(self, logger, test, success):
- """ Helper to archive tests. """
+ """Provide helper to archive tests."""
test_name = test.test_name
test_match = False
for arch_test in self.tests:
@@ -72,7 +69,7 @@ class HookTestArchival(object):
self._archive_hook_or_test(logger, test_name, test)
def archive(self, logger, test, success, hook=None):
- """ Archives data files for hooks or tests. """
+ """Archive data files for hooks or tests."""
if not config.ARCHIVE_FILE or not self.archive_instance:
return
if hook:
@@ -81,7 +78,7 @@ class HookTestArchival(object):
self._archive_test(logger, test, success)
def _archive_hook_or_test(self, logger, test_name, test):
- """ Trigger archive of data files for a test or hook. """
+ """Trigger archive of data files for a test or hook."""
with self._lock:
# Test repeat number is how many times the particular test has been archived.
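The _should_archive predicate above reduces to one boolean rule: archive every failed test, and archive passing tests only when 'on_success' is configured. A toy restatement as a hypothetical standalone function:

def should_archive(success, on_success=False):
    # Failed tests are always archived; passing tests only on request.
    return not success or on_success

assert should_archive(False)                  # failed test: archive
assert not should_archive(True)               # passing test, default: skip
assert should_archive(True, on_success=True)  # passing test, opted in: archive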
diff --git a/buildscripts/resmokelib/testing/hooks/__init__.py b/buildscripts/resmokelib/testing/hooks/__init__.py
index 87efcd1c964..82772aa25da 100644
--- a/buildscripts/resmokelib/testing/hooks/__init__.py
+++ b/buildscripts/resmokelib/testing/hooks/__init__.py
@@ -12,4 +12,4 @@ from ...utils import autoloader as _autoloader
# We dynamically load all modules in the hooks/ package so that any Hook classes declared
# within them are automatically registered.
-_autoloader.load_all_modules(name=__name__, path=__path__)
+_autoloader.load_all_modules(name=__name__, path=__path__) # type: ignore
diff --git a/buildscripts/resmokelib/testing/hooks/check_primary.py b/buildscripts/resmokelib/testing/hooks/check_primary.py
index c6beabbee84..f2624496d54 100644
--- a/buildscripts/resmokelib/testing/hooks/check_primary.py
+++ b/buildscripts/resmokelib/testing/hooks/check_primary.py
@@ -1,6 +1,4 @@
-"""
-Testing hook for verifying that the primary has not stepped down or changed.
-"""
+"""Test hook for verifying that the primary has not stepped down or changed."""
from __future__ import absolute_import
@@ -15,6 +13,7 @@ class CheckPrimary(interface.Hook):
"""Hook that checks that the primary is still primary after the test."""
def __init__(self, hook_logger, rs_fixture):
+ """Initialize CheckPrimary."""
description = "Verify that the primary has not stepped down or changed"
interface.Hook.__init__(self, hook_logger, rs_fixture, description)
@@ -39,9 +38,11 @@ class CheckPrimary(interface.Hook):
raise no_primary_err
def before_test(self, test, test_report):
+ """Before test hook primary."""
self._primary_url = self._get_primary_url()
def after_test(self, test, test_report):
+ """After test hook primary."""
new_primary_url = self._get_primary_url()
if new_primary_url != self._primary_url:
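The CheckPrimary hook amounts to snapshotting the primary's URL in before_test and comparing it in after_test. The comparison logic, restated as a hypothetical standalone helper (the real hook resolves the URL through the fixture and reports failures with resmoke's own error types):

def check_primary_unchanged(get_primary_url, run_test):
    # Snapshot the primary before the test, re-resolve it afterwards, and
    # fail if the two differ.
    primary_before = get_primary_url()
    run_test()
    primary_after = get_primary_url()
    if primary_after != primary_before:
        raise AssertionError("primary changed from %s to %s during the test"
                             % (primary_before, primary_after))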
diff --git a/buildscripts/resmokelib/testing/hooks/cleanup.py b/buildscripts/resmokelib/testing/hooks/cleanup.py
index 39011ec90fd..ebbda2f1edb 100644
--- a/buildscripts/resmokelib/testing/hooks/cleanup.py
+++ b/buildscripts/resmokelib/testing/hooks/cleanup.py
@@ -1,6 +1,4 @@
-"""
-Testing hook for cleaning up data files created by the fixture.
-"""
+"""Test hook for cleaning up data files created by the fixture."""
from __future__ import absolute_import
@@ -10,14 +8,15 @@ from . import interface
class CleanEveryN(interface.Hook):
- """
- Restarts the fixture after it has ran 'n' tests.
+ """Restart the fixture after it has ran 'n' tests.
+
On mongod-related fixtures, this will clear the dbpath.
"""
DEFAULT_N = 20
def __init__(self, hook_logger, fixture, n=DEFAULT_N):
+ """Initialize CleanEveryN."""
description = "CleanEveryN (restarts the fixture after running `n` tests)"
interface.Hook.__init__(self, hook_logger, fixture, description)
@@ -27,10 +26,11 @@ class CleanEveryN(interface.Hook):
" the fixture after each test instead of after every %d.", n)
n = 1
- self.n = n
+ self.n = n # pylint: disable=invalid-name
self.tests_run = 0
def after_test(self, test, test_report):
+ """After test cleanup."""
self.tests_run += 1
if self.tests_run < self.n:
return
@@ -42,7 +42,10 @@ class CleanEveryN(interface.Hook):
class CleanEveryNTestCase(interface.DynamicTestCase):
+ """CleanEveryNTestCase class."""
+
def run_test(self):
+ """Execute test hook."""
try:
self.logger.info("%d tests have been run against the fixture, stopping it...",
self._hook.tests_run)
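CleanEveryN's counting logic is simple: increment on every after_test and restart the fixture once the counter reaches n. A reduced sketch with hypothetical names (the real hook routes the restart through CleanEveryNTestCase so it appears in the test report):

class MiniCleanEveryN(object):
    def __init__(self, fixture, n=20):
        self.fixture = fixture
        self.n = n
        self.tests_run = 0

    def after_test(self):
        self.tests_run += 1
        if self.tests_run < self.n:
            return
        # Every n-th test: restart the fixture, which clears the dbpath on
        # mongod-related fixtures.
        self.tests_run = 0
        self.fixture.teardown()
        self.fixture.setup()
        self.fixture.await_ready()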
diff --git a/buildscripts/resmokelib/testing/hooks/combine_benchmark_results.py b/buildscripts/resmokelib/testing/hooks/combine_benchmark_results.py
index 5df43363330..48d476ceb9a 100644
--- a/buildscripts/resmokelib/testing/hooks/combine_benchmark_results.py
+++ b/buildscripts/resmokelib/testing/hooks/combine_benchmark_results.py
@@ -12,7 +12,8 @@ from buildscripts.resmokelib.testing.hooks import interface
class CombineBenchmarkResults(interface.Hook):
- """
+ """CombineBenchmarkResults class.
+
The CombineBenchmarkResults hook combines test results from
individual benchmark files to a single file. This is useful for
generating the json file to feed into the Evergreen performance
@@ -22,6 +23,7 @@ class CombineBenchmarkResults(interface.Hook):
DESCRIPTION = "Combine JSON results from individual benchmarks"
def __init__(self, hook_logger, fixture):
+ """Initialize CombineBenchmarkResults."""
interface.Hook.__init__(self, hook_logger, fixture, CombineBenchmarkResults.DESCRIPTION)
self.report_file = _config.PERF_REPORT_FILE
@@ -35,27 +37,30 @@ class CombineBenchmarkResults(interface.Hook):
def _strftime(time):
return time.strftime("%Y-%m-%dT%H:%M:%SZ")
- def after_test(self, test_case, test_report):
+ def after_test(self, test, test_report):
+ """Update test report."""
if self.report_file is None:
return
- bm_report_path = test_case.report_name()
+ bm_report_path = test.report_name()
with open(bm_report_path, "r") as report_file:
report_dict = json.load(report_file)
self._parse_report(report_dict)
def before_suite(self, test_report):
+ """Set suite start time."""
self.create_time = datetime.datetime.now()
def after_suite(self, test_report):
+ """Update test report."""
if self.report_file is None:
return
self.end_time = datetime.datetime.now()
report = self._generate_perf_plugin_report()
- with open(self.report_file, "w") as f:
- json.dump(report, f)
+ with open(self.report_file, "w") as fh:
+ json.dump(report, fh)
def _generate_perf_plugin_report(self):
"""Format the data to look like a perf plugin report."""
@@ -68,8 +73,7 @@ class CombineBenchmarkResults(interface.Hook):
for name, report in self.benchmark_reports.items():
test_report = {
- "name": name,
- "context": report.context._asdict(),
+ "name": name, "context": report.context._asdict(),
"results": report.generate_perf_plugin_dict()
}
@@ -93,15 +97,13 @@ class CombineBenchmarkResults(interface.Hook):
# Capture information from a Benchmark name in a logical format.
-_BenchmarkName = collections.namedtuple("_BenchmarkName", [
- "base_name",
- "thread_count",
- "statistic_type"
-]);
+_BenchmarkName = collections.namedtuple("_BenchmarkName",
+ ["base_name", "thread_count", "statistic_type"])
class _BenchmarkThreadsReport(object):
- """
+ """_BenchmarkThreadsReport class.
+
Class representation of a report for all thread levels of a single
benchmark test. Each report is designed to correspond to one graph
in the Evergreen perf plugin.
@@ -127,10 +129,11 @@ class _BenchmarkThreadsReport(object):
]
}
"""
+
CONTEXT_FIELDS = [
"date", "cpu_scaling_enabled", "num_cpus", "mhz_per_cpu", "library_build_type"
]
- Context = collections.namedtuple("Context", CONTEXT_FIELDS)
+ Context = collections.namedtuple("Context", CONTEXT_FIELDS) # type: ignore
def __init__(self, context_dict):
self.context = self.Context(**context_dict)
@@ -139,11 +142,11 @@ class _BenchmarkThreadsReport(object):
self.thread_benchmark_map = collections.defaultdict(list)
def add_report(self, bm_name_obj, report):
+ """Add to report."""
self.thread_benchmark_map[bm_name_obj.thread_count].append(report)
def generate_perf_plugin_dict(self):
- """
- Generate perf plugin data points of the following format:
+ """Generate perf plugin data points of the following format.
"1": {
"error_values": [
diff --git a/buildscripts/resmokelib/testing/hooks/dbhash.py b/buildscripts/resmokelib/testing/hooks/dbhash.py
index fdee7bb9f63..40caa5149c6 100644
--- a/buildscripts/resmokelib/testing/hooks/dbhash.py
+++ b/buildscripts/resmokelib/testing/hooks/dbhash.py
@@ -1,6 +1,4 @@
-"""
-Testing hook for verifying data consistency across a replica set.
-"""
+"""Test hook for verifying data consistency across a replica set."""
from __future__ import absolute_import
@@ -10,13 +8,16 @@ from . import jsfile
class CheckReplDBHash(jsfile.DataConsistencyHook):
- """
- Checks that the dbhashes of all non-local databases and non-replicated system collections
+ """Check if the dbhashes match.
+
+ This includes dbhashes for all non-local databases and non-replicated system collections that
match on the primary and secondaries.
"""
- def __init__(self, hook_logger, fixture, shell_options=None):
+ def __init__( # pylint: disable=super-init-not-called
+ self, hook_logger, fixture, shell_options=None):
+ """Initialize CheckReplDBHash."""
description = "Check dbhashes of all replica set or master/slave members"
js_filename = os.path.join("jstests", "hooks", "run_check_repl_dbhash.js")
- jsfile.JSHook.__init__(self, hook_logger, fixture, js_filename, description,
- shell_options=shell_options)
+ jsfile.JSHook.__init__( # pylint: disable=non-parent-init-called
+ self, hook_logger, fixture, js_filename, description, shell_options=shell_options)
diff --git a/buildscripts/resmokelib/testing/hooks/initialsync.py b/buildscripts/resmokelib/testing/hooks/initialsync.py
index 905d0a1e913..36ada61ab00 100644
--- a/buildscripts/resmokelib/testing/hooks/initialsync.py
+++ b/buildscripts/resmokelib/testing/hooks/initialsync.py
@@ -1,6 +1,4 @@
-"""
-Testing hook for verifying correctness of initial sync.
-"""
+"""Test hook for verifying correctness of initial sync."""
from __future__ import absolute_import
@@ -18,7 +16,8 @@ from ... import errors
class BackgroundInitialSync(interface.Hook):
- """
+ """BackgroundInitialSync class.
+
After every test, this hook checks if a background node has finished initial sync and if so,
validates it, tears it down, and restarts it.
@@ -33,6 +32,7 @@ class BackgroundInitialSync(interface.Hook):
DEFAULT_N = cleanup.CleanEveryN.DEFAULT_N
def __init__(self, hook_logger, fixture, n=DEFAULT_N, shell_options=None):
+ """Initialize BackgroundInitialSync."""
if not isinstance(fixture, replicaset.ReplicaSetFixture):
raise ValueError("`fixture` must be an instance of ReplicaSetFixture, not {}".format(
fixture.__class__.__name__))
@@ -40,12 +40,13 @@ class BackgroundInitialSync(interface.Hook):
description = "Background Initial Sync"
interface.Hook.__init__(self, hook_logger, fixture, description)
- self.n = n
+ self.n = n # pylint: disable=invalid-name
self.tests_run = 0
self.random_restarts = 0
self._shell_options = shell_options
def after_test(self, test, test_report):
+ """After test execution."""
self.tests_run += 1
hook_test_case = BackgroundInitialSyncTestCase.create_after_test(
@@ -55,14 +56,18 @@ class BackgroundInitialSync(interface.Hook):
class BackgroundInitialSyncTestCase(jsfile.DynamicJSTestCase):
+ """BackgroundInitialSyncTestCase class."""
JS_FILENAME = os.path.join("jstests", "hooks", "run_initial_sync_node_validation.js")
- def __init__(self, logger, test_name, description, base_test_name, hook, shell_options=None):
+ def __init__( # pylint: disable=too-many-arguments
+ self, logger, test_name, description, base_test_name, hook, shell_options=None):
+ """Initialize BackgroundInitialSyncTestCase."""
jsfile.DynamicJSTestCase.__init__(self, logger, test_name, description, base_test_name,
hook, self.JS_FILENAME, shell_options)
def run_test(self):
+ """Execute test hook."""
sync_node = self.fixture.get_initial_sync_node()
sync_node_conn = sync_node.mongo_client()
@@ -96,7 +101,7 @@ class BackgroundInitialSyncTestCase(jsfile.DynamicJSTestCase):
if self._hook.random_restarts < 1 and random.random() < 0.2:
self.logger.info(
"randomly restarting initial sync in the middle of initial sync")
- self.__restart_init_sync(sync_node, sync_node_conn)
+ self.__restart_init_sync(sync_node)
self._hook.random_restarts += 1
return
except pymongo.errors.OperationFailure:
@@ -112,10 +117,10 @@ class BackgroundInitialSyncTestCase(jsfile.DynamicJSTestCase):
# Run data validation and dbhash checking.
self._js_test.run_test()
- self.__restart_init_sync(sync_node, sync_node_conn)
+ self.__restart_init_sync(sync_node)
# Restarts initial sync by shutting down the node, clearing its data, and restarting it.
- def __restart_init_sync(self, sync_node, sync_node_conn):
+ def __restart_init_sync(self, sync_node):
# Tear down and restart the initial sync node to start initial sync again.
sync_node.teardown()
@@ -125,7 +130,8 @@ class BackgroundInitialSyncTestCase(jsfile.DynamicJSTestCase):
class IntermediateInitialSync(interface.Hook):
- """
+ """IntermediateInitialSync class.
+
This hook accepts a parameter 'n' that specifies a number of tests after which it will start up
a node to initial sync, wait for replication to finish, and then validate the data.
@@ -135,6 +141,7 @@ class IntermediateInitialSync(interface.Hook):
DEFAULT_N = cleanup.CleanEveryN.DEFAULT_N
def __init__(self, hook_logger, fixture, n=DEFAULT_N):
+ """Initialize IntermediateInitialSync."""
if not isinstance(fixture, replicaset.ReplicaSetFixture):
raise ValueError("`fixture` must be an instance of ReplicaSetFixture, not {}".format(
fixture.__class__.__name__))
@@ -142,7 +149,7 @@ class IntermediateInitialSync(interface.Hook):
description = "Intermediate Initial Sync"
interface.Hook.__init__(self, hook_logger, fixture, description)
- self.n = n
+ self.n = n # pylint: disable=invalid-name
self.tests_run = 0
def _should_run_after_test(self):
@@ -156,6 +163,7 @@ class IntermediateInitialSync(interface.Hook):
return True
def after_test(self, test, test_report):
+ """After test execution."""
if not self._should_run_after_test():
return
@@ -166,14 +174,18 @@ class IntermediateInitialSync(interface.Hook):
class IntermediateInitialSyncTestCase(jsfile.DynamicJSTestCase):
+ """IntermediateInitialSyncTestCase class."""
JS_FILENAME = os.path.join("jstests", "hooks", "run_initial_sync_node_validation.js")
- def __init__(self, logger, test_name, description, base_test_name, hook):
+ def __init__( # pylint: disable=too-many-arguments
+ self, logger, test_name, description, base_test_name, hook):
+ """Initialize IntermediateInitialSyncTestCase."""
jsfile.DynamicJSTestCase.__init__(self, logger, test_name, description, base_test_name,
hook, self.JS_FILENAME)
def run_test(self):
+ """Execute test hook."""
sync_node = self.fixture.get_initial_sync_node()
sync_node_conn = sync_node.mongo_client()
diff --git a/buildscripts/resmokelib/testing/hooks/interface.py b/buildscripts/resmokelib/testing/hooks/interface.py
index 877b2cc565f..8e938355001 100644
--- a/buildscripts/resmokelib/testing/hooks/interface.py
+++ b/buildscripts/resmokelib/testing/hooks/interface.py
@@ -1,6 +1,4 @@
-"""
-Interface for customizing the behavior of a test fixture.
-"""
+"""Interface for customizing the behavior of a test fixture."""
from __future__ import absolute_import
@@ -11,13 +9,11 @@ from ... import errors
from ...logging import loggers
from ...utils import registry
-_HOOKS = {}
+_HOOKS = {} # type: ignore
def make_hook(class_name, *args, **kwargs):
- """
- Factory function for creating Hook instances.
- """
+ """Provide factory function for creating Hook instances."""
if class_name not in _HOOKS:
raise ValueError("Unknown hook class '%s'" % class_name)
@@ -26,18 +22,14 @@ def make_hook(class_name, *args, **kwargs):
class Hook(object):
- """
- The common interface all Hooks will inherit from.
- """
+ """Common interface all Hooks will inherit from."""
- __metaclass__ = registry.make_registry_metaclass(_HOOKS)
+ __metaclass__ = registry.make_registry_metaclass(_HOOKS) # type: ignore
REGISTERED_NAME = registry.LEAVE_UNREGISTERED
def __init__(self, hook_logger, fixture, description):
- """
- Initializes the Hook with the specified fixture.
- """
+ """Initialize the Hook with the specified fixture."""
if not isinstance(hook_logger, loggers.HookLogger):
raise TypeError("logger must be a HookLogger instance")
@@ -47,42 +39,38 @@ class Hook(object):
self.description = description
def before_suite(self, test_report):
- """
- The test runner calls this exactly once before they start
- running the suite.
- """
+ """Test runner calls this exactly once before they start running the suite."""
pass
def after_suite(self, test_report):
- """
- The test runner calls this exactly once after all tests have
- finished executing. Be sure to reset the behavior back to its
- original state so that it can be run again.
+ """Invoke by test runner calls this exactly once after all tests have finished executing.
+
+ Be sure to reset the behavior back to its original state so that it can be run again.
"""
pass
def before_test(self, test, test_report):
- """
- Each test will call this before it executes.
- """
+ """Each test will call this before it executes."""
pass
def after_test(self, test, test_report):
- """
- Each test will call this after it executes.
- """
+ """Each test will call this after it executes."""
pass
class DynamicTestCase(testcase.TestCase): # pylint: disable=abstract-method
- def __init__(self, logger, test_name, description, base_test_name, hook):
+ """DynamicTestCase class."""
+
+ def __init__( # pylint: disable=too-many-arguments
+ self, logger, test_name, description, base_test_name, hook):
+ """Initialize DynamicTestCase."""
testcase.TestCase.__init__(self, logger, "Hook", test_name)
self.description = description
self._hook = hook
self._base_test_name = base_test_name
def run_dynamic_test(self, test_report):
- """Helper method to run a dynamic test and update the test report."""
+ """Provide helper method to run a dynamic test and update the test report."""
test_report.startTest(self, dynamic=True)
try:
self.run_test()
@@ -102,11 +90,12 @@ class DynamicTestCase(testcase.TestCase): # pylint: disable=abstract-method
test_report.stopTest(self)
def as_command(self):
+ """Provide base method."""
return "(dynamic test case)"
@classmethod
def create_before_test(cls, logger, base_test, hook, *args, **kwargs):
- """Creates a hook dynamic test to be run before an existing test."""
+ """Create a hook dynamic test to be run before an existing test."""
base_test_name = base_test.short_name()
test_name = cls._make_test_name(base_test_name, hook)
description = "{} before running '{}'".format(hook.description, base_test_name)
@@ -114,7 +103,7 @@ class DynamicTestCase(testcase.TestCase): # pylint: disable=abstract-method
@classmethod
def create_after_test(cls, logger, base_test, hook, *args, **kwargs):
- """Creates a hook dynamic test to be run after an existing test."""
+ """Create a hook dynamic test to be run after an existing test."""
base_test_name = base_test.short_name()
test_name = cls._make_test_name(base_test_name, hook)
description = "{} after running '{}'".format(hook.description, base_test_name)
diff --git a/buildscripts/resmokelib/testing/hooks/jsfile.py b/buildscripts/resmokelib/testing/hooks/jsfile.py
index 76b2de31313..e95d3d6d780 100644
--- a/buildscripts/resmokelib/testing/hooks/jsfile.py
+++ b/buildscripts/resmokelib/testing/hooks/jsfile.py
@@ -1,7 +1,4 @@
-"""
-Interface for customizing the behavior of a test fixture by executing a
-JavaScript file.
-"""
+"""Interface for customizing the behavior of a test fixture by executing a JavaScript file."""
from __future__ import absolute_import
@@ -16,19 +13,23 @@ class JSHook(interface.Hook):
REGISTERED_NAME = registry.LEAVE_UNREGISTERED
- def __init__(self, hook_logger, fixture, js_filename, description, shell_options=None):
+ def __init__( # pylint: disable=too-many-arguments
+ self, hook_logger, fixture, js_filename, description, shell_options=None):
+ """Initialize JSHook."""
interface.Hook.__init__(self, hook_logger, fixture, description)
self._js_filename = js_filename
self._shell_options = shell_options
def _should_run_after_test(self): # pylint: disable=no-self-use
- """
+ """Provide base callback.
+
Callback that can be overridden by subclasses to indicate if the JavaScript file should be
executed after the current test.
"""
return True
def after_test(self, test, test_report):
+ """After test execution."""
if not self._should_run_after_test():
return
@@ -49,6 +50,7 @@ class DataConsistencyHook(JSHook):
REGISTERED_NAME = registry.LEAVE_UNREGISTERED
def after_test(self, test, test_report):
+ """After test execution."""
try:
JSHook.after_test(self, test, test_report)
except errors.TestFailure as err:
@@ -58,23 +60,29 @@ class DataConsistencyHook(JSHook):
class DynamicJSTestCase(interface.DynamicTestCase):
"""A dynamic TestCase that runs a JavaScript file."""
- def __init__(self, logger, test_name, description, base_test_name, hook, js_filename,
- shell_options=None):
+ def __init__( # pylint: disable=too-many-arguments
+ self, logger, test_name, description, base_test_name, hook, js_filename,
+ shell_options=None):
+ """Initialize DynamicJSTestCase."""
interface.DynamicTestCase.__init__(self, logger, test_name, description, base_test_name,
hook)
self._js_test = jstest.JSTestCase(logger, js_filename, shell_options=shell_options)
def override_logger(self, new_logger):
+ """Override logger."""
interface.DynamicTestCase.override_logger(self, new_logger)
self._js_test.override_logger(new_logger)
def reset_logger(self):
+ """Reset the logger."""
interface.DynamicTestCase.reset_logger(self)
self._js_test.reset_logger()
def configure(self, fixture, *args, **kwargs): # pylint: disable=unused-argument
+ """Configure the fixture."""
interface.DynamicTestCase.configure(self, fixture, *args, **kwargs)
self._js_test.configure(fixture, *args, **kwargs)
def run_test(self):
+ """Execute the test."""
self._js_test.run_test()
diff --git a/buildscripts/resmokelib/testing/hooks/oplog.py b/buildscripts/resmokelib/testing/hooks/oplog.py
index ca9e8d58228..ceb81bb8fd6 100644
--- a/buildscripts/resmokelib/testing/hooks/oplog.py
+++ b/buildscripts/resmokelib/testing/hooks/oplog.py
@@ -1,7 +1,4 @@
-"""
-Testing hook for verifying members of a replica set have matching
-oplogs.
-"""
+"""Test hook for verifying members of a replica set have matching oplogs."""
from __future__ import absolute_import
@@ -10,13 +7,13 @@ import os.path
from . import jsfile
-class CheckReplOplogs(jsfile.DataConsistencyHook):
- """
- Checks that local.oplog.rs matches on the primary and secondaries.
- """
+class CheckReplOplogs(jsfile.DataConsistencyHook): # pylint: disable=non-parent-init-called,super-init-not-called
+ """Check that local.oplog.rs matches on the primary and secondaries."""
- def __init__(self, hook_logger, fixture, shell_options=None):
+ def __init__( # pylint: disable=super-init-not-called
+ self, hook_logger, fixture, shell_options=None):
+ """Initialize CheckReplOplogs."""
description = "Check oplogs of all replica set members"
js_filename = os.path.join("jstests", "hooks", "run_check_repl_oplogs.js")
- jsfile.JSHook.__init__(self, hook_logger, fixture, js_filename, description,
- shell_options=shell_options)
+ jsfile.JSHook.__init__( # pylint: disable=non-parent-init-called
+ self, hook_logger, fixture, js_filename, description, shell_options=shell_options)
diff --git a/buildscripts/resmokelib/testing/hooks/periodic_kill_secondaries.py b/buildscripts/resmokelib/testing/hooks/periodic_kill_secondaries.py
index a6924fe52b6..bf3ed131b52 100644
--- a/buildscripts/resmokelib/testing/hooks/periodic_kill_secondaries.py
+++ b/buildscripts/resmokelib/testing/hooks/periodic_kill_secondaries.py
@@ -1,7 +1,4 @@
-"""
-Testing hook for verifying correctness of a secondary's behavior during
-an unclean shutdown.
-"""
+"""Test hook for verifying correctness of secondary's behavior during an unclean shutdown."""
from __future__ import absolute_import
@@ -20,15 +17,16 @@ from ... import errors
class PeriodicKillSecondaries(interface.Hook):
- """
- Periodically kills the secondaries in a replica set and verifies
- that they can reach the SECONDARY state without having connectivity
+ """Periodically kills the secondaries in a replica set.
+
+ Also verifies that the secondaries can reach the SECONDARY state without having connectivity
to the primary after an unclean shutdown.
"""
DEFAULT_PERIOD_SECS = 30
def __init__(self, hook_logger, rs_fixture, period_secs=DEFAULT_PERIOD_SECS):
+ """Initialize PeriodicKillSecondaries."""
if not isinstance(rs_fixture, replicaset.ReplicaSetFixture):
raise TypeError("{} either does not support replication or does not support writing to"
" its oplog early".format(rs_fixture.__class__.__name__))
@@ -46,6 +44,7 @@ class PeriodicKillSecondaries(interface.Hook):
self._last_test = None
def after_suite(self, test_report):
+ """Run after suite."""
if self._start_time is not None:
# Ensure that we test killing the secondary and having it reach state SECONDARY after
# being restarted at least once when running the suite.
@@ -54,6 +53,7 @@ class PeriodicKillSecondaries(interface.Hook):
self._run(test_report)
def before_test(self, test, test_report):
+ """Run before test."""
if self._start_time is not None:
# The "rsSyncApplyStop" failpoint is already enabled.
return
@@ -66,6 +66,7 @@ class PeriodicKillSecondaries(interface.Hook):
self._start_time = time.time()
def after_test(self, test, test_report):
+ """Run after test."""
self._last_test = test
# Kill the secondaries and verify that they can reach the SECONDARY state if the specified
@@ -116,12 +117,17 @@ class PeriodicKillSecondaries(interface.Hook):
class PeriodicKillSecondariesTestCase(interface.DynamicTestCase):
- def __init__(self, logger, test_name, description, base_test_name, hook, test_report):
+ """PeriodicKillSecondariesTestCase class."""
+
+ def __init__( # pylint: disable=too-many-arguments
+ self, logger, test_name, description, base_test_name, hook, test_report):
+ """Initialize PeriodicKillSecondariesTestCase."""
interface.DynamicTestCase.__init__(self, logger, test_name, description, base_test_name,
hook)
self._test_report = test_report
def run_test(self):
+ """Run the test."""
self._kill_secondaries()
self._check_secondaries_and_restart_fixture()
@@ -143,7 +149,7 @@ class PeriodicKillSecondariesTestCase(interface.DynamicTestCase):
for secondary in self.fixture.get_secondaries():
# Disable the "rsSyncApplyStop" failpoint on the secondary to have it resume applying
# oplog entries.
- self._hook._disable_rssyncapplystop(secondary)
+ self._hook._disable_rssyncapplystop(secondary) # pylint: disable=protected-access
# Wait a little bit for the secondary to start applying oplog entries so that we are more
# likely to kill the mongod process while it is partway into applying a batch.
@@ -229,7 +235,7 @@ class PeriodicKillSecondariesTestCase(interface.DynamicTestCase):
self.fixture.setup()
self.fixture.await_ready()
- def _check_invariants_as_standalone(self, secondary):
+ def _check_invariants_as_standalone(self, secondary): # pylint: disable=too-many-branches
# We remove the --replSet option in order to start the node as a standalone.
replset_name = secondary.mongod_options.pop("replSet")
diff --git a/buildscripts/resmokelib/testing/hooks/stepdown.py b/buildscripts/resmokelib/testing/hooks/stepdown.py
index 9e6e99d6663..2521d65f1af 100644
--- a/buildscripts/resmokelib/testing/hooks/stepdown.py
+++ b/buildscripts/resmokelib/testing/hooks/stepdown.py
@@ -1,6 +1,4 @@
-"""
-Testing hook that periodically makes the primary of a replica set step down.
-"""
+"""Test hook that periodically makes the primary of a replica set step down."""
from __future__ import absolute_import
import collections
@@ -18,15 +16,15 @@ from buildscripts.resmokelib.testing.fixtures import shardedcluster
class ContinuousStepdown(interface.Hook):
- """The ContinuousStepdown hook regularly connects to replica sets and sends a replSetStepDown
- command.
- """
+ """Regularly connect to replica sets and send a replSetStepDown command."""
+
DESCRIPTION = ("Continuous stepdown (steps down the primary of replica sets at regular"
" intervals)")
- def __init__(self, hook_logger, fixture, config_stepdown=True, shard_stepdown=True,
- stepdown_duration_secs=10, stepdown_interval_ms=8000):
- """Initializes the ContinuousStepdown.
+ def __init__( # pylint: disable=too-many-arguments
+ self, hook_logger, fixture, config_stepdown=True, shard_stepdown=True,
+ stepdown_duration_secs=10, stepdown_interval_ms=8000):
+ """Initialize the ContinuousStepdown.
Args:
hook_logger: the logger instance for this hook.
@@ -48,6 +46,7 @@ class ContinuousStepdown(interface.Hook):
self._stepdown_thread = None
def before_suite(self, test_report):
+ """Before suite."""
if not self._rs_fixtures:
self._add_fixture(self._fixture)
self._stepdown_thread = _StepdownThread(self.logger, self._rs_fixtures,
@@ -57,15 +56,18 @@ class ContinuousStepdown(interface.Hook):
self._stepdown_thread.start()
def after_suite(self, test_report):
+ """After suite."""
self.logger.info("Stopping the stepdown thread.")
self._stepdown_thread.stop()
def before_test(self, test, test_report):
+ """Before test."""
self._check_thread()
self.logger.info("Resuming the stepdown thread.")
self._stepdown_thread.resume()
def after_test(self, test, test_report):
+ """After test."""
self._check_thread()
self.logger.info("Pausing the stepdown thread.")
self._stepdown_thread.pause()
@@ -92,8 +94,11 @@ class ContinuousStepdown(interface.Hook):
self._add_fixture(fixture.configsvr)
-class _StepdownThread(threading.Thread):
+class _StepdownThread(threading.Thread): # pylint: disable=too-many-instance-attributes
+ """_StepdownThread class."""
+
def __init__(self, logger, rs_fixtures, stepdown_interval_secs, stepdown_duration_secs):
+ """Initialize _StepdownThread."""
threading.Thread.__init__(self, name="StepdownThread")
self.daemon = True
self.logger = logger
@@ -114,6 +119,7 @@ class _StepdownThread(threading.Thread):
self._step_up_stats = collections.Counter()
def run(self):
+ """Execute the thread."""
if not self._rs_fixtures:
self.logger.warning("No replica set on which to run stepdowns.")
return
@@ -135,7 +141,7 @@ class _StepdownThread(threading.Thread):
self._wait(wait_secs)
def stop(self):
- """Stops the thread."""
+ """Stop the thread."""
self._is_stopped_evt.set()
# Unpause to allow the thread to finish.
self.resume()
@@ -145,7 +151,7 @@ class _StepdownThread(threading.Thread):
return self._is_stopped_evt.is_set()
def pause(self):
- """Pauses the thread."""
+ """Pause the thread."""
self._is_resumed_evt.clear()
# Wait until we are no longer executing stepdowns.
self._is_idle_evt.wait()
@@ -153,7 +159,7 @@ class _StepdownThread(threading.Thread):
self._await_primaries()
def resume(self):
- """Resumes the thread."""
+ """Resume the thread."""
self._is_resumed_evt.set()
self.logger.info(
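_StepdownThread coordinates with the hook through three events: a stop flag, a resumed flag that before_test sets and after_test clears, and an idle flag that pause() waits on so it never returns while a stepdown round is mid-flight. A runnable sketch of that pause/resume protocol, as a generic worker rather than the real stepdown logic:

import threading
import time

class PausableThread(threading.Thread):
    """Sketch of the event-based pause/resume protocol used by _StepdownThread."""

    def __init__(self):
        threading.Thread.__init__(self, name="PausableThread")
        self.daemon = True
        self._is_stopped_evt = threading.Event()
        self._is_resumed_evt = threading.Event()
        self._is_idle_evt = threading.Event()
        self._is_idle_evt.set()

    def run(self):
        while not self._is_stopped_evt.is_set():
            self._is_resumed_evt.wait()      # Block here while paused.
            if self._is_stopped_evt.is_set():
                break
            self._is_idle_evt.clear()
            self._do_one_round()             # e.g. step down each primary.
            self._is_idle_evt.set()
            time.sleep(1)

    def _do_one_round(self):
        pass                                 # Placeholder for the real work.

    def stop(self):
        self._is_stopped_evt.set()
        self.resume()                        # Unblock run() so it can exit.

    def pause(self):
        self._is_resumed_evt.clear()
        self._is_idle_evt.wait()             # Wait until mid-round work ends.

    def resume(self):
        self._is_resumed_evt.set()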
diff --git a/buildscripts/resmokelib/testing/hooks/validate.py b/buildscripts/resmokelib/testing/hooks/validate.py
index 20cf99c1158..3239ddbdc06 100644
--- a/buildscripts/resmokelib/testing/hooks/validate.py
+++ b/buildscripts/resmokelib/testing/hooks/validate.py
@@ -1,7 +1,4 @@
-"""
-Testing hook for verifying the consistency and integrity of collection
-and index data.
-"""
+"""Test hook for verifying the consistency and integrity of collection and index data."""
from __future__ import absolute_import
@@ -11,13 +8,16 @@ from . import jsfile
class ValidateCollections(jsfile.DataConsistencyHook):
- """
- Runs full validation on all collections in all databases on every stand-alone
+ """Run full validation.
+
+ This will run on all collections in all databases on every stand-alone
node, primary replica-set node, or primary shard node.
"""
- def __init__(self, hook_logger, fixture, shell_options=None):
+ def __init__( # pylint: disable=super-init-not-called
+ self, hook_logger, fixture, shell_options=None):
+ """Initialize ValidateCollections."""
description = "Full collection validation"
js_filename = os.path.join("jstests", "hooks", "run_validate_collections.js")
- jsfile.JSHook.__init__(self, hook_logger, fixture, js_filename, description,
- shell_options=shell_options)
+ jsfile.JSHook.__init__( # pylint: disable=non-parent-init-called
+ self, hook_logger, fixture, js_filename, description, shell_options=shell_options)
diff --git a/buildscripts/resmokelib/testing/job.py b/buildscripts/resmokelib/testing/job.py
index 33831f4e84c..a890eb3a820 100644
--- a/buildscripts/resmokelib/testing/job.py
+++ b/buildscripts/resmokelib/testing/job.py
@@ -1,7 +1,4 @@
-"""
-Enables supports for running tests simultaneously by processing them
-from a multi-consumer queue.
-"""
+"""Enable running tests simultaneously by processing them from a multi-consumer queue."""
from __future__ import absolute_import
@@ -13,14 +10,11 @@ from ..utils import queue as _queue
class Job(object):
- """
- Runs tests from a queue.
- """
+ """Run tests from a queue."""
- def __init__(self, logger, fixture, hooks, report, archival, suite_options):
- """
- Initializes the job with the specified fixture and hooks.
- """
+ def __init__( # pylint: disable=too-many-arguments
+ self, logger, fixture, hooks, report, archival, suite_options):
+ """Initialize the job with the specified fixture and hooks."""
self.logger = logger
self.fixture = fixture
@@ -30,9 +24,7 @@ class Job(object):
self.suite_options = suite_options
def __call__(self, queue, interrupt_flag, teardown_flag=None):
- """
- Continuously executes tests from 'queue' and records their
- details in 'report'.
+ """Continuously execute tests from 'queue' and records their details in 'report'.
If 'teardown_flag' is not None, then 'self.fixture.teardown()'
will be called before this method returns. If an error occurs
@@ -47,7 +39,7 @@ class Job(object):
# Stop running tests immediately.
self.logger.error("Received a StopExecution exception: %s.", err)
should_stop = True
- except:
+ except: # pylint: disable=bare-except
# Unknown error, stop execution.
self.logger.exception("Encountered an error during test execution.")
should_stop = True
@@ -64,15 +56,12 @@ class Job(object):
except errors.ServerFailure as err:
self.logger.warn("Teardown of %s was not successful: %s", self.fixture, err)
teardown_flag.set()
- except:
+ except: # pylint: disable=bare-except
self.logger.exception("Encountered an error while tearing down %s.", self.fixture)
teardown_flag.set()
def _run(self, queue, interrupt_flag):
- """
- Calls the before/after suite hooks and continuously executes
- tests from 'queue'.
- """
+ """Call the before/after suite hooks and continuously execute tests from 'queue'."""
for hook in self.hooks:
hook.before_suite(self.report)
@@ -91,9 +80,7 @@ class Job(object):
hook.after_suite(self.report)
def _execute_test(self, test):
- """
- Calls the before/after test hooks and executes 'test'.
- """
+ """Call the before/after test hooks and execute 'test'."""
test.configure(self.fixture, config.NUM_CLIENTS_PER_FIXTURE)
self._run_hooks_before_tests(test)
@@ -101,26 +88,26 @@ class Job(object):
test(self.report)
try:
if self.suite_options.fail_fast and not self.report.wasSuccessful():
- self.logger.info("%s failed, so stopping..." % (test.shortDescription()))
- raise errors.StopExecution("%s failed" % (test.shortDescription()))
+ self.logger.info("%s failed, so stopping..." % (test.short_description()))
+ raise errors.StopExecution("%s failed" % (test.short_description()))
if not self.fixture.is_running():
self.logger.error(
"%s marked as a failure because the fixture crashed during the test.",
- test.shortDescription())
+ test.short_description())
self.report.setFailure(test, return_code=2)
# Always fail fast if the fixture fails.
raise errors.StopExecution("%s not running after %s" % (self.fixture,
- test.shortDescription()))
+ test.short_description()))
finally:
- success = self.report._find_test_info(test).status == "pass"
+ success = self.report.find_test_info(test).status == "pass"
if self.archival:
self.archival.archive(self.logger, test, success)
self._run_hooks_after_tests(test)
def _run_hook(self, hook, hook_function, test):
- """ Helper to run hook and archival. """
+ """Provide helper to run hook and archival."""
try:
success = False
hook_function(test, self.report)
@@ -130,8 +117,7 @@ class Job(object):
self.archival.archive(self.logger, test, success, hook=hook)
def _run_hooks_before_tests(self, test):
- """
- Runs the before_test method on each of the hooks.
+ """Run the before_test method on each of the hooks.
Swallows any TestFailure exceptions if set to continue on
failure, and reraises any other exceptions.
@@ -145,13 +131,13 @@ class Job(object):
except errors.ServerFailure:
self.logger.exception("%s marked as a failure by a hook's before_test.",
- test.shortDescription())
+ test.short_description())
self._fail_test(test, sys.exc_info(), return_code=2)
raise errors.StopExecution("A hook's before_test failed")
except errors.TestFailure:
self.logger.exception("%s marked as a failure by a hook's before_test.",
- test.shortDescription())
+ test.short_description())
self._fail_test(test, sys.exc_info(), return_code=1)
if self.suite_options.fail_fast:
raise errors.StopExecution("A hook's before_test failed")
@@ -164,8 +150,7 @@ class Job(object):
raise
def _run_hooks_after_tests(self, test):
- """
- Runs the after_test method on each of the hooks.
+ """Run the after_test method on each of the hooks.
Swallows any TestFailure exceptions if set to continue on
failure, and reraises any other exceptions.
@@ -179,13 +164,13 @@ class Job(object):
except errors.ServerFailure:
self.logger.exception("%s marked as a failure by a hook's after_test.",
- test.shortDescription())
+ test.short_description())
self.report.setFailure(test, return_code=2)
raise errors.StopExecution("A hook's after_test failed")
except errors.TestFailure:
self.logger.exception("%s marked as a failure by a hook's after_test.",
- test.shortDescription())
+ test.short_description())
self.report.setFailure(test, return_code=1)
if self.suite_options.fail_fast:
raise errors.StopExecution("A hook's after_test failed")
@@ -195,9 +180,7 @@ class Job(object):
raise
def _fail_test(self, test, exc_info, return_code=1):
- """
- Helper to record a test as a failure with the provided return
- code.
+ """Provide helper to record a test as a failure with the provided return code.
This method should not be used if 'test' has already been
started, instead use TestReport.setFailure().
@@ -210,10 +193,9 @@ class Job(object):
@staticmethod
def _drain_queue(queue):
- """
- Removes all elements from 'queue' without actually doing
- anything to them. Necessary to unblock the main thread that is
- waiting for 'queue' to be empty.
+ """Remove all elements from 'queue' without actually doing anything to them.
+
+ Necessary to unblock the main thread that is waiting for 'queue' to be empty.
"""
try:
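_drain_queue's docstring states the intent: discard the remaining queue entries so the main thread blocked on the queue can proceed. A sketch of such a drain loop, under the assumption that the queue is a joinable Queue.Queue so each discarded item still needs task_done() (Python 2 module name, matching this codebase; the real code uses resmoke's queue wrapper):

import Queue  # The Python 2 module name; 'queue' on Python 3.

def drain_queue(work_queue):
    # Discard every remaining element, marking each done so that a main
    # thread blocked in work_queue.join() can wake up.
    try:
        while not work_queue.empty():
            work_queue.get_nowait()
            work_queue.task_done()
    except Queue.Empty:
        # Another consumer may have emptied the queue between the check
        # and the get; that is fine, draining is best-effort.
        pass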
diff --git a/buildscripts/resmokelib/testing/report.py b/buildscripts/resmokelib/testing/report.py
index f13cfdc9a84..c968449c8a2 100644
--- a/buildscripts/resmokelib/testing/report.py
+++ b/buildscripts/resmokelib/testing/report.py
@@ -1,6 +1,6 @@
-"""
-Extension to the unittest.TestResult to support additional test status
-and timing information for the report.json file.
+"""Extension to the unittest.TestResult.
+
+This is used to support additional test status and timing information for the report.json file.
"""
from __future__ import absolute_import
@@ -14,15 +14,12 @@ from .. import config as _config
from .. import logging
-class TestReport(unittest.TestResult):
- """
- Records test status and timing information.
- """
+# pylint: disable=attribute-defined-outside-init
+class TestReport(unittest.TestResult): # pylint: disable=too-many-instance-attributes
+ """Record test status and timing information."""
def __init__(self, job_logger, suite_options):
- """
- Initializes the TestReport with the buildlogger configuration.
- """
+ """Initialize the TestReport with the buildlogger configuration."""
unittest.TestResult.__init__(self)
@@ -35,8 +32,7 @@ class TestReport(unittest.TestResult):
@classmethod
def combine(cls, *reports):
- """
- Merges the results from multiple TestReport instances into one.
+ """Merge the results from multiple TestReport instances into one.
If the same test is present in multiple reports, then one that
failed or errored is more preferred over one that succeeded.
@@ -54,7 +50,7 @@ class TestReport(unittest.TestResult):
if not isinstance(report, TestReport):
raise TypeError("reports must be a list of TestReport instances")
- with report._lock:
+ with report._lock: # pylint: disable=protected-access
for test_info in report.test_infos:
# If the user triggers a KeyboardInterrupt exception while a test is running,
# then it is possible for 'test_info' to be modified by a job thread later on.
@@ -93,10 +89,8 @@ class TestReport(unittest.TestResult):
return combined_report
- def startTest(self, test, dynamic=False):
- """
- Called immediately before 'test' is run.
- """
+ def startTest(self, test, dynamic=False): # pylint: disable=invalid-name,arguments-differ
+ """Call before 'test' is run."""
unittest.TestResult.startTest(self, test)
@@ -119,15 +113,13 @@ class TestReport(unittest.TestResult):
test.override_logger(test_logger)
- def stopTest(self, test):
- """
- Called immediately after 'test' has run.
- """
+ def stopTest(self, test): # pylint: disable=invalid-name
+ """Call after 'test' has run."""
unittest.TestResult.stopTest(self, test)
with self._lock:
- test_info = self._find_test_info(test)
+ test_info = self.find_test_info(test)
test_info.end_time = time.time()
time_taken = test_info.end_time - test_info.start_time
@@ -143,11 +135,8 @@ class TestReport(unittest.TestResult):
# Restore the original logger for the test.
test.reset_logger()
- def addError(self, test, err):
- """
- Called when a non-failureException was raised during the
- execution of 'test'.
- """
+ def addError(self, test, err): # pylint: disable=invalid-name
+ """Call when a non-failureException was raised during the execution of 'test'."""
unittest.TestResult.addError(self, test, err)
@@ -155,18 +144,16 @@ class TestReport(unittest.TestResult):
self.num_errored += 1
# We don't distinguish between test failures and Python errors in Evergreen.
- test_info = self._find_test_info(test)
+ test_info = self.find_test_info(test)
test_info.status = "error"
test_info.evergreen_status = "fail"
test_info.return_code = test.return_code
- def setError(self, test):
- """
- Used to change the outcome of an existing test to an error.
- """
+ def setError(self, test): # pylint: disable=invalid-name
+ """Change the outcome of an existing test to an error."""
with self._lock:
- test_info = self._find_test_info(test)
+ test_info = self.find_test_info(test)
if test_info.end_time is None:
raise ValueError("stopTest was not called on %s" % (test.basename()))
@@ -181,18 +168,15 @@ class TestReport(unittest.TestResult):
self.num_errored = len(self.get_errored())
self.num_interrupted = len(self.get_interrupted())
- def addFailure(self, test, err):
- """
- Called when a failureException was raised during the execution
- of 'test'.
- """
+ def addFailure(self, test, err): # pylint: disable=invalid-name
+ """Call when a failureException was raised during the execution of 'test'."""
unittest.TestResult.addFailure(self, test, err)
with self._lock:
self.num_failed += 1
- test_info = self._find_test_info(test)
+ test_info = self.find_test_info(test)
test_info.status = "fail"
if test_info.dynamic:
# Dynamic tests are used for data consistency checks, so the failures are never
@@ -202,13 +186,11 @@ class TestReport(unittest.TestResult):
test_info.evergreen_status = self.suite_options.report_failure_status
test_info.return_code = test.return_code
- def setFailure(self, test, return_code=1):
- """
- Used to change the outcome of an existing test to a failure.
- """
+ def setFailure(self, test, return_code=1): # pylint: disable=invalid-name
+ """Change the outcome of an existing test to a failure."""
with self._lock:
- test_info = self._find_test_info(test)
+ test_info = self.find_test_info(test)
if test_info.end_time is None:
raise ValueError("stopTest was not called on %s" % (test.basename()))
@@ -227,68 +209,51 @@ class TestReport(unittest.TestResult):
self.num_errored = len(self.get_errored())
self.num_interrupted = len(self.get_interrupted())
- def addSuccess(self, test):
- """
- Called when 'test' executed successfully.
- """
+ def addSuccess(self, test): # pylint: disable=invalid-name
+ """Call when 'test' executed successfully."""
unittest.TestResult.addSuccess(self, test)
with self._lock:
self.num_succeeded += 1
- test_info = self._find_test_info(test)
+ test_info = self.find_test_info(test)
test_info.status = "pass"
test_info.evergreen_status = "pass"
test_info.return_code = test.return_code
- def wasSuccessful(self):
- """
- Returns true if all tests executed successfully.
- """
+ def wasSuccessful(self): # pylint: disable=invalid-name
+ """Return true if all tests executed successfully."""
with self._lock:
return self.num_failed == self.num_errored == self.num_interrupted == 0
def get_successful(self):
- """
- Returns the status and timing information of the tests that
- executed successfully.
- """
+ """Return the status and timing information of the tests that executed successfully."""
with self._lock:
return [test_info for test_info in self.test_infos if test_info.status == "pass"]
def get_failed(self):
- """
- Returns the status and timing information of the tests that
- raised a failureException during their execution.
- """
+ """Return the status and timing information of tests that raised a failureException."""
with self._lock:
return [test_info for test_info in self.test_infos if test_info.status == "fail"]
def get_errored(self):
- """
- Returns the status and timing information of the tests that
- raised a non-failureException during their execution.
- """
+ """Return the status and timing information of tests that raised a non-failureException."""
with self._lock:
return [test_info for test_info in self.test_infos if test_info.status == "error"]
def get_interrupted(self):
- """
- Returns the status and timing information of the tests that had
- their execution interrupted.
- """
+ """Return the status and timing information of tests that were execution interrupted."""
with self._lock:
return [test_info for test_info in self.test_infos if test_info.status == "timeout"]
def as_dict(self):
- """
- Return the test result information as a dictionary.
+ """Return the test result information as a dictionary.
Used to create the report.json file.
"""
@@ -318,8 +283,7 @@ class TestReport(unittest.TestResult):
@classmethod
def from_dict(cls, report_dict):
- """
- Returns the test report instance copied from a dict (generated in as_dict).
+ """Return the test report instance copied from a dict (generated in as_dict).
Used when combining reports instances.
"""
@@ -349,9 +313,7 @@ class TestReport(unittest.TestResult):
return report
def reset(self):
- """
- Resets the test report back to its initial state.
- """
+ """Reset the test report back to its initial state."""
with self._lock:
self.test_infos = []
@@ -362,11 +324,8 @@ class TestReport(unittest.TestResult):
self.num_errored = 0
self.num_interrupted = 0
- def _find_test_info(self, test):
- """
- Returns the status and timing information associated with
- 'test'.
- """
+ def find_test_info(self, test):
+ """Return the status and timing information associated with 'test'."""
test_id = test.id()
@@ -379,15 +338,11 @@ class TestReport(unittest.TestResult):
raise ValueError("Details for %s not found in the report" % (test.basename()))
-class _TestInfo(object):
- """
- Holder for the test status and timing information.
- """
+class _TestInfo(object): # pylint: disable=too-many-instance-attributes
+ """Holder for the test status and timing information."""
def __init__(self, test_id, dynamic):
- """
- Initializes the _TestInfo instance.
- """
+ """Initialize the _TestInfo instance."""
self.test_id = test_id
self.dynamic = dynamic
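TestReport guards all of its mutable state with self._lock because several job threads report into the same instance. A reduced sketch of that pattern on top of unittest.TestResult (counters only; the real class also tracks per-test _TestInfo records and buildlogger integration):

import threading
import unittest

class MiniTestReport(unittest.TestResult):
    """Sketch of TestReport's locking: job threads update shared counters."""

    def __init__(self):
        unittest.TestResult.__init__(self)
        self._lock = threading.Lock()
        self.num_succeeded = 0
        self.num_failed = 0

    def addSuccess(self, test):
        unittest.TestResult.addSuccess(self, test)
        with self._lock:
            self.num_succeeded += 1

    def addFailure(self, test, err):
        unittest.TestResult.addFailure(self, test, err)
        with self._lock:
            self.num_failed += 1

    def wasSuccessful(self):
        with self._lock:
            return self.num_failed == 0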
diff --git a/buildscripts/resmokelib/testing/suite.py b/buildscripts/resmokelib/testing/suite.py
index 07d72cb65b4..1a5c06b09f1 100644
--- a/buildscripts/resmokelib/testing/suite.py
+++ b/buildscripts/resmokelib/testing/suite.py
@@ -1,7 +1,4 @@
-"""
-Holder for the (test kind, list of tests) pair with additional metadata about when and how they
-execute.
-"""
+"""Holder for the (test kind, list of tests) pair with additional metadata their execution."""
from __future__ import absolute_import
@@ -16,9 +13,10 @@ from .. import selector as _selector
def synchronized(method):
- """Decorator to enfore instance lock ownership when calling the method."""
+ """Provide decorator to enfore instance lock ownership when calling the method."""
def synced(self, *args, **kwargs):
+ """Sync an instance lock."""
lock = getattr(self, "_lock")
with lock:
return method(self, *args, **kwargs)
@@ -26,15 +24,11 @@ def synchronized(method):
return synced
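The synchronized decorator above assumes the decorated method's instance exposes a _lock attribute and wraps every call in it. A runnable usage sketch (the decorator body is copied here so the example stands alone):

import threading

def synchronized(method):
    """Copy of the decorator above, for a self-contained example."""
    def synced(self, *args, **kwargs):
        with getattr(self, "_lock"):
            return method(self, *args, **kwargs)
    return synced

class Counter(object):
    def __init__(self):
        self._lock = threading.RLock()  # The attribute the decorator expects.
        self.value = 0

    @synchronized
    def increment(self):
        self.value += 1

counter = Counter()
counter.increment()
assert counter.value == 1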
-class Suite(object):
- """
- A suite of tests of a particular kind (e.g. C++ unit tests, dbtests, jstests).
- """
+class Suite(object): # pylint: disable=too-many-instance-attributes
+ """A suite of tests of a particular kind (e.g. C++ unit tests, dbtests, jstests)."""
def __init__(self, suite_name, suite_config, suite_options=_config.SuiteOptions.ALL_INHERITED):
- """
- Initializes the suite with the specified name and configuration.
- """
+ """Initialize the suite with the specified name and configuration."""
self._lock = threading.RLock()
self._suite_name = suite_name
@@ -58,10 +52,7 @@ class Suite(object):
self._partial_reports = None
def _get_tests_for_kind(self, test_kind):
- """
- Returns the tests to run based on the 'test_kind'-specific
- filtering policy.
- """
+ """Return the tests to run based on the 'test_kind'-specific filtering policy."""
test_info = self.get_selector_config()
# The mongos_test doesn't have to filter anything, the test_info is just the arguments to
@@ -79,15 +70,11 @@ class Suite(object):
return tests, excluded
def get_name(self):
- """
- Returns the name of the test suite.
- """
+ """Return the name of the test suite."""
return self._suite_name
def get_display_name(self):
- """
- Returns the name of the test suite with a unique identifier for its SuiteOptions.
- """
+ """Return the name of the test suite with a unique identifier for its SuiteOptions."""
if self.options.description is None:
return self.get_name()
@@ -95,9 +82,7 @@ class Suite(object):
return "{} ({})".format(self.get_name(), self.options.description)
def get_selector_config(self):
- """
- Returns the "selector" section of the YAML configuration.
- """
+ """Return the "selector" section of the YAML configuration."""
if "selector" not in self._suite_config:
return {}
@@ -124,75 +109,62 @@ class Suite(object):
return selector
def get_executor_config(self):
- """
- Returns the "executor" section of the YAML configuration.
- """
+ """Return the "executor" section of the YAML configuration."""
return self._suite_config["executor"]
def get_test_kind_config(self):
- """
- Returns the "test_kind" section of the YAML configuration.
- """
+ """Return the "test_kind" section of the YAML configuration."""
return self._suite_config["test_kind"]
@property
def options(self):
+ """Get the options."""
return self._suite_options.resolve()
def with_options(self, suite_options):
- """
- Returns a Suite instance with the specified resmokelib.config.SuiteOptions.
- """
+ """Return a Suite instance with the specified resmokelib.config.SuiteOptions."""
return Suite(self._suite_name, self._suite_config, suite_options)
@synchronized
def record_suite_start(self):
- """
- Records the start time of the suite.
- """
+ """Record the start time of the suite."""
self._suite_start_time = time.time()
@synchronized
def record_suite_end(self):
- """
- Records the end time of the suite.
- """
+ """Record the end time of the suite."""
self._suite_end_time = time.time()
@synchronized
def record_test_start(self, partial_reports):
- """
- Records the start time of an execution and stores the
- TestReports for currently running jobs.
+ """Record the start time of an execution.
+
+ The result is stored in the TestReports for currently running jobs.
"""
self._test_start_times.append(time.time())
self._partial_reports = partial_reports
@synchronized
def record_test_end(self, report):
- """
- Records the end time of an execution.
- """
+ """Record the end time of an execution."""
self._test_end_times.append(time.time())
self._reports.append(report)
self._partial_reports = None
@synchronized
def get_active_report(self):
- """
- Returns the partial report of the currently running execution, if there is one.
- """
+ """Return the partial report of the currently running execution, if there is one."""
if not self._partial_reports:
return None
return _report.TestReport.combine(*self._partial_reports)
@synchronized
def get_reports(self):
- """
- Returns the list of reports. If there's an execution currently
- in progress, then a report for the partial results is included
- in the returned list.
+ """Return the list of reports.
+
+ If there's an execution currently in progress, then a report for the partial results
+ is included in the returned list.
"""
if self._partial_reports is not None:
@@ -202,9 +174,7 @@ class Suite(object):
@synchronized
def summarize(self, sb):
- """
- Appends a summary of the suite onto the string builder 'sb'.
- """
+ """Append a summary of the suite onto the string builder 'sb'."""
if not self._reports and not self._partial_reports:
sb.append("No tests ran.")
summary = _summary.Summary(0, 0.0, 0, 0, 0, 0)
@@ -234,9 +204,9 @@ class Suite(object):
@synchronized
def summarize_latest(self, sb):
- """
- Returns a summary of the latest execution of the suite and appends a
- summary of that execution onto the string builder 'sb'.
+ """Return a summary of the latest execution of the suite.
+
+ Also append a summary of that execution onto the string builder 'sb'.
If there's an execution currently in progress, then the partial
summary of that execution is appended to 'sb'.
@@ -251,10 +221,10 @@ class Suite(object):
return self._summarize_report(active_report, self._test_start_times[-1], end_time, sb)
def _summarize_repeated(self, sb):
- """
- Returns the summary information of all executions and appends
- each execution's summary onto the string builder 'sb'. Also
- appends information of how many repetitions there were.
+ """Return the summary information of all executions.
+
+ Also append each execution's summary onto the string builder 'sb' and
+ information of how many repetitions there were.
"""
reports = self.get_reports() # Also includes the combined partial reports.
@@ -283,21 +253,19 @@ class Suite(object):
return combined_summary
def _summarize_execution(self, iteration, sb):
- """
- Returns the summary information of the execution given by
- 'iteration' and appends a summary of that execution onto the
- string builder 'sb'.
+ """Return the summary information of the execution given by 'iteration'.
+
+ Also append a summary of that execution onto the string builder 'sb'.
"""
return self._summarize_report(self._reports[iteration], self._test_start_times[iteration],
self._test_end_times[iteration], sb)
def _summarize_report(self, report, start_time, end_time, sb):
- """
- Returns the summary information of the execution given by
- 'report' that started at 'start_time' and finished at
- 'end_time', and appends a summary of that execution onto the
- string builder 'sb'.
+ """Return the summary information of the execution.
+
+ The summary is for 'report' that started at 'start_time' and finished at 'end_time'.
+ Also append a summary of that execution onto the string builder 'sb'.
"""
time_taken = end_time - start_time
@@ -333,6 +301,7 @@ class Suite(object):
@staticmethod
def log_summaries(logger, suites, time_taken):
+ """Log summary of all suites."""
sb = []
sb.append("Summary of all suites: %d suites ran in %0.2f seconds" % (len(suites),
time_taken))
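
The synchronized decorator above assumes each instance exposes a _lock attribute. A minimal,
self-contained sketch of the same pattern (the Counter class and its names are illustrative,
not part of this patch):

    import threading

    def synchronized(method):
        """Run 'method' while holding the instance's _lock."""
        def synced(self, *args, **kwargs):
            with self._lock:
                return method(self, *args, **kwargs)
        return synced

    class Counter(object):
        """Toy class whose mutating method is serialized across threads."""

        def __init__(self):
            self._lock = threading.RLock()
            self.value = 0

        @synchronized
        def increment(self):
            self.value += 1

    counter = Counter()
    counter.increment()
    print(counter.value)  # 1
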
diff --git a/buildscripts/resmokelib/testing/summary.py b/buildscripts/resmokelib/testing/summary.py
index cf3649c3e16..dc92e0b5b34 100644
--- a/buildscripts/resmokelib/testing/summary.py
+++ b/buildscripts/resmokelib/testing/summary.py
@@ -1,6 +1,4 @@
-"""
-Holder for summary information about a test suite.
-"""
+"""Holder for summary information about a test suite."""
from __future__ import absolute_import
@@ -12,9 +10,7 @@ Summary = collections.namedtuple(
def combine(summary1, summary2):
- """
- Returns a summary representing the sum of 'summary1' and 'summary2'.
- """
+ """Return a summary representing the sum of 'summary1' and 'summary2'."""
args = []
for i in xrange(len(Summary._fields)):
args.append(summary1[i] + summary2[i])
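
The summation in combine() leans on the fact that namedtuples are indexable like plain tuples.
A quick runnable illustration with a hypothetical two-field Summary (the real Summary has more
fields, and the patch uses Python 2's xrange; range is used here so the snippet runs on 2 and 3):

    import collections

    Summary = collections.namedtuple("Summary", ["num_run", "num_failed"])

    def combine(summary1, summary2):
        # Sum the tuples field by field and rebuild the namedtuple.
        return Summary(*(summary1[i] + summary2[i] for i in range(len(Summary._fields))))

    print(combine(Summary(3, 1), Summary(2, 0)))  # Summary(num_run=5, num_failed=1)
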
diff --git a/buildscripts/resmokelib/testing/testcases/__init__.py b/buildscripts/resmokelib/testing/testcases/__init__.py
index a397c04fda6..52869d99de8 100644
--- a/buildscripts/resmokelib/testing/testcases/__init__.py
+++ b/buildscripts/resmokelib/testing/testcases/__init__.py
@@ -1,6 +1,4 @@
-"""
-Package containing subclasses of unittest.TestCase.
-"""
+"""Package containing subclasses of unittest.TestCase."""
from __future__ import absolute_import
@@ -9,4 +7,4 @@ from ...utils import autoloader as _autoloader
# We dynamically load all modules in the testcases/ package so that any TestCase classes declared
# within them are automatically registered.
-_autoloader.load_all_modules(name=__name__, path=__path__)
+_autoloader.load_all_modules(name=__name__, path=__path__) # type: ignore
diff --git a/buildscripts/resmokelib/testing/testcases/benchmark_test.py b/buildscripts/resmokelib/testing/testcases/benchmark_test.py
index c30fff20f55..ea506c4f7e5 100644
--- a/buildscripts/resmokelib/testing/testcases/benchmark_test.py
+++ b/buildscripts/resmokelib/testing/testcases/benchmark_test.py
@@ -1,6 +1,4 @@
-"""
-unittest.TestCase for tests using a MongoDB vendored version of Google Benchmark.
-"""
+"""The unittest.TestCase for tests using a MongoDB vendored version of Google Benchmark."""
from __future__ import absolute_import
@@ -12,23 +10,22 @@ from buildscripts.resmokelib.testing.testcases import interface
class BenchmarkTestCase(interface.ProcessTestCase):
- """
- A Benchmark test to execute.
- """
+ """A Benchmark test to execute."""
REGISTERED_NAME = "benchmark_test"
def __init__(self, logger, program_executable, program_options=None):
- """
- Initializes the BenchmarkTestCase with the executable to run.
- """
+ """Initialize the BenchmarkTestCase with the executable to run."""
+
interface.ProcessTestCase.__init__(self, logger, "Benchmark test", program_executable)
parser.validate_benchmark_options()
self.bm_executable = program_executable
self.suite_bm_options = program_options
+ self.bm_options = {}
def configure(self, fixture, *args, **kwargs):
+ """Configure BenchmarkTestCase."""
interface.ProcessTestCase.configure(self, fixture, *args, **kwargs)
# 1. Set the default benchmark options, including the out file path, which is based on the
@@ -65,6 +62,7 @@ class BenchmarkTestCase(interface.ProcessTestCase):
self.bm_options = bm_options
def report_name(self):
+ """Return report name."""
return self.bm_executable + ".json"
def _make_process(self):
diff --git a/buildscripts/resmokelib/testing/testcases/cpp_integration_test.py b/buildscripts/resmokelib/testing/testcases/cpp_integration_test.py
index b4170581821..df6d7c9fa41 100644
--- a/buildscripts/resmokelib/testing/testcases/cpp_integration_test.py
+++ b/buildscripts/resmokelib/testing/testcases/cpp_integration_test.py
@@ -1,6 +1,4 @@
-"""
-unittest.TestCase for C++ integration tests.
-"""
+"""The unittest.TestCase for C++ integration tests."""
from __future__ import absolute_import
@@ -10,16 +8,12 @@ from ... import utils
class CPPIntegrationTestCase(interface.ProcessTestCase):
- """
- A C++ integration test to execute.
- """
+ """A C++ integration test to execute."""
REGISTERED_NAME = "cpp_integration_test"
def __init__(self, logger, program_executable, program_options=None):
- """
- Initializes the CPPIntegrationTestCase with the executable to run.
- """
+ """Initialize the CPPIntegrationTestCase with the executable to run."""
interface.ProcessTestCase.__init__(self, logger, "C++ integration test", program_executable)
@@ -27,6 +21,7 @@ class CPPIntegrationTestCase(interface.ProcessTestCase):
self.program_options = utils.default_if_none(program_options, {}).copy()
def configure(self, fixture, *args, **kwargs):
+ """Configure the test case."""
interface.ProcessTestCase.configure(self, fixture, *args, **kwargs)
self.program_options["connectionString"] = self.fixture.get_internal_connection_string()
diff --git a/buildscripts/resmokelib/testing/testcases/cpp_unittest.py b/buildscripts/resmokelib/testing/testcases/cpp_unittest.py
index 96f20796911..f9512f8feb5 100644
--- a/buildscripts/resmokelib/testing/testcases/cpp_unittest.py
+++ b/buildscripts/resmokelib/testing/testcases/cpp_unittest.py
@@ -1,6 +1,4 @@
-"""
-unittest.TestCase for C++ unit tests.
-"""
+"""The unittest.TestCase for C++ unit tests."""
from __future__ import absolute_import
@@ -10,16 +8,12 @@ from ... import utils
class CPPUnitTestCase(interface.ProcessTestCase):
- """
- A C++ unit test to execute.
- """
+ """A C++ unit test to execute."""
REGISTERED_NAME = "cpp_unit_test"
def __init__(self, logger, program_executable, program_options=None):
- """
- Initializes the CPPUnitTestCase with the executable to run.
- """
+ """Initialize the CPPUnitTestCase with the executable to run."""
interface.ProcessTestCase.__init__(self, logger, "C++ unit test", program_executable)
diff --git a/buildscripts/resmokelib/testing/testcases/dbtest.py b/buildscripts/resmokelib/testing/testcases/dbtest.py
index 15316a0f197..295114d60a5 100644
--- a/buildscripts/resmokelib/testing/testcases/dbtest.py
+++ b/buildscripts/resmokelib/testing/testcases/dbtest.py
@@ -1,6 +1,4 @@
-"""
-unittest.TestCase for dbtests.
-"""
+"""The unittest.TestCase for dbtests."""
from __future__ import absolute_import
@@ -15,16 +13,12 @@ from ... import utils
class DBTestCase(interface.ProcessTestCase):
- """
- A dbtest to execute.
- """
+ """A dbtest to execute."""
REGISTERED_NAME = "db_test"
def __init__(self, logger, dbtest_suite, dbtest_executable=None, dbtest_options=None):
- """
- Initializes the DBTestCase with the dbtest suite to run.
- """
+ """Initialize the DBTestCase with the dbtest suite to run."""
interface.ProcessTestCase.__init__(self, logger, "dbtest suite", dbtest_suite)
@@ -35,6 +29,7 @@ class DBTestCase(interface.ProcessTestCase):
self.dbtest_options = utils.default_if_none(dbtest_options, {}).copy()
def configure(self, fixture, *args, **kwargs):
+ """Configure DBTestCase."""
interface.ProcessTestCase.configure(self, fixture, *args, **kwargs)
# If a dbpath was specified, then use it as a container for all other dbpaths.
@@ -64,8 +59,7 @@ class DBTestCase(interface.ProcessTestCase):
@staticmethod
def _get_dbpath_prefix():
"""
- Returns the prefix of the dbpath to use for the dbtest
- executable.
+ Return the prefix of the dbpath to use for the dbtest executable.
Order of preference:
1. The --dbpathPrefix specified at the command line.
diff --git a/buildscripts/resmokelib/testing/testcases/fsm_workload_test.py b/buildscripts/resmokelib/testing/testcases/fsm_workload_test.py
index 0d397200cfc..62efc0a5959 100644
--- a/buildscripts/resmokelib/testing/testcases/fsm_workload_test.py
+++ b/buildscripts/resmokelib/testing/testcases/fsm_workload_test.py
@@ -1,6 +1,4 @@
-"""
-unittest.TestCase for FSM workloads.
-"""
+"""The unittest.TestCase for FSM workloads."""
from __future__ import absolute_import
@@ -16,7 +14,7 @@ class FSMWorkloadTestCase(jsrunnerfile.JSRunnerFileTestCase):
REGISTERED_NAME = "fsm_workload_test"
def __init__(self, logger, fsm_workload, shell_executable=None, shell_options=None):
- """Initializes the FSMWorkloadTestCase with the FSM workload file."""
+ """Initialize the FSMWorkloadTestCase with the FSM workload file."""
jsrunnerfile.JSRunnerFileTestCase.__init__(
self, logger, "FSM workload", fsm_workload,
@@ -25,6 +23,7 @@ class FSMWorkloadTestCase(jsrunnerfile.JSRunnerFileTestCase):
@property
def fsm_workload(self):
+ """Get the test name."""
return self.test_name
def _populate_test_data(self, test_data):
diff --git a/buildscripts/resmokelib/testing/testcases/interface.py b/buildscripts/resmokelib/testing/testcases/interface.py
index f66abef0f3b..183e69f9d36 100644
--- a/buildscripts/resmokelib/testing/testcases/interface.py
+++ b/buildscripts/resmokelib/testing/testcases/interface.py
@@ -1,6 +1,6 @@
-"""
-Subclass of unittest.TestCase with helpers for spawning a separate
-process to perform the actual test case.
+"""Subclass of unittest.TestCase with helpers for spawning a separate process.
+
+This is used to perform the actual test case.
"""
from __future__ import absolute_import
@@ -12,33 +12,25 @@ import unittest
from ... import logging
from ...utils import registry
-_TEST_CASES = {}
+_TEST_CASES = {} # type: ignore
def make_test_case(test_kind, *args, **kwargs):
- """
- Factory function for creating TestCase instances.
- """
-
+ """Provide factory function for creating TestCase instances."""
if test_kind not in _TEST_CASES:
raise ValueError("Unknown test kind '%s'" % test_kind)
return _TEST_CASES[test_kind](*args, **kwargs)
class TestCase(unittest.TestCase):
- """
- A test case to execute.
- """
+ """A test case to execute."""
- __metaclass__ = registry.make_registry_metaclass(_TEST_CASES)
+ __metaclass__ = registry.make_registry_metaclass(_TEST_CASES) # type: ignore
REGISTERED_NAME = registry.LEAVE_UNREGISTERED
def __init__(self, logger, test_kind, test_name):
- """
- Initializes the TestCase with the name of the test.
- """
-
+ """Initialize the TestCase with the name of the test."""
unittest.TestCase.__init__(self, methodName="run_test")
if not isinstance(logger, logging.Logger):
@@ -54,7 +46,7 @@ class TestCase(unittest.TestCase):
# logger is an instance of TestQueueLogger. When the TestCase is created by a hook
# implementation it is an instance of BaseLogger.
self.logger = logger
- # Used to store the logger when overridden by a test logger in Report.startTest().
+ # Used to store the logger when overridden by a test logger in Report.start_test().
self._original_logger = None
self.test_kind = test_kind
@@ -66,32 +58,27 @@ class TestCase(unittest.TestCase):
self.is_configured = False
def long_name(self):
- """
- Returns the path to the test, relative to the current working directory.
- """
+ """Return the path to the test, relative to the current working directory."""
return os.path.relpath(self.test_name)
def basename(self):
- """
- Returns the basename of the test.
- """
+ """Return the basename of the test."""
return os.path.basename(self.test_name)
def short_name(self):
- """
- Returns the basename of the test without the file extension.
- """
+ """Return the basename of the test without the file extension."""
return os.path.splitext(self.basename())[0]
def id(self):
+ """Return the id of the test."""
return self.test_name
- def shortDescription(self):
+ def short_description(self):
+ """Return the short_description of the test."""
return "%s %s" % (self.test_kind, self.test_name)
def override_logger(self, new_logger):
- """
- Overrides this instance's logger with a new logger.
+ """Override this instance's logger with a new logger.
         This method is used by the report to set the test logger.
"""
@@ -100,15 +87,13 @@ class TestCase(unittest.TestCase):
self.logger = new_logger
def reset_logger(self):
- """Resets this instance's logger to its original value."""
+ """Reset this instance's logger to its original value."""
assert self._original_logger, "Logger was not overridden"
self.logger = self._original_logger
self._original_logger = None
def configure(self, fixture, *args, **kwargs): # pylint: disable=unused-argument
- """
- Stores 'fixture' as an attribute for later use during execution.
- """
+ """Store 'fixture' as an attribute for later use during execution."""
if self.is_configured:
raise RuntimeError("configure can only be called once")
@@ -116,15 +101,11 @@ class TestCase(unittest.TestCase):
self.fixture = fixture
def run_test(self):
- """
- Runs the specified test.
- """
+ """Run the specified test."""
raise NotImplementedError("run_test must be implemented by TestCase subclasses")
def as_command(self):
- """
- Returns the command invocation used to run the test.
- """
+ """Return the command invocation used to run the test."""
raise NotImplementedError("as_command must be implemented by TestCase subclasses")
@@ -132,6 +113,7 @@ class ProcessTestCase(TestCase): # pylint: disable=abstract-method
"""Base class for TestCases that executes an external process."""
def run_test(self):
+ """Run the test."""
try:
shell = self._make_process()
self._execute(shell)
@@ -143,29 +125,22 @@ class ProcessTestCase(TestCase): # pylint: disable=abstract-method
raise
def as_command(self):
- """
- Returns the command invocation used to run the test.
- """
+ """Return the command invocation used to run the test."""
return self._make_process().as_command()
def _execute(self, process):
- """
- Runs the specified process.
- """
- self.logger.info("Starting %s...\n%s", self.shortDescription(), process.as_command())
+ """Run the specified process."""
+ self.logger.info("Starting %s...\n%s", self.short_description(), process.as_command())
process.start()
- self.logger.info("%s started with pid %s.", self.shortDescription(), process.pid)
+ self.logger.info("%s started with pid %s.", self.short_description(), process.pid)
self.return_code = process.wait()
if self.return_code != 0:
- raise self.failureException("%s failed" % (self.shortDescription()))
+ raise self.failureException("%s failed" % (self.short_description()))
- self.logger.info("%s finished.", self.shortDescription())
+ self.logger.info("%s finished.", self.short_description())
def _make_process(self):
- """
- Returns a new Process instance that could be used to run the
- test or log the command.
- """
+ """Return a new Process instance that could be used to run the test or log the command."""
raise NotImplementedError("_make_process must be implemented by TestCase subclasses")
diff --git a/buildscripts/resmokelib/testing/testcases/json_schema_test.py b/buildscripts/resmokelib/testing/testcases/json_schema_test.py
index 8380b246bf6..08e5a2d71a8 100644
--- a/buildscripts/resmokelib/testing/testcases/json_schema_test.py
+++ b/buildscripts/resmokelib/testing/testcases/json_schema_test.py
@@ -1,6 +1,4 @@
-"""
-unittest.TestCase for JSON Schema tests.
-"""
+"""The unittest.TestCase for JSON Schema tests."""
from __future__ import absolute_import
@@ -16,7 +14,7 @@ class JSONSchemaTestCase(jsrunnerfile.JSRunnerFileTestCase):
REGISTERED_NAME = "json_schema_test"
def __init__(self, logger, json_filename, shell_executable=None, shell_options=None):
- """Initializes the JSONSchemaTestCase with the JSON test file."""
+ """Initialize the JSONSchemaTestCase with the JSON test file."""
jsrunnerfile.JSRunnerFileTestCase.__init__(
self, logger, "JSON Schema test", json_filename,
@@ -25,6 +23,7 @@ class JSONSchemaTestCase(jsrunnerfile.JSRunnerFileTestCase):
@property
def json_filename(self):
+ """Get the JSON filename."""
return self.test_name
def _populate_test_data(self, test_data):
diff --git a/buildscripts/resmokelib/testing/testcases/jsrunnerfile.py b/buildscripts/resmokelib/testing/testcases/jsrunnerfile.py
index 45a9e5d4944..c2da41faf37 100644
--- a/buildscripts/resmokelib/testing/testcases/jsrunnerfile.py
+++ b/buildscripts/resmokelib/testing/testcases/jsrunnerfile.py
@@ -1,6 +1,4 @@
-"""
-unittest.TestCase for tests with a static JavaScript runner file.
-"""
+"""The unittest.TestCase for tests with a static JavaScript runner file."""
from __future__ import absolute_import
@@ -16,9 +14,10 @@ class JSRunnerFileTestCase(interface.ProcessTestCase):
REGISTERED_NAME = registry.LEAVE_UNREGISTERED
- def __init__(self, logger, test_kind, test_name, test_runner_file, shell_executable=None,
- shell_options=None):
- """Initializes the JSRunnerFileTestCase with the 'test_name' file."""
+ def __init__( # pylint: disable=too-many-arguments
+ self, logger, test_kind, test_name, test_runner_file, shell_executable=None,
+ shell_options=None):
+ """Initialize the JSRunnerFileTestCase with the 'test_name' file."""
interface.ProcessTestCase.__init__(self, logger, test_kind, test_name)
@@ -29,6 +28,7 @@ class JSRunnerFileTestCase(interface.ProcessTestCase):
self.test_runner_file = test_runner_file
def configure(self, fixture, *args, **kwargs):
+ """Configure the js runner."""
interface.ProcessTestCase.configure(self, fixture, *args, **kwargs)
global_vars = self.shell_options.get("global_vars", {}).copy()
@@ -40,7 +40,8 @@ class JSRunnerFileTestCase(interface.ProcessTestCase):
self.shell_options["global_vars"] = global_vars
def _populate_test_data(self, test_data):
- """
+ """Provide base method.
+
This method is intended to be overridden by subclasses in order to define the configuration
necessary for the static JavaScript runner file.
"""
diff --git a/buildscripts/resmokelib/testing/testcases/jstest.py b/buildscripts/resmokelib/testing/testcases/jstest.py
index 747e43fe01f..9386ddf9306 100644
--- a/buildscripts/resmokelib/testing/testcases/jstest.py
+++ b/buildscripts/resmokelib/testing/testcases/jstest.py
@@ -1,6 +1,4 @@
-"""
-unittest.TestCase for JavaScript tests.
-"""
+"""The unittest.TestCase for JavaScript tests."""
from __future__ import absolute_import
@@ -18,16 +16,12 @@ from ...utils import registry
class _SingleJSTestCase(interface.ProcessTestCase):
- """
- A jstest to execute.
- """
+ """A jstest to execute."""
REGISTERED_NAME = registry.LEAVE_UNREGISTERED
def __init__(self, logger, js_filename, shell_executable=None, shell_options=None):
- """
- Initializes the _SingleJSTestCase with the JS file to run.
- """
+ """Initialize the _SingleJSTestCase with the JS file to run."""
interface.ProcessTestCase.__init__(self, logger, "JSTest", js_filename)
@@ -38,11 +32,11 @@ class _SingleJSTestCase(interface.ProcessTestCase):
self.shell_options = utils.default_if_none(shell_options, {}).copy()
def configure(self, fixture, *args, **kwargs):
+ """Configure the jstest."""
interface.ProcessTestCase.configure(self, fixture, *args, **kwargs)
def configure_shell(self):
- """
- Sets up the global variables for the shell, and data/ directory for the mongod.
+ """Set up the global variables for the shell, and data/ directory for the mongod.
configure_shell() only needs to be called once per test. Therefore if creating multiple
_SingleJSTestCase instances to be run in parallel, only call configure_shell() on one of
@@ -101,11 +95,7 @@ class _SingleJSTestCase(interface.ProcessTestCase):
self.shell_options["process_kwargs"] = process_kwargs
def _get_data_dir(self, global_vars):
- """
- Returns the value that the mongo shell should set for the
- MongoRunner.dataDir property.
- """
-
+ """Return the value that mongo shell should set for the MongoRunner.dataDir property."""
# Command line options override the YAML configuration.
data_dir_prefix = utils.default_if_none(config.DBPATH_PREFIX,
global_vars.get("MongoRunner.dataDir"))
@@ -120,40 +110,38 @@ class _SingleJSTestCase(interface.ProcessTestCase):
class JSTestCase(interface.ProcessTestCase):
- """
- A wrapper for several copies of a SingleJSTest to execute.
- """
+ """A wrapper for several copies of a SingleJSTest to execute."""
REGISTERED_NAME = "js_test"
class ThreadWithException(threading.Thread):
- """
- A wrapper for the thread class that lets us propagate exceptions.
- """
+ """A wrapper for the thread class that lets us propagate exceptions."""
def __init__(self, *args, **kwargs):
+ """Initialize JSTestCase."""
threading.Thread.__init__(self, *args, **kwargs)
self.exc_info = None
def run(self):
+ """Run the jstest."""
try:
threading.Thread.run(self)
- except:
+ except: # pylint: disable=bare-except
self.exc_info = sys.exc_info()
DEFAULT_CLIENT_NUM = 1
def __init__(self, logger, js_filename, shell_executable=None, shell_options=None):
- """
- Initializes the JSTestCase with the JS file to run.
- """
+ """Initialize the JSTestCase with the JS file to run."""
interface.ProcessTestCase.__init__(self, logger, "JSTest", js_filename)
self.num_clients = JSTestCase.DEFAULT_CLIENT_NUM
self.test_case_template = _SingleJSTestCase(logger, js_filename, shell_executable,
shell_options)
- def configure(self, fixture, num_clients=DEFAULT_CLIENT_NUM, *args, **kwargs):
+ def configure( # pylint: disable=arguments-differ,keyword-arg-before-vararg
+ self, fixture, num_clients=DEFAULT_CLIENT_NUM, *args, **kwargs):
+ """Configure the jstest."""
interface.ProcessTestCase.configure(self, fixture, *args, **kwargs)
self.num_clients = num_clients
self.test_case_template.configure(fixture, *args, **kwargs)
@@ -161,12 +149,10 @@ class JSTestCase(interface.ProcessTestCase):
def _make_process(self):
# This function should only be called by interface.py's as_command().
- return self.test_case_template._make_process()
+ return self.test_case_template._make_process() # pylint: disable=protected-access
def _get_shell_options_for_thread(self, thread_id):
- """
- Get shell_options with an initialized TestData object for given thread.
- """
+ """Get shell_options with an initialized TestData object for given thread."""
# We give each _SingleJSTestCase its own copy of the shell_options.
shell_options = self.test_case_template.shell_options.copy()
@@ -187,9 +173,7 @@ class JSTestCase(interface.ProcessTestCase):
return shell_options
def _create_test_case_for_thread(self, logger, thread_id):
- """
- Create and configure a _SingleJSTestCase to be run in a separate thread.
- """
+ """Create and configure a _SingleJSTestCase to be run in a separate thread."""
shell_options = self._get_shell_options_for_thread(thread_id)
test_case = _SingleJSTestCase(logger, self.test_case_template.js_filename,
@@ -244,6 +228,7 @@ class JSTestCase(interface.ProcessTestCase):
raise thread.exc_info
def run_test(self):
+ """Execute the test."""
if self.num_clients == 1:
self._run_single_copy()
else:
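
ThreadWithException stores sys.exc_info() so the parent thread can detect a worker failure
after join(). The pattern in isolation (the boom() target is a contrived failure, not from
the patch):

    import sys
    import threading

    class ThreadWithException(threading.Thread):
        """Thread wrapper that records any exception raised by its target."""

        def __init__(self, *args, **kwargs):
            threading.Thread.__init__(self, *args, **kwargs)
            self.exc_info = None

        def run(self):
            try:
                threading.Thread.run(self)
            except:  # pylint: disable=bare-except
                # Deliberately broad: the parent inspects exc_info after join().
                self.exc_info = sys.exc_info()

    def boom():
        raise ValueError("failure inside the worker")

    thread = ThreadWithException(target=boom)
    thread.start()
    thread.join()
    if thread.exc_info is not None:
        print("worker failed: %s" % (thread.exc_info[1],))
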
diff --git a/buildscripts/resmokelib/testing/testcases/mongos_test.py b/buildscripts/resmokelib/testing/testcases/mongos_test.py
index 64b39a32cd9..9914ba8677f 100644
--- a/buildscripts/resmokelib/testing/testcases/mongos_test.py
+++ b/buildscripts/resmokelib/testing/testcases/mongos_test.py
@@ -1,6 +1,4 @@
-"""
-unittest.TestCase for mongos --test.
-"""
+"""The unittest.TestCase for mongos --test."""
from __future__ import absolute_import
@@ -11,16 +9,12 @@ from ... import utils
class MongosTestCase(interface.ProcessTestCase):
- """
- A TestCase which runs a mongos binary with the given parameters.
- """
+ """A TestCase which runs a mongos binary with the given parameters."""
REGISTERED_NAME = "mongos_test"
def __init__(self, logger, mongos_options):
- """
- Initializes the mongos test and saves the options.
- """
+ """Initialize the mongos test and saves the options."""
self.mongos_executable = utils.default_if_none(config.MONGOS_EXECUTABLE,
config.DEFAULT_MONGOS_EXECUTABLE)
@@ -29,9 +23,7 @@ class MongosTestCase(interface.ProcessTestCase):
self.options = mongos_options.copy()
def configure(self, fixture, *args, **kwargs):
- """
- Ensures the --test option is present in the mongos options.
- """
+ """Ensure the --test option is present in the mongos options."""
interface.ProcessTestCase.configure(self, fixture, *args, **kwargs)
# Always specify test option to ensure the mongos will terminate.
diff --git a/buildscripts/resmokelib/testing/testcases/pytest.py b/buildscripts/resmokelib/testing/testcases/pytest.py
index 6c1c343f197..fe62b18e154 100644
--- a/buildscripts/resmokelib/testing/testcases/pytest.py
+++ b/buildscripts/resmokelib/testing/testcases/pytest.py
@@ -1,6 +1,4 @@
-"""
-unittest.TestCase for Python unittests.
-"""
+"""The unittest.TestCase for Python unittests."""
import os
import unittest
@@ -8,13 +6,16 @@ from buildscripts.resmokelib.testing.testcases import interface
class PyTestCase(interface.TestCase):
+ """A python test to execute."""
REGISTERED_NAME = "py_test"
def __init__(self, logger, py_filename):
+ """Initialize PyTestCase."""
interface.TestCase.__init__(self, logger, "PyTest", py_filename)
def run_test(self):
+ """Execute the test."""
suite = unittest.defaultTestLoader.loadTestsFromName(self.test_module_name)
result = unittest.TextTestRunner().run(suite)
if result.failures:
@@ -24,6 +25,7 @@ class PyTestCase(interface.TestCase):
self.return_code = 0
def as_command(self):
+ """Return execute command."""
return "python -m unittest {}".format(self.test_module_name)
@property
diff --git a/buildscripts/resmokelib/testing/testcases/sleeptest.py b/buildscripts/resmokelib/testing/testcases/sleeptest.py
index f521ecea870..163100095d3 100644
--- a/buildscripts/resmokelib/testing/testcases/sleeptest.py
+++ b/buildscripts/resmokelib/testing/testcases/sleeptest.py
@@ -1,6 +1,4 @@
-"""
-unittest.TestCase for sleeping a given amount of time.
-"""
+"""The unittest.TestCase for sleeping a given amount of time."""
from __future__ import absolute_import
@@ -10,13 +8,12 @@ from . import interface
class SleepTestCase(interface.TestCase):
+ """SleepTestCase class."""
REGISTERED_NAME = "sleep_test"
def __init__(self, logger, sleep_duration_secs):
- """
- Initializes the SleepTestCase with the duration to sleep for.
- """
+ """Initialize the SleepTestCase with the duration to sleep for."""
sleep_duration_secs = int(sleep_duration_secs)
@@ -26,11 +23,10 @@ class SleepTestCase(interface.TestCase):
self.__sleep_duration_secs = sleep_duration_secs
def run_test(self):
+ """Execute sleep."""
time.sleep(self.__sleep_duration_secs)
self.return_code = 0
def as_command(self):
- """
- Returns the command invocation used to run the test.
- """
+ """Return the command invocation used to run the test."""
return "sleep {:d}".format(self.__sleep_duration_secs)
diff --git a/buildscripts/resmokelib/utils/__init__.py b/buildscripts/resmokelib/utils/__init__.py
index 57dc8705319..1db36de5cb2 100644
--- a/buildscripts/resmokelib/utils/__init__.py
+++ b/buildscripts/resmokelib/utils/__init__.py
@@ -1,6 +1,4 @@
-"""
-Helper functions.
-"""
+"""Helper functions."""
from __future__ import absolute_import
from __future__ import print_function
@@ -16,9 +14,7 @@ from . import archival
@contextlib.contextmanager
def open_or_use_stdout(filename):
- """
- Opens the specified file for writing, or returns sys.stdout if filename is "-".
- """
+ """Open the specified file for writing, or returns sys.stdout if filename is "-"."""
if filename == "-":
yield sys.stdout
@@ -38,42 +34,32 @@ def open_or_use_stdout(filename):
def default_if_none(value, default):
+ """Set default if value is 'None'."""
return value if value is not None else default
def is_string_list(lst):
- """
- Returns true if 'lst' is a list of strings, and false otherwise.
- """
+ """Return true if 'lst' is a list of strings, and false otherwise."""
return isinstance(lst, list) and all(isinstance(x, basestring) for x in lst)
def is_string_set(value):
- """
- Returns true if 'value' is a set of strings, and false otherwise.
- """
+ """Return true if 'value' is a set of strings, and false otherwise."""
return isinstance(value, set) and all(isinstance(x, basestring) for x in value)
def is_js_file(filename):
- """
- Returns true if 'filename' ends in .js, and false otherwise.
- """
+ """Return true if 'filename' ends in .js, and false otherwise."""
return os.path.splitext(filename)[1] == ".js"
def is_yaml_file(filename):
- """
- Returns true if 'filename' ends in .yml or .yaml, and false
- otherwise.
- """
+ """Return true if 'filename' ends in .yml or .yaml, and false otherwise."""
return os.path.splitext(filename)[1] in (".yaml", ".yml")
def load_yaml_file(filename):
- """
- Attempts to read 'filename' as YAML.
- """
+ """Attempt to read 'filename' as YAML."""
try:
with open(filename, "r") as fp:
return yaml.safe_load(fp)
@@ -82,17 +68,13 @@ def load_yaml_file(filename):
def dump_yaml(value):
- """
- Returns 'value' formatted as YAML.
- """
+ """Return 'value' formatted as YAML."""
# Use block (indented) style for formatting YAML.
return yaml.safe_dump(value, default_flow_style=False).rstrip()
def load_yaml(value):
- """
- Attempts to parse 'value' as YAML.
- """
+ """Attempt to parse 'value' as YAML."""
try:
return yaml.safe_load(value)
except yaml.YAMLError as err:
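
A short usage sketch of the helpers in this module, assuming PyYAML is installed (the mapping
literal and keys are made up for illustration):

    import yaml

    def default_if_none(value, default):
        return value if value is not None else default

    def load_yaml(value):
        """Parse 'value' as YAML; yaml.safe_load returns None for empty input."""
        return yaml.safe_load(value)

    options = default_if_none(load_yaml("retries: 3"), {})
    print(options["retries"])  # 3

    # dump_yaml uses block (indented) style rather than inline flow style.
    print(yaml.safe_dump({"selector": {"roots": ["jstests/core/*.js"]}},
                         default_flow_style=False).rstrip())
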
diff --git a/buildscripts/resmokelib/utils/archival.py b/buildscripts/resmokelib/utils/archival.py
index 999e56b99ae..ecbde08c675 100644
--- a/buildscripts/resmokelib/utils/archival.py
+++ b/buildscripts/resmokelib/utils/archival.py
@@ -1,6 +1,4 @@
-"""
-Archival utility.
-"""
+"""Archival utility."""
from __future__ import absolute_import
@@ -30,7 +28,7 @@ ArchiveArgs = collections.namedtuple("ArchiveArgs",
def file_list_size(files):
- """ Return size (in bytes) of all 'files' and their subdirectories. """
+ """Return size (in bytes) of all 'files' and their subdirectories."""
if isinstance(files, str):
files = [files]
file_bytes = 0
@@ -45,7 +43,7 @@ def file_list_size(files):
def directory_size(directory):
- """ Return size (in bytes) of files in 'directory' tree. """
+ """Return size (in bytes) of files in 'directory' tree."""
dir_bytes = 0
for root_dir, _, files in os.walk(unicode(directory)):
for name in files:
@@ -62,20 +60,19 @@ def directory_size(directory):
def free_space(path):
- """ Return file system free space (in bytes) for 'path'. """
+ """Return file system free space (in bytes) for 'path'."""
if _IS_WINDOWS:
dirname = os.path.dirname(path)
free_bytes = ctypes.c_ulonglong(0)
ctypes.windll.kernel32.GetDiskFreeSpaceExW(
ctypes.c_wchar_p(dirname), None, None, ctypes.pointer(free_bytes))
return free_bytes.value
- else:
- stat = os.statvfs(path)
- return stat.f_bavail * stat.f_bsize
+ stat = os.statvfs(path)
+ return stat.f_bavail * stat.f_bsize
def remove_file(file_name):
- """ Attempts to remove file. Returns status and message. """
+ """Attempt to remove file. Return status and message."""
try:
# File descriptors, on Windows, are inherited by all subprocesses and file removal may fail
# because the file is still open.
@@ -83,18 +80,19 @@ def remove_file(file_name):
os.remove(file_name)
status = 0
message = "Successfully deleted file {}".format(file_name)
- except Exception as err:
+ except Exception as err: # pylint: disable=broad-except
status = 1
message = "Error deleting file {}: {}".format(file_name, err)
return status, message
-class Archival(object):
- """ Class to support file archival to S3."""
+class Archival(object): # pylint: disable=too-many-instance-attributes
+ """Class to support file archival to S3."""
- def __init__(self, logger, archival_json_file="archive.json", limit_size_mb=0, limit_files=0,
- s3_client=None):
- """ Archival init method. """
+ def __init__( # pylint: disable=too-many-arguments
+ self, logger, archival_json_file="archive.json", limit_size_mb=0, limit_files=0,
+ s3_client=None):
+ """Initialize Archival."""
self.archival_json_file = archival_json_file
self.limit_size_mb = limit_size_mb
@@ -134,14 +132,13 @@ class Archival(object):
return boto3.client("s3")
def archive_files_to_s3(self, display_name, input_files, s3_bucket, s3_path):
- """
- Archive 'input_files' to 's3_bucket' and 's3_path'.
+ """Archive 'input_files' to 's3_bucket' and 's3_path'.
Archive is not done if user specified limits are reached. The size limit is
enforced after it has been exceeded, since it can only be calculated after the
tar/gzip has been done.
- Returns status and message, where message contains information if status is non-0.
+ Return status and message, where message contains information if status is non-0.
"""
start_time = time.time()
@@ -168,7 +165,7 @@ class Archival(object):
@staticmethod
def _update_archive_file_wkr(queue, logger):
- """ Worker thread: Update the archival JSON file from 'queue'. """
+ """Worker thread: Update the archival JSON file from 'queue'."""
archival_json = []
while True:
archive_args = queue.get()
@@ -189,7 +186,7 @@ class Archival(object):
@staticmethod
def _upload_to_s3_wkr(queue, archive_file_queue, logger, s3_client):
- """" Worker thread: Upload to S3 from 'queue', dispatch to 'archive_file_queue'. """
+ """Worker thread: Upload to S3 from 'queue', dispatch to 'archive_file_queue'."""
while True:
upload_args = queue.get()
# Exit worker thread when sentinel is received.
@@ -207,7 +204,7 @@ class Archival(object):
upload_completed = True
logger.debug("Upload to S3 completed for %s to bucket %s path %s",
upload_args.local_file, upload_args.s3_bucket, upload_args.s3_path)
- except Exception as err:
+ except Exception as err: # pylint: disable=broad-except
logger.exception("Upload to S3 error %s", err)
if upload_args.delete_file:
@@ -274,6 +271,7 @@ class Archival(object):
return status, message, size_mb
def check_thread(self, thread, expected_alive):
+ """Check if the thread is still active."""
if thread.isAlive() and not expected_alive:
self.logger.warning(
"The %s thread did not complete, some files might not have been uploaded"
@@ -284,7 +282,7 @@ class Archival(object):
" to S3 or archived to %s.", thread.name, self.archival_json_file)
def exit(self, timeout=30):
- """ Waits for worker threads to finish. """
+ """Wait for worker threads to finish."""
# Put sentinel on upload queue to trigger worker thread exit.
self._upload_queue.put(None)
self.check_thread(self._upload_worker, True)
@@ -300,9 +298,9 @@ class Archival(object):
self.archive_time, self.num_files, self.size_mb)
def files_archived_num(self):
- """ Returns the number of the archived files. """
+ """Return the number of the archived files."""
return self.num_files
def files_archived_size_mb(self):
- """ Returns the size of the archived files. """
+ """Return the size of the archived files."""
return self.size_mb
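
Archival shuts its worker threads down by putting a None sentinel on each queue, which exit()
relies on above. The pattern reduced to its core (the Queue module name is the Python 2
spelling used throughout this codebase; the file names are made up):

    import threading
    import Queue  # Python 2 name; Python 3 calls this module "queue".

    def worker(queue):
        """Process items until the None sentinel arrives."""
        while True:
            item = queue.get()
            if item is None:
                break  # Sentinel received: exit the worker thread.
            print("archiving %s" % item)

    work_queue = Queue.Queue()
    thread = threading.Thread(target=worker, args=(work_queue,))
    thread.start()
    for name in ["data1.tgz", "data2.tgz"]:
        work_queue.put(name)
    work_queue.put(None)  # Trigger worker exit, as Archival.exit() does.
    thread.join(30)
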
diff --git a/buildscripts/resmokelib/utils/autoloader.py b/buildscripts/resmokelib/utils/autoloader.py
index 1ac58abc892..73b58563451 100644
--- a/buildscripts/resmokelib/utils/autoloader.py
+++ b/buildscripts/resmokelib/utils/autoloader.py
@@ -1,6 +1,4 @@
-"""
-Utility for loading all modules within a package.
-"""
+"""Utility for loading all modules within a package."""
from __future__ import absolute_import
@@ -9,8 +7,7 @@ import pkgutil
def load_all_modules(name, path):
- """
- Dynamically loads all modules in the 'name' package.
+ """Dynamically load all modules in the 'name' package.
This function is useful in combination with the registry.py module
so that any classes declared within the package are automatically
diff --git a/buildscripts/resmokelib/utils/globstar.py b/buildscripts/resmokelib/utils/globstar.py
index 443d75b6b0c..1e016875f94 100644
--- a/buildscripts/resmokelib/utils/globstar.py
+++ b/buildscripts/resmokelib/utils/globstar.py
@@ -1,6 +1,4 @@
-"""
-Filename globbing utility.
-"""
+"""Filename globbing utility."""
from __future__ import absolute_import
@@ -13,18 +11,15 @@ _GLOBSTAR = "**"
_CONTAINS_GLOB_PATTERN = re.compile("[*?[]")
-def is_glob_pattern(s):
- """
- Returns true if 's' represents a glob pattern, and false otherwise.
- """
+def is_glob_pattern(string):
+ """Return true if 'string' represents a glob pattern, and false otherwise."""
# Copied from glob.has_magic().
- return _CONTAINS_GLOB_PATTERN.search(s) is not None
+ return _CONTAINS_GLOB_PATTERN.search(string) is not None
def glob(globbed_pathname):
- """
- Return a list of pathnames matching the 'globbed_pathname' pattern.
+ """Return a list of pathnames matching the 'globbed_pathname' pattern.
In addition to containing simple shell-style wildcards a la fnmatch,
the pattern may also contain globstars ("**"), which is recursively
@@ -35,8 +30,7 @@ def glob(globbed_pathname):
def iglob(globbed_pathname):
- """
- Emit a list of pathnames matching the 'globbed_pathname' pattern.
+ """Emit a list of pathnames matching the 'globbed_pathname' pattern.
In addition to containing simple shell-style wildcards a la fnmatch,
the pattern may also contain globstars ("**"), which is recursively
@@ -79,9 +73,7 @@ def iglob(globbed_pathname):
def _split_path(pathname):
- """
- Return 'pathname' as a list of path components.
- """
+ """Return 'pathname' as a list of path components."""
parts = []
@@ -100,45 +92,43 @@ def _split_path(pathname):
def _canonicalize(parts):
- """
- Return a copy of 'parts' with consecutive "**"s coalesced.
+ """Return a copy of 'parts' with consecutive "**"s coalesced.
+
Raise a ValueError for unsupported uses of "**".
"""
res = []
prev_was_globstar = False
- for p in parts:
- if p == _GLOBSTAR:
+ for part in parts:
+ if part == _GLOBSTAR:
# Skip consecutive **'s
if not prev_was_globstar:
prev_was_globstar = True
- res.append(p)
- elif _GLOBSTAR in p: # a/b**/c or a/**b/c
+ res.append(part)
+ elif _GLOBSTAR in part: # a/b**/c or a/**b/c
raise ValueError("Can only specify glob patterns of the form a/**/b")
else:
prev_was_globstar = False
- res.append(p)
+ res.append(part)
return res
def _find_globstar(parts):
- """
- Return the index of the first occurrence of "**" in 'parts'.
+ """Return the index of the first occurrence of "**" in 'parts'.
+
Return -1 if "**" is not found in the list.
"""
- for (i, p) in enumerate(parts):
- if p == _GLOBSTAR:
- return i
+ for (idx, part) in enumerate(parts):
+ if part == _GLOBSTAR:
+ return idx
return -1
def _list_dir(pathname):
- """
- Return a pair of the subdirectory names and filenames immediately
- contained within the 'pathname' directory.
+ """Return a pair of subdirectory names and filenames contained within the 'pathname' directory.
If 'pathname' does not exist, then None is returned.
"""
@@ -151,9 +141,9 @@ def _list_dir(pathname):
def _expand(pathname):
- """
- Emit tuples of the form ("dir", dirname) and ("file", filename)
- of all directories and files contained within the 'pathname' directory.
+ """Emit tuples of the form ("dir", dirname) and ("file", filename).
+
+ The result is for all directories and files contained within the 'pathname' directory.
"""
res = _list_dir(pathname)
@@ -166,20 +156,20 @@ def _expand(pathname):
if os.path.basename(pathname):
yield ("dir", os.path.join(pathname, ""))
- for f in files:
- path = os.path.join(pathname, f)
+ for fname in files:
+ path = os.path.join(pathname, fname)
yield ("file", path)
- for d in dirs:
- path = os.path.join(pathname, d)
- for x in _expand(path):
- yield x
+ for dname in dirs:
+ path = os.path.join(pathname, dname)
+ for xpath in _expand(path):
+ yield xpath
def _expand_curdir(pathname):
- """
- Emit tuples of the form ("dir", dirname) and ("file", filename)
- of all directories and files contained within the 'pathname' directory.
+ """Emit tuples of the form ("dir", dirname) and ("file", filename).
+
+ The result is for all directories and files contained within the 'pathname' directory.
The returned pathnames omit a "./" prefix.
"""
@@ -193,9 +183,9 @@ def _expand_curdir(pathname):
# Zero expansion
yield ("dir", "")
- for f in files:
- yield ("file", f)
+ for fname in files:
+ yield ("file", fname)
- for d in dirs:
- for x in _expand(d):
- yield x
+ for dname in dirs:
+ for xdir in _expand(dname):
+ yield xdir
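
The coalescing rule for consecutive globstars, extracted into a standalone runnable form
(this mirrors _canonicalize above; the sample path components are made up):

    def canonicalize(parts):
        """Coalesce consecutive "**" components and reject embedded "**"."""
        res = []
        prev_was_globstar = False
        for part in parts:
            if part == "**":
                if not prev_was_globstar:
                    prev_was_globstar = True
                    res.append(part)
            elif "**" in part:  # e.g. a/b**/c or a/**b/c
                raise ValueError("Can only specify glob patterns of the form a/**/b")
            else:
                prev_was_globstar = False
                res.append(part)
        return res

    print(canonicalize(["jstests", "**", "**", "core", "*.js"]))
    # ['jstests', '**', 'core', '*.js']
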
diff --git a/buildscripts/resmokelib/utils/jscomment.py b/buildscripts/resmokelib/utils/jscomment.py
index 43484573fac..67758197c5c 100644
--- a/buildscripts/resmokelib/utils/jscomment.py
+++ b/buildscripts/resmokelib/utils/jscomment.py
@@ -1,6 +1,4 @@
-"""
-Utility for parsing JS comments.
-"""
+"""Utility for parsing JS comments."""
from __future__ import absolute_import
@@ -13,9 +11,9 @@ _JSTEST_TAGS_RE = re.compile(r".*@tags\s*:\s*(\[[^\]]*\])", re.DOTALL)
def get_tags(pathname):
- """
- Returns the list of tags found in the (JS-style) comments of
- 'pathname'. The definition can span multiple lines, use unquoted,
+ """Return the list of tags found in the (JS-style) comments of 'pathname'.
+
+ The definition can span multiple lines, use unquoted,
single-quoted, or double-quoted strings, and use the '#' character
for inline commenting.
@@ -48,9 +46,10 @@ def get_tags(pathname):
return []
-def _strip_jscomments(s):
- """
- Given a string 's' that represents the contents after the "@tags:"
+def _strip_jscomments(string):
+ """Strip JS comments from a 'string'.
+
+ Given a string 'string' that represents the contents after the "@tags:"
annotation in the JS file, this function returns a string that can
be converted to YAML.
@@ -69,7 +68,7 @@ def _strip_jscomments(s):
yaml_lines = []
- for line in s.splitlines():
+ for line in string.splitlines():
# Remove leading whitespace and symbols that commonly appear in JS comments.
line = line.lstrip("\t ").lstrip("*/")
yaml_lines.append(line)
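
The tag-extraction regex at work on a typical JS test header; get_tags() additionally strips
the comment symbols (as _strip_jscomments does) and parses the result as YAML, which this
sketch omits:

    import re

    _JSTEST_TAGS_RE = re.compile(r".*@tags\s*:\s*(\[[^\]]*\])", re.DOTALL)

    contents = """
    /**
     * @tags: [requires_sharding, slow]
     */
    """

    match = _JSTEST_TAGS_RE.match(contents)
    if match:
        print(match.group(1))  # [requires_sharding, slow]
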
diff --git a/buildscripts/resmokelib/utils/queue.py b/buildscripts/resmokelib/utils/queue.py
index da059ffd852..c77692138b1 100644
--- a/buildscripts/resmokelib/utils/queue.py
+++ b/buildscripts/resmokelib/utils/queue.py
@@ -1,5 +1,4 @@
-"""
-Extension to the Queue.Queue class.
+"""Extension to the Queue.Queue class.
Added support for the join() method to take a timeout. This is necessary
in order for KeyboardInterrupt exceptions to get propagated.
@@ -9,22 +8,18 @@ See https://bugs.python.org/issue1167930 for more details.
from __future__ import absolute_import
-import Queue
+import Queue as _Queue
import time
# Exception that is raised when get_nowait() is called on an empty Queue.
-Empty = Queue.Empty
+Empty = _Queue.Empty
-class Queue(Queue.Queue):
- """
- A multi-producer, multi-consumer queue.
- """
+class Queue(_Queue.Queue):
+ """A multi-producer, multi-consumer queue."""
- def join(self, timeout=None):
- """
- Wait until all items in the queue have been retrieved and processed,
- or until 'timeout' seconds have passed.
+ def join(self, timeout=None): # pylint: disable=arguments-differ
+ """Wait until all items in the queue have been processed or 'timeout' seconds have passed.
The count of unfinished tasks is incremented whenever an item is added
to the queue. The count is decremented whenever task_done() is called
diff --git a/buildscripts/resmokelib/utils/registry.py b/buildscripts/resmokelib/utils/registry.py
index 0a18c556e94..0aa02f4b2b5 100644
--- a/buildscripts/resmokelib/utils/registry.py
+++ b/buildscripts/resmokelib/utils/registry.py
@@ -1,6 +1,6 @@
-"""
-Utility for having class declarations automatically cause a reference to
-the class to be stored along with its name.
+"""Utility for having class declarations.
+
+The registry automatically causes a reference to the class to be stored along with its name.
This pattern enables the associated class to be looked up later by using
its name.
@@ -15,23 +15,19 @@ LEAVE_UNREGISTERED = object()
def make_registry_metaclass(registry_store):
- """
- Returns a new Registry metaclass.
- """
+ """Return a new Registry metaclass."""
if not isinstance(registry_store, dict):
raise TypeError("'registry_store' argument must be a dict")
class Registry(type):
- """
- A metaclass that stores a reference to all registered classes.
- """
+ """A metaclass that stores a reference to all registered classes."""
- def __new__(meta, class_name, base_classes, class_dict):
- """
- Creates and returns a new instance of Registry, which is a
- class named 'class_name' derived from 'base_classes' that
- defines 'class_dict' as additional attributes.
+ def __new__(mcs, class_name, base_classes, class_dict):
+ """Create and returns a new instance of Registry.
+
+ The registry is a class named 'class_name' derived from 'base_classes'
+ that defines 'class_dict' as additional attributes.
The returned class is added to 'registry_store' using
class_dict["REGISTERED_NAME"] as the name, or 'class_name'
@@ -46,7 +42,7 @@ def make_registry_metaclass(registry_store):
"""
registered_name = class_dict.setdefault("REGISTERED_NAME", class_name)
- cls = type.__new__(meta, class_name, base_classes, class_dict)
+ cls = type.__new__(mcs, class_name, base_classes, class_dict)
if registered_name is not LEAVE_UNREGISTERED:
if registered_name in registry_store:
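
How the registry metaclass and REGISTERED_NAME fit together, in a compact sketch. The registry
dict and test class are stand-ins; calling the metaclass directly keeps the snippet portable
across Python 2 and 3, while the codebase itself uses the Python 2 __metaclass__ attribute, and
the LEAVE_UNREGISTERED sentinel is omitted here:

    _REGISTRY = {}

    class Registry(type):
        """Metaclass that records each new class under its REGISTERED_NAME."""

        def __new__(mcs, class_name, base_classes, class_dict):
            registered_name = class_dict.setdefault("REGISTERED_NAME", class_name)
            cls = type.__new__(mcs, class_name, base_classes, class_dict)
            if registered_name in _REGISTRY:
                raise ValueError("The name %s is already registered" % registered_name)
            _REGISTRY[registered_name] = cls
            return cls

    # Equivalent to declaring: class JSTest(object): __metaclass__ = Registry; ...
    JSTest = Registry("JSTest", (object,), {"REGISTERED_NAME": "js_test"})

    print(_REGISTRY["js_test"] is JSTest)  # True
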
diff --git a/buildscripts/resmokelib/utils/scheduler.py b/buildscripts/resmokelib/utils/scheduler.py
index 753fc217745..04abafcd330 100644
--- a/buildscripts/resmokelib/utils/scheduler.py
+++ b/buildscripts/resmokelib/utils/scheduler.py
@@ -1,7 +1,4 @@
-"""
-A thread-safe version of sched.scheduler since the class wasn't made
-thread-safe until Python 3.3.
-"""
+"""Thread-safe version of sched.scheduler; the class wasn't made thread-safe until Python 3.3."""
from __future__ import absolute_import
@@ -11,34 +8,38 @@ import threading
class Scheduler(sched.scheduler):
- """
- A thread-safe, general purpose event scheduler.
- """
+ """A thread-safe, general purpose event scheduler."""
def __init__(self, timefunc, delayfunc):
+ """Initialize Scheduler."""
sched.scheduler.__init__(self, timefunc, delayfunc)
# We use a recursive lock because sched.scheduler.enter() calls sched.scheduler.enterabs().
self._queue_lock = threading.RLock()
def enterabs(self, time, priority, action, argument):
+ """Enterabs."""
with self._queue_lock:
return sched.scheduler.enterabs(self, time, priority, action, argument)
def enter(self, delay, priority, action, argument):
+ """Enter."""
with self._queue_lock:
return sched.scheduler.enter(self, delay, priority, action, argument)
def cancel(self, event):
+ """Cancel."""
with self._queue_lock:
return sched.scheduler.cancel(self, event)
def empty(self):
+ """Empty."""
with self._queue_lock:
return sched.scheduler.empty(self)
# The implementation for the run() method was adapted from sched.scheduler.run() in Python 3.6.
def run(self):
+ """Run."""
while True:
with self._queue_lock:
if not self._queue:
@@ -62,5 +63,6 @@ class Scheduler(sched.scheduler):
@property
def queue(self):
+ """Get Queue."""
with self._queue_lock:
return sched.scheduler.queue.fget(self)
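
A usage sketch of the locked scheduler wrapper. sched.scheduler takes a time function and a
delay function, and the lock must be recursive because enter() calls enterabs() on the same
instance. Method signatures below follow Python 2, matching this codebase (Python 3 adds a
kwargs parameter), and fire() is a contrived callback:

    import sched
    import threading
    import time

    class Scheduler(sched.scheduler):
        """sched.scheduler with queue access serialized (pre-Python 3.3 safety)."""

        def __init__(self, timefunc, delayfunc):
            sched.scheduler.__init__(self, timefunc, delayfunc)
            # RLock because enter() calls enterabs() on the same instance.
            self._queue_lock = threading.RLock()

        def enterabs(self, when, priority, action, argument):
            with self._queue_lock:
                return sched.scheduler.enterabs(self, when, priority, action, argument)

        def enter(self, delay, priority, action, argument):
            with self._queue_lock:
                return sched.scheduler.enter(self, delay, priority, action, argument)

    def fire():
        print("timer fired")

    scheduler = Scheduler(time.time, time.sleep)
    scheduler.enter(0.1, 1, fire, ())
    scheduler.run()
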
diff --git a/buildscripts/scons.py b/buildscripts/scons.py
index b7da54ed99b..8c7b9448c51 100755
--- a/buildscripts/scons.py
+++ b/buildscripts/scons.py
@@ -1,4 +1,5 @@
#!/usr/bin/env python2
+"""Scons module."""
from __future__ import print_function
@@ -7,20 +8,20 @@ import sys
SCONS_VERSION = os.environ.get('SCONS_VERSION', "2.5.0")
-mongodb_root = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
-scons_dir = os.path.join(mongodb_root, 'src', 'third_party', 'scons-' + SCONS_VERSION,
+MONGODB_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
+SCONS_DIR = os.path.join(MONGODB_ROOT, 'src', 'third_party', 'scons-' + SCONS_VERSION,
'scons-local-' + SCONS_VERSION)
-if not os.path.exists(scons_dir):
- print("Could not find SCons in '%s'" % (scons_dir))
+if not os.path.exists(SCONS_DIR):
+ print("Could not find SCons in '%s'" % (SCONS_DIR))
sys.exit(1)
-sys.path = [scons_dir] + sys.path
+sys.path = [SCONS_DIR] + sys.path
try:
import SCons.Script
except ImportError:
- print("Could not find SCons in '%s'" % (scons_dir))
+ print("Could not find SCons in '%s'" % (SCONS_DIR))
sys.exit(1)
SCons.Script.main()
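
The wrapper's core move is prepending the vendored directory to sys.path before importing, so
the pinned copy wins over anything installed system-wide. Generalized into a sketch (the
directory and package names here are hypothetical):

    import os
    import sys

    VENDORED_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                "third_party", "somepkg-1.0")  # hypothetical path

    if not os.path.exists(VENDORED_DIR):
        print("Could not find somepkg in '%s'" % (VENDORED_DIR))
        sys.exit(1)

    sys.path = [VENDORED_DIR] + sys.path  # Vendored copy takes precedence.

    import somepkg  # hypothetical vendored package
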
diff --git a/buildscripts/scons_cache_prune.py b/buildscripts/scons_cache_prune.py
index 21b5582bbed..74631a9d4de 100644
--- a/buildscripts/scons_cache_prune.py
+++ b/buildscripts/scons_cache_prune.py
@@ -1,6 +1,8 @@
#!/usr/bin/python
# encoding: utf-8
"""
+Prune the scons cache.
+
This script, borrowed from some waf code, with a stand alone interface, provides a way to
remove files from the cache on an LRU (least recently used) basis to prevent the scons cache
from outgrowing the storage capacity.
@@ -16,14 +18,15 @@ import os
import shutil
logging.basicConfig(level=logging.INFO)
-logger = logging.getLogger("scons.cache.prune.lru")
+LOGGER = logging.getLogger("scons.cache.prune.lru") # type: ignore
GIGBYTES = 1024 * 1024 * 1024
-cache_item = collections.namedtuple("CacheContents", ["path", "time", "size"])
+CacheItem = collections.namedtuple("CacheContents", ["path", "time", "size"])
def collect_cache_contents(cache_path):
+ """Collect the cache contents."""
# map folder names to timestamps
contents = []
total = 0
@@ -36,12 +39,12 @@ def collect_cache_contents(cache_path):
for file_name in os.listdir(path):
file_path = os.path.join(path, file_name)
if os.path.isdir(file_path):
- logger.warning("cache item {0} is a directory and not a file. "
- "The cache may be currupt.".format(file_path))
+ LOGGER.warning("cache item %s is a directory and not a file. "
+ "The cache may be corrupt.", file_path)
continue
- item = cache_item(path=file_path, time=os.stat(file_path).st_atime,
- size=os.stat(file_path).st_size)
+ item = CacheItem(path=file_path, time=os.stat(file_path).st_atime,
+ size=os.stat(file_path).st_size)
total += item.size
@@ -51,6 +54,7 @@ def collect_cache_contents(cache_path):
def prune_cache(cache_path, cache_size_gb, clean_ratio):
+ """Prune the cache."""
# This function is taken as is from waf, with the interface cleaned up and some minor
# stylistic changes.
@@ -58,10 +62,10 @@ def prune_cache(cache_path, cache_size_gb, clean_ratio):
(total_size, contents) = collect_cache_contents(cache_path)
- logger.info("cache size {0}, quota {1}".format(total_size, cache_size))
+ LOGGER.info("cache size %d, quota %d", total_size, cache_size)
if total_size >= cache_size:
- logger.info("trimming the cache since {0} > {0}".format(total_size, cache_size))
+ LOGGER.info("trimming the cache since %d > %d", total_size, cache_size)
# make a list to sort the folders' by timestamp
contents.sort(key=lambda x: x.time, reverse=True) # sort by timestamp
@@ -69,9 +73,9 @@ def prune_cache(cache_path, cache_size_gb, clean_ratio):
# now that the contents of things to delete is sorted by timestamp in reverse order, we
# just delete things until the total_size falls below the target cache size ratio.
while total_size >= cache_size * clean_ratio:
- if len(contents) == 0:
+ if not contents:
shutil.rmtree(cache_path)
- logger.error("cache size is over quota, and there are no files in "
+ LOGGER.error("cache size is over quota, and there are no files in "
"the queue to delete. Removed the entire cache.")
return False
@@ -80,27 +84,27 @@ def prune_cache(cache_path, cache_size_gb, clean_ratio):
to_remove = cache_item.path + ".del"
try:
os.rename(cache_item.path, to_remove)
- except:
+ except Exception: # pylint: disable=broad-except
# another process may have already cleared the file.
pass
else:
try:
os.remove(to_remove)
- logger.info("removed file from cache: {0}".format(cache_item.path))
+ LOGGER.info("removed file from cache: %s", cache_item.path)
total_size -= cache_item.size
- except Exception as e:
+ except Exception as err: # pylint: disable=broad-except
# this should not happen, but who knows?
- logger.error("error [{0}, {1}] removing file '{2}', "
- "please report this error".format(e, type(e), to_remove))
+ LOGGER.error("error [%s, %s] removing file '%s', "
+ "please report this error", err, type(err), to_remove)
- logger.info("total cache size at the end of pruning: {0}".format(total_size))
- return True
- else:
- logger.info("cache size ({0}) is currently within boundaries".format(total_size))
+ LOGGER.info("total cache size at the end of pruning: %d", total_size)
return True
+ LOGGER.info("cache size (%d) is currently within boundaries", total_size)
+ return True
def main():
+ """Execute Main entry."""
parser = argparse.ArgumentParser(description="SCons cache pruning tool")
parser.add_argument("--cache-dir", "-d", default=None, help="path to the cache directory.")
@@ -114,14 +118,14 @@ def main():
args = parser.parse_args()
if args.cache_dir is None or not os.path.isdir(args.cache_dir):
- logger.error("must specify a valid cache path, [{0}]".format(args.cache_dir))
+ LOGGER.error("must specify a valid cache path, [%s]", args.cache_dir)
exit(1)
ok = prune_cache(cache_path=args.cache_dir, cache_size_gb=args.cache_size,
clean_ratio=args.prune_ratio)
if not ok:
- logger.error("encountered error cleaning the cache. exiting.")
+ LOGGER.error("encountered error cleaning the cache. exiting.")
exit(1)
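
A recurring fix in this patch replaces str.format() inside logger calls with %-style
arguments, so the logging module defers interpolation until the record is actually emitted
(and pylint's logging-format-interpolation check passes). A minimal before and after:

    import logging

    logging.basicConfig(level=logging.INFO)
    LOGGER = logging.getLogger("scons.cache.prune.lru")

    total_size, cache_size = 10 * 1024 ** 3, 8 * 1024 ** 3

    # Flagged by pylint: the message is formatted eagerly, even if filtered out.
    LOGGER.info("cache size {0}, quota {1}".format(total_size, cache_size))

    # Preferred: arguments are interpolated lazily by the logging module.
    LOGGER.info("cache size %d, quota %d", total_size, cache_size)
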
diff --git a/buildscripts/setup_multiversion_mongodb.py b/buildscripts/setup_multiversion_mongodb.py
index 8027f268369..7b5c5ca9e57 100755
--- a/buildscripts/setup_multiversion_mongodb.py
+++ b/buildscripts/setup_multiversion_mongodb.py
@@ -23,7 +23,7 @@ import requests
import requests.exceptions
-def dump_stacks(_signal_num, _frame):
+def dump_stacks(_signal_num, _frame): # pylint: disable=unused-argument
"""Dump stacks when SIGUSR1 is received."""
print("======================================")
print("DUMPING STACKS due to SIGUSR1 signal")
@@ -32,19 +32,21 @@ def dump_stacks(_signal_num, _frame):
print("Total Threads: {:d}".format(len(threads)))
- for tid, stack in sys._current_frames().items():
+ for tid, stack in sys._current_frames().items(): # pylint: disable=protected-access
print("Thread {:d}".format(tid))
print("".join(traceback.format_stack(stack)))
print("======================================")
def get_version_parts(version, for_sorting=False):
- """Returns a list containing the components of the version string
- as numeric values. This function can be used for numeric sorting
+ """Return a list containing the components of the version string as numeric values.
+
+ This function can be used for numeric sorting
of version strings such as '2.6.0-rc1' and '2.4.0' when the
- 'for_sorting' parameter is specified as true."""
+ 'for_sorting' parameter is specified as true.
+ """
- RC_OFFSET = -100
+ rc_offset = -100
version_parts = re.split(r"\.|-", version)
if version_parts[-1] == "pre":
@@ -56,7 +58,7 @@ def get_version_parts(version, for_sorting=False):
# RC versions are weighted down to allow future RCs and general
# releases to be sorted in ascending order (e.g., 2.6.0-rc1,
# 2.6.0-rc2, 2.6.0).
- version_parts[-1] = int(version_parts[-1][2:]) + RC_OFFSET
+ version_parts[-1] = int(version_parts[-1][2:]) + rc_offset
elif version_parts[0].startswith("v") and version_parts[-1] == "latest":
version_parts[0] = version_parts[0][1:]
# The "<branchname>-latest" versions are weighted the highest when a particular major
@@ -73,7 +75,7 @@ def get_version_parts(version, for_sorting=False):
def download_file(url, file_name, download_retries=5):
- """Returns True if download was successful. Raises error if download fails."""
+ """Return True if download was successful. Raises error if download fails."""
while download_retries > 0:
@@ -111,10 +113,12 @@ def download_file(url, file_name, download_retries=5):
raise Exception("Unknown download problem for {} to file {}".format(url, file_name))
-class MultiVersionDownloader(object):
+class MultiVersionDownloader(object): # pylint: disable=too-many-instance-attributes
"""Class to support multiversion downloads."""
- def __init__(self, install_dir, link_dir, edition, platform, architecture, use_latest=False):
+ def __init__( # pylint: disable=too-many-arguments
+ self, install_dir, link_dir, edition, platform, architecture, use_latest=False):
+ """Initialize MultiVersionDownloader."""
self.install_dir = install_dir
self.link_dir = link_dir
self.edition = edition.lower()
@@ -128,27 +132,27 @@ class MultiVersionDownloader(object):
@property
def generic_links(self):
- """Returns a list of generic links."""
+ """Get a list of generic links."""
if self._generic_links is None:
self._links, self._generic_links = self.download_links()
return self._generic_links
@property
def links(self):
- """Returns a list of links."""
+ """Get a list of links."""
if self._links is None:
self._links, self._generic_links = self.download_links()
return self._links
@staticmethod
def is_major_minor_version(version):
- """Returns True if the version is specified as M.m."""
+ """Return True if the version is specified as M.m."""
if re.match(r"^\d+?\.\d+?$", version) is None:
return False
return True
def download_links(self):
- """Returns the download and generic download links."""
+ """Return the download and generic download links."""
temp_file = tempfile.mktemp()
download_file("https://downloads.mongodb.org/full.json", temp_file)
with open(temp_file) as file_handle:
@@ -179,15 +183,17 @@ class MultiVersionDownloader(object):
return links, generic_links
def download_install(self, version):
- """Downloads and installs the version specified."""
- download_file = self.download_version(version)
- if download_file:
- installed_dir = self.uncompress_download(download_file)
+ """Download and install the version specified."""
+ dl_file = self.download_version(version)
+ if dl_file:
+ installed_dir = self.uncompress_download(dl_file)
self.symlink_version(version, installed_dir)
- def download_version(self, version):
- """Downloads the version specified and returns file location.
- If no download occurs, file location is None."""
+ def download_version(self, version): # pylint: disable=too-many-branches,too-many-locals,too-many-statements
+ """Download the version specified and return file location.
+
+ If no download occurs, file location is None.
+ """
try:
os.makedirs(self.install_dir)
@@ -215,7 +221,7 @@ class MultiVersionDownloader(object):
continue
urls.append((link_version, link_url))
- if len(urls) == 0:
+ if not urls:
print("Cannot find a link for version {}, versions {} found.".format(
version, self.links), file=sys.stderr)
for ver, generic_url in self.generic_links.iteritems():
@@ -224,7 +230,7 @@ class MultiVersionDownloader(object):
if "-" in version and ver != version:
continue
urls.append((ver, generic_url))
- if len(urls) == 0:
+ if not urls:
raise Exception(
"No fall-back generic link available or version {}.".format(version))
else:
@@ -262,7 +268,6 @@ class MultiVersionDownloader(object):
latest_downloaded = True
except requests.exceptions.HTTPError:
print("Failed to download {}".format(latest_url))
- pass
if not latest_downloaded:
print("Downloading data for version {} ({})...".format(version, full_version))
@@ -270,16 +275,16 @@ class MultiVersionDownloader(object):
download_file(url, temp_file)
return temp_file
- def uncompress_download(self, download_file):
- """Downloads the version specified and returns root of ."""
+ def uncompress_download(self, dl_file):
+ """Download the version specified and return root of extracted directory."""
print("Uncompressing data to {}...".format(self.install_dir))
first_file = ""
temp_dir = tempfile.mkdtemp()
- _, file_suffix = os.path.splitext(download_file)
+ _, file_suffix = os.path.splitext(dl_file)
if file_suffix == ".zip":
# Support .zip downloads, used for Windows binaries.
- with zipfile.ZipFile(download_file) as zip_handle:
+ with zipfile.ZipFile(dl_file) as zip_handle:
# Use the name of the root directory in the archive as the name of the directory
# to extract the binaries into inside 'self.install_dir'. The name of the root
# directory nearly always matches the parsed URL text, with the exception of
@@ -288,7 +293,7 @@ class MultiVersionDownloader(object):
zip_handle.extractall(temp_dir)
elif file_suffix == ".tgz":
# Support .tgz downloads, used for Linux binaries.
- with contextlib.closing(tarfile.open(download_file, "r:gz")) as tar_handle:
+ with contextlib.closing(tarfile.open(dl_file, "r:gz")) as tar_handle:
# Use the name of the root directory in the archive as the name of the directory
# to extract the binaries into inside 'self.install_dir'. The name of the root
# directory nearly always matches the parsed URL text, with the exception of
@@ -313,12 +318,12 @@ class MultiVersionDownloader(object):
shutil.move(temp_install_dir, self.install_dir)
shutil.rmtree(temp_dir)
- os.remove(download_file)
+ os.remove(dl_file)
return os.path.abspath(os.path.join(self.install_dir, extract_dir))
def symlink_version(self, version, installed_dir):
- """Symlinks the binaries in the 'installed_dir' to the 'link_dir.'"""
+ """Symlink the binaries in the 'installed_dir' to the 'link_dir'."""
try:
os.makedirs(self.link_dir)
except OSError as exc:
@@ -338,7 +343,7 @@ class MultiVersionDownloader(object):
if os.name == "nt":
# os.symlink is not supported on Windows, use a direct method instead.
def symlink_ms(source, link_name):
- """Provides symlink for Windows."""
+ """Provide symlink for Windows."""
import ctypes
csl = ctypes.windll.kernel32.CreateSymbolicLinkW
csl.argtypes = (ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_uint32)
@@ -357,7 +362,7 @@ class MultiVersionDownloader(object):
def main():
- """Main program."""
+ """Execute Main program."""
# Listen for SIGUSR1 and dump stack if received.
try:
@@ -420,7 +425,7 @@ we'll pull the highest non-rc version compatible with the version specified.
options, versions = parser.parse_args()
# Check for required options.
- if (not versions or not options.install_dir or not options.link_dir or not options.platform):
+ if not versions or not options.install_dir or not options.link_dir or not options.platform:
parser.print_help()
parser.exit(1)
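For context on the rc_offset weighting in get_version_parts() above, here is a small self-contained sketch of how pushing release-candidate parts below zero makes them sort before the final release; the helper is illustrative and simpler than the script's real parser:

    import re

    RC_OFFSET = -100  # assumption: mirrors the script's rc_offset constant

    def version_key(version):
        """Split '2.6.0-rc1'-style strings into sortable numeric parts."""
        parts = re.split(r"\.|-", version)
        if parts[-1].startswith("rc"):
            parts[-1] = int(parts[-1][2:]) + RC_OFFSET
        else:
            parts.append(0)  # final releases outrank any rc
        return [int(part) for part in parts]

    print(sorted(["2.6.0-rc2", "2.6.0", "2.6.0-rc1"], key=version_key))
    # ['2.6.0-rc1', '2.6.0-rc2', '2.6.0']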
diff --git a/buildscripts/tests/ciconfig/test_evergreen.py b/buildscripts/tests/ciconfig/test_evergreen.py
index 9409389f0a1..992bbc1a809 100644
--- a/buildscripts/tests/ciconfig/test_evergreen.py
+++ b/buildscripts/tests/ciconfig/test_evergreen.py
@@ -8,6 +8,8 @@ import unittest
import buildscripts.ciconfig.evergreen as _evergreen
+# pylint: disable=missing-docstring,protected-access
+
TEST_FILE_PATH = os.path.join(os.path.dirname(__file__), "evergreen.yml")
diff --git a/buildscripts/tests/ciconfig/test_tags.py b/buildscripts/tests/ciconfig/test_tags.py
index 7f8e923ab10..a60781506b4 100644
--- a/buildscripts/tests/ciconfig/test_tags.py
+++ b/buildscripts/tests/ciconfig/test_tags.py
@@ -6,6 +6,8 @@ import unittest
import buildscripts.ciconfig.tags as _tags
+# pylint: disable=missing-docstring,protected-access
+
TEST_FILE_PATH = os.path.join(os.path.dirname(__file__), "tags.yml")
diff --git a/buildscripts/tests/resmokelib/logging/__init__.py b/buildscripts/tests/resmokelib/logging/__init__.py
index e69de29bb2d..4b7a2bb941b 100644
--- a/buildscripts/tests/resmokelib/logging/__init__.py
+++ b/buildscripts/tests/resmokelib/logging/__init__.py
@@ -0,0 +1 @@
+"""Empty."""
diff --git a/buildscripts/tests/resmokelib/logging/test_buildlogger.py b/buildscripts/tests/resmokelib/logging/test_buildlogger.py
index 734f418bf10..b57ec041f30 100644
--- a/buildscripts/tests/resmokelib/logging/test_buildlogger.py
+++ b/buildscripts/tests/resmokelib/logging/test_buildlogger.py
@@ -7,6 +7,8 @@ import unittest
from buildscripts.resmokelib.logging import buildlogger
+# pylint: disable=missing-docstring,protected-access
+
class TestLogsSplitter(unittest.TestCase):
"""Unit tests for the _LogsSplitter class."""
diff --git a/buildscripts/tests/resmokelib/test_archival.py b/buildscripts/tests/resmokelib/test_archival.py
index e0a05d51b45..43a63fa17c6 100644
--- a/buildscripts/tests/resmokelib/test_archival.py
+++ b/buildscripts/tests/resmokelib/test_archival.py
@@ -11,6 +11,8 @@ import unittest
from buildscripts.resmokelib.utils import archival
+# pylint: disable=missing-docstring,protected-access
+
_BUCKET = "mongodatafiles"
diff --git a/buildscripts/tests/resmokelib/test_selector.py b/buildscripts/tests/resmokelib/test_selector.py
index 809bce67e70..50694c05a3e 100644
--- a/buildscripts/tests/resmokelib/test_selector.py
+++ b/buildscripts/tests/resmokelib/test_selector.py
@@ -10,6 +10,8 @@ import buildscripts.resmokelib.utils.globstar as globstar
import buildscripts.resmokelib.config
import buildscripts.resmokeconfig
+# pylint: disable=missing-docstring,protected-access
+
class TestExpressions(unittest.TestCase):
"""Unit tests for the tag matching expressions."""
@@ -119,7 +121,7 @@ class MockTestFileExplorer(object):
self.binary = "dbtest"
self.jstest_tag_file = {"dir/subdir1/test11.js": "tagA", "dir/subdir3/a/test3a1.js": "tagB"}
- def is_glob_pattern(self, pattern):
+ def is_glob_pattern(self, pattern): # pylint: disable=no-self-use
return globstar.is_glob_pattern(pattern)
def iglob(self, pattern):
@@ -132,21 +134,22 @@ class MockTestFileExplorer(object):
def jstest_tags(self, file_path):
return self.tags.get(file_path, [])
- def read_root_file(self, root_file_path):
+ def read_root_file(self, root_file_path): # pylint: disable=no-self-use,unused-argument
return ["build/testA", "build/testB"]
- def fnmatchcase(self, name, pattern):
+ def fnmatchcase(self, name, pattern): # pylint: disable=no-self-use
return fnmatch.fnmatchcase(name, pattern)
def isfile(self, path):
return path in self.files
- def list_dbtests(self, binary):
+ def list_dbtests(self, binary): # pylint: disable=no-self-use,unused-argument
return ["dbtestA", "dbtestB", "dbtestC"]
def parse_tag_file(self, test_kind):
if test_kind == "js_test":
return self.jstest_tag_file
+ return None
class TestTestList(unittest.TestCase):
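The explicit "return None" added to parse_tag_file() above keeps all return paths consistent, which is what pylint's inconsistent-return-statements check enforces. A minimal sketch of the pattern (names are illustrative, not from this patch):

    def parse_tag_file(test_kind, tag_files):
        """Return the tag mapping for 'test_kind', or None if there is none."""
        if test_kind in tag_files:
            return tag_files[test_kind]
        return None  # explicit, rather than implicitly falling off the end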
diff --git a/buildscripts/tests/resmokelib/testing/fixtures/__init__.py b/buildscripts/tests/resmokelib/testing/fixtures/__init__.py
index e69de29bb2d..4b7a2bb941b 100644
--- a/buildscripts/tests/resmokelib/testing/fixtures/__init__.py
+++ b/buildscripts/tests/resmokelib/testing/fixtures/__init__.py
@@ -0,0 +1 @@
+"""Empty."""
diff --git a/buildscripts/tests/resmokelib/testing/fixtures/test_interface.py b/buildscripts/tests/resmokelib/testing/fixtures/test_interface.py
index ea2ce03b110..851c33e5318 100644
--- a/buildscripts/tests/resmokelib/testing/fixtures/test_interface.py
+++ b/buildscripts/tests/resmokelib/testing/fixtures/test_interface.py
@@ -5,9 +5,11 @@ import unittest
from buildscripts.resmokelib import errors
from buildscripts.resmokelib.testing.fixtures import interface
+# pylint: disable=missing-docstring,protected-access
+
class TestFixture(unittest.TestCase):
- def test_teardown_ok(self):
+ def test_teardown_ok(self): # pylint: disable=no-self-use
raising_fixture = UnitTestFixture(should_raise=False)
raising_fixture.teardown()
@@ -41,7 +43,7 @@ class TestFixtureTeardownHandler(unittest.TestCase):
self.assertEqual(expected_msg, handler.get_error_message())
-class UnitTestFixture(interface.Fixture):
+class UnitTestFixture(interface.Fixture): # pylint: disable=abstract-method
ERROR_MESSAGE = "Failed"
def __init__(self, should_raise=False):
diff --git a/buildscripts/tests/resmokelib/testing/hooks/test_combine_benchmark_results.py b/buildscripts/tests/resmokelib/testing/hooks/test_combine_benchmark_results.py
index d67e96a99b8..bb211fcd401 100755
--- a/buildscripts/tests/resmokelib/testing/hooks/test_combine_benchmark_results.py
+++ b/buildscripts/tests/resmokelib/testing/hooks/test_combine_benchmark_results.py
@@ -1,4 +1,5 @@
#!/usr/bin/env python
+"""Unit tests for the resmokelib.testing.hooks.combine_benchmark_results module."""
from __future__ import absolute_import
@@ -9,6 +10,8 @@ import mock
import buildscripts.resmokelib.testing.hooks.combine_benchmark_results as cbr
+# pylint: disable=missing-docstring,protected-access
+
_BM_CONTEXT = {
"date": "2018/01/30-18:40:25", "num_cpus": 40, "mhz_per_cpu": 4999,
"cpu_scaling_enabled": False, "library_build_type": "debug"
@@ -26,8 +29,7 @@ _BM_REPORT_2 = {
_BM_MEAN_REPORT = {
"name": "BM_Name1/arg1/arg with space_mean", "iterations": 1000, "real_time": 1200,
- "cpu_time": 1300,
- "bytes_per_second": 1400, "items_per_second": 1500, "custom_counter_1": 1600
+ "cpu_time": 1300, "bytes_per_second": 1400, "items_per_second": 1500, "custom_counter_1": 1600
}
_BM_MULTITHREAD_REPORT = {
@@ -41,9 +43,11 @@ _BM_MULTITHREAD_MEDIAN_REPORT = {
}
_BM_FULL_REPORT = {
- "context": _BM_CONTEXT,
- "benchmarks": [_BM_REPORT_1, _BM_REPORT_2, _BM_MEAN_REPORT, _BM_MULTITHREAD_REPORT,
- _BM_MULTITHREAD_MEDIAN_REPORT]
+ "context":
+ _BM_CONTEXT, "benchmarks": [
+ _BM_REPORT_1, _BM_REPORT_2, _BM_MEAN_REPORT, _BM_MULTITHREAD_REPORT,
+ _BM_MULTITHREAD_MEDIAN_REPORT
+ ]
}
# 12/31/2999 @ 11:59pm (UTC)
@@ -58,7 +62,7 @@ class CombineBenchmarkResultsFixture(unittest.TestCase):
# Mock the hook's parent class because we're testing only functionality of this hook and
# not anything related to or inherited from the parent class.
@mock.patch("buildscripts.resmokelib.testing.hooks.interface.Hook", autospec=True)
- def setUp(self, MockHook):
+ def setUp(self, MockHook): # pylint: disable=arguments-differ,unused-argument
self.bm_threads_report = cbr._BenchmarkThreadsReport(_BM_CONTEXT)
self.cbr_hook = cbr.CombineBenchmarkResults(None, None)
@@ -118,8 +122,7 @@ class TestBenchmarkThreadsReport(CombineBenchmarkResultsFixture):
# Also test add_report() in the process.
self.bm_threads_report.add_report(
self.bm_threads_report.parse_bm_name(_BM_MULTITHREAD_REPORT["name"]),
- _BM_MULTITHREAD_REPORT
- )
+ _BM_MULTITHREAD_REPORT)
self.assertEqual(len(self.bm_threads_report.thread_benchmark_map.keys()), 1)
@@ -135,14 +138,10 @@ class TestBenchmarkThreadsReport(CombineBenchmarkResultsFixture):
def test_generate_single_thread_perf_plugin_dict(self):
self.bm_threads_report.add_report(
- self.bm_threads_report.parse_bm_name(_BM_REPORT_1["name"]),
- _BM_REPORT_1
- )
+ self.bm_threads_report.parse_bm_name(_BM_REPORT_1["name"]), _BM_REPORT_1)
self.bm_threads_report.add_report(
- self.bm_threads_report.parse_bm_name(_BM_REPORT_2["name"]),
- _BM_REPORT_2
- )
+ self.bm_threads_report.parse_bm_name(_BM_REPORT_2["name"]), _BM_REPORT_2)
self.assertEqual(len(self.bm_threads_report.thread_benchmark_map.keys()), 1)
diff --git a/buildscripts/tests/test_aws_ec2.py b/buildscripts/tests/test_aws_ec2.py
index c289a662821..3401ae569c8 100755
--- a/buildscripts/tests/test_aws_ec2.py
+++ b/buildscripts/tests/test_aws_ec2.py
@@ -7,11 +7,13 @@ import unittest
from buildscripts import aws_ec2
+# pylint: disable=invalid-name,missing-docstring,protected-access
+
_AMI = "ami-ed6bec86"
_INSTANCE_TYPE = "t1.micro"
-class AwsEc2TestCase(unittest.TestCase):
+class AwsEc2TestCase(unittest.TestCase): # pylint: disable=too-many-instance-attributes
def setUp(self):
self.aws_ec2 = aws_ec2.AwsEc2()
self.launched_instances = []
diff --git a/buildscripts/tests/test_fetch_test_lifecycle.py b/buildscripts/tests/test_fetch_test_lifecycle.py
index 25fac72766b..14c57435dab 100644
--- a/buildscripts/tests/test_fetch_test_lifecycle.py
+++ b/buildscripts/tests/test_fetch_test_lifecycle.py
@@ -5,6 +5,8 @@ import unittest
import buildscripts.fetch_test_lifecycle as fetch
+# pylint: disable=missing-docstring,protected-access
+
class TestFetchTestLifecycle(unittest.TestCase):
def test_get_metadata_revision(self):
@@ -62,7 +64,7 @@ class MockMetadataRepository(object):
def list_revisions(self):
return [r[0] for r in self.references_revisions]
- def get_reference(self, revision, project):
+ def get_reference(self, revision, project): # pylint: disable=unused-argument
for (metadata_revision, mongo_revision) in self.references_revisions:
if metadata_revision == revision:
return mongo_revision
diff --git a/buildscripts/tests/test_git.py b/buildscripts/tests/test_git.py
index 6a49e894581..db34ad2f6f9 100644
--- a/buildscripts/tests/test_git.py
+++ b/buildscripts/tests/test_git.py
@@ -7,6 +7,8 @@ import unittest
import buildscripts.git as _git
+# pylint: disable=missing-docstring,protected-access
+
class TestRepository(unittest.TestCase):
def setUp(self):
@@ -46,7 +48,7 @@ class TestRepository(unittest.TestCase):
def _check_gito_command(self, method, command, params):
# Initialize subprocess mock.
- self.subprocess.call_output_args = None
+ self.subprocess.call_output_args = None # pylint: disable=attribute-defined-outside-init
self.subprocess.call_output = str(method)
self.subprocess.call_returncode = 0
# Call method.
@@ -80,7 +82,7 @@ class MockSubprocess(object):
self.call_returncode = 0
self.call_output = ""
- def Popen(self, args, **kwargs):
+ def Popen(self, args, **kwargs): # pylint: disable=invalid-name,unused-argument
self.call_args = args
return MockProcess(self.call_returncode, self.call_output)
diff --git a/buildscripts/tests/test_test_failures.py b/buildscripts/tests/test_lifecycle_test_failures.py
index 84bfce21317..29829b6c237 100644
--- a/buildscripts/tests/test_test_failures.py
+++ b/buildscripts/tests/test_lifecycle_test_failures.py
@@ -7,7 +7,9 @@ from __future__ import absolute_import
import datetime
import unittest
-from buildscripts import test_failures
+from buildscripts import lifecycle_test_failures as test_failures
+
+# pylint: disable=invalid-name,missing-docstring,protected-access
class TestReportEntry(unittest.TestCase):
diff --git a/buildscripts/tests/test_remote_operations.py b/buildscripts/tests/test_remote_operations.py
index 955969c3c69..f119e4ffb8a 100755
--- a/buildscripts/tests/test_remote_operations.py
+++ b/buildscripts/tests/test_remote_operations.py
@@ -14,6 +14,8 @@ import unittest
from buildscripts import remote_operations as rop
+# pylint: disable=invalid-name,missing-docstring,protected-access
+
class RemoteOperationsTestCase(unittest.TestCase):
def setUp(self):
@@ -104,7 +106,7 @@ class RemoteOperationConnection(RemoteOperationsTestCase):
class RemoteOperationShell(RemoteOperationsTestCase):
- def runTest(self):
+ def runTest(self): # pylint: disable=too-many-statements
# Shell connect
ret, buff = self.rop.shell("uname")
@@ -186,7 +188,7 @@ class RemoteOperationShell(RemoteOperationsTestCase):
class RemoteOperationCopyTo(RemoteOperationsTestCase):
- def runTest(self):
+ def runTest(self): # pylint: disable=too-many-statements
# Copy to remote
l_temp_path = tempfile.mkstemp(dir=self.temp_local_dir)[1]
@@ -305,7 +307,7 @@ class RemoteOperationCopyTo(RemoteOperationsTestCase):
class RemoteOperationCopyFrom(RemoteOperationsTestCase):
- def runTest(self):
+ def runTest(self): # pylint: disable=too-many-statements
# Copy from remote
r_temp_path = tempfile.mkstemp(dir=self.temp_remote_dir)[1]
diff --git a/buildscripts/tests/test_update_test_lifecycle.py b/buildscripts/tests/test_update_test_lifecycle.py
index 64ffc6b8b7a..f9447ba5902 100644
--- a/buildscripts/tests/test_update_test_lifecycle.py
+++ b/buildscripts/tests/test_update_test_lifecycle.py
@@ -9,10 +9,12 @@ import copy
import datetime
import unittest
-from buildscripts import test_failures
+from buildscripts import lifecycle_test_failures as test_failures
from buildscripts import update_test_lifecycle
from buildscripts.ciconfig import tags as ci_tags
+# pylint: disable=invalid-name,missing-docstring,protected-access,too-many-lines
+
class TestValidateConfig(unittest.TestCase):
"""
@@ -321,7 +323,7 @@ class TestValidateConfig(unittest.TestCase):
update_test_lifecycle.validate_config(config)
-class TestUpdateTags(unittest.TestCase):
+class TestUpdateTags(unittest.TestCase): # pylint: disable=too-many-public-methods
"""
Tests for the update_tags() function.
"""
diff --git a/buildscripts/update_test_lifecycle.py b/buildscripts/update_test_lifecycle.py
index 922c4a5da63..9e06d42a086 100755
--- a/buildscripts/update_test_lifecycle.py
+++ b/buildscripts/update_test_lifecycle.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-"""Test Failures
+"""Test Failures module.
Update etc/test_lifecycle.yml to tag unreliable tests based on historic failure rates.
"""
@@ -26,14 +26,18 @@ import yaml
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+# pylint: disable=wrong-import-position
from buildscripts import git
from buildscripts import jiraclient
from buildscripts import resmokelib
from buildscripts.resmokelib import utils
from buildscripts.resmokelib.utils import globstar
-from buildscripts import test_failures as tf
+from buildscripts import lifecycle_test_failures as tf
from buildscripts.ciconfig import evergreen as ci_evergreen
from buildscripts.ciconfig import tags as ci_tags
+# pylint: enable=wrong-import-position
+
+# pylint: disable=too-many-lines
LOGGER = logging.getLogger(__name__)
@@ -126,10 +130,10 @@ def create_batch_groups(test_groups, batch_size):
class TestHistorySource(object):
"""A class used to parallelize requests to buildscripts.test_failures.TestHistory."""
- def __init__(self, project, variants, distros, start_revision, end_revision,
- thread_pool_size=DEFAULT_NUM_THREADS):
- """
- Initializes the TestHistorySource.
+ def __init__( # pylint: disable=too-many-arguments
+ self, project, variants, distros, start_revision, end_revision,
+ thread_pool_size=DEFAULT_NUM_THREADS):
+ """Initialize the TestHistorySource.
Args:
project: the Evergreen project name.
@@ -147,7 +151,7 @@ class TestHistorySource(object):
self._thread_pool = multiprocessing.dummy.Pool(thread_pool_size)
def get_history_data(self, tests, tasks):
- """Retrieves the history data for the given tests and tasks.
+ """Retrieve the history data for the given tests and tasks.
The requests for each task will be parallelized using the internal thread pool.
"""
@@ -173,7 +177,7 @@ def callo(args):
def git_commit_range_since(since):
- """Returns first and last commit in 'since' period specified.
+ """Return first and last commit in 'since' period specified.
Specify 'since' as any acceptable period for git log --since.
The period can be specified as '4.weeks' or '3.days'.
@@ -184,7 +188,7 @@ def git_commit_range_since(since):
def git_commit_prior(revision):
- """Returns commit revision prior to one specified."""
+ """Return commit revision prior to one specified."""
git_format = "git log -2 {revision} --pretty=format:%H"
git_command = git_format.format(revision=revision)
commits = callo(git_command.split()).split("\n")
@@ -223,7 +227,7 @@ def check_days(name, days):
def unreliable_tag(task, variant, distro):
- """Returns the unreliable tag."""
+ """Return the unreliable tag."""
for (component_name, component_value) in (("task", task), ("variant", variant), ("distro",
distro)):
@@ -238,8 +242,9 @@ def unreliable_tag(task, variant, distro):
return "unreliable|{}|{}|{}".format(task, variant, distro)
-def update_lifecycle(lifecycle_tags_file, report, method_test, add_tags, fail_rate, min_run):
- """Updates the lifecycle object based on the test_method.
+def update_lifecycle( # pylint: disable=too-many-arguments
+ lifecycle_tags_file, report, method_test, add_tags, fail_rate, min_run):
+ """Update the lifecycle object based on the test_method.
The test_method checks unreliable or reliable fail_rates.
"""
@@ -254,16 +259,12 @@ def update_lifecycle(lifecycle_tags_file, report, method_test, add_tags, fail_ra
def compare_tags(tag_a, tag_b):
- """Compare two tags and return 1, -1 or 0 if 'tag_a' is superior, inferior or
- equal to 'tag_b'.
- """
+ """Return 1, -1 or 0 if 'tag_a' is superior, inferior or equal to 'tag_b'."""
return cmp(tag_a.split("|"), tag_b.split("|"))
-def validate_config(config):
- """
- Raises a TypeError or ValueError exception if 'config' isn't a valid model.
- """
+def validate_config(config): # pylint: disable=too-many-branches
+ """Raise a TypeError or ValueError exception if 'config' isn't a valid model."""
for (name, fail_rates) in (("test", config.test_fail_rates), ("task", config.task_fail_rates),
("variant", config.variant_fail_rates), ("distro",
@@ -315,10 +316,9 @@ def validate_config(config):
def _test_combination_from_entry(entry, components):
- """Creates a test combination tuple from a tf._ReportEntry and target components.
+ """Create a test combination tuple from a tf._ReportEntry and target components.
- Returns:
- A tuple containing the entry fields specified in components.
+ Return a tuple containing the entry fields specified in components.
"""
combination = []
for component in components:
@@ -327,10 +327,9 @@ def _test_combination_from_entry(entry, components):
def _test_combination_from_tag(test, tag):
- """Creates a test combination tuple from a test name and a tag.
+ """Create a test combination tuple from a test name and a tag.
- Returns:
- A tuple containing the test name and the components found in the tag.
+ Return a tuple containing the test name and the components found in the tag.
"""
combination = [test]
for element in _split_tag(tag):
@@ -339,20 +338,24 @@ def _test_combination_from_tag(test, tag):
return tuple(combination)
-def update_tags(lifecycle_tags, config, report, tests):
- """
- Updates the tags in 'lifecycle_tags' based on the historical test failures of tests 'tests'
+def update_tags(lifecycle_tags, config, report, tests): # pylint: disable=too-many-locals
+ """Update the tags in 'lifecycle_tags'.
+
+ This is based on the historical test failures of tests 'tests'
mentioned in 'report' according to the model described by 'config'.
"""
# We initialize 'grouped_entries' to make PyLint not complain about 'grouped_entries' being used
# before assignment.
grouped_entries = None
- for (i, (components, rates)) in enumerate(
- ((tf.Report.TEST_TASK_VARIANT_DISTRO,
- config.distro_fail_rates), (tf.Report.TEST_TASK_VARIANT, config.variant_fail_rates),
- (tf.Report.TEST_TASK, config.task_fail_rates), (tf.Report.TEST, config.test_fail_rates))):
- if i > 0:
+ # yapf: disable
+ for (idx, (components, rates)) in enumerate(
+ ((tf.Report.TEST_TASK_VARIANT_DISTRO, config.distro_fail_rates),
+ (tf.Report.TEST_TASK_VARIANT, config.variant_fail_rates),
+ (tf.Report.TEST_TASK, config.task_fail_rates),
+ (tf.Report.TEST, config.test_fail_rates))):
+ # yapf: enable
+ if idx > 0:
report = tf.Report(grouped_entries)
# We reassign the value of 'grouped_entries' to take advantage of how data that is on
@@ -403,7 +406,8 @@ def update_tags(lifecycle_tags, config, report, tests):
update_lifecycle(lifecycle_tags, reliable_summaries, reliable_test, False, rates.acceptable,
config.reliable_min_runs)
- def should_be_removed(test, tag):
+ def should_be_removed(test, tag, components, reliable_combinations):
+ """Return True if 'combination' shoud be removed."""
combination = _test_combination_from_tag(test, tag)
if len(combination) != len(components):
# The tag is not for these components.
@@ -414,7 +418,7 @@ def update_tags(lifecycle_tags, config, report, tests):
for test in tests:
tags = lifecycle_tags.lifecycle.get_tags("js_test", test)
for tag in tags[:]:
- if should_be_removed(test, tag):
+ if should_be_removed(test, tag, components, reliable_combinations):
LOGGER.info("Removing tag '%s' of test '%s' because the combination did not run"
" during the reliable period", tag, test)
lifecycle_tags.remove_tag("js_test", test, tag, failure_rate=0)
@@ -484,9 +488,7 @@ def _config_as_options(config):
class TagsConfigWithChangelog(object):
- """A wrapper around TagsConfig that can perform updates on a tags file and record the
- modifications made.
- """
+ """A wrapper around TagsConfig to update a tags file and record the modifications made."""
def __init__(self, lifecycle):
"""Initialize the TagsConfigWithChangelog with the lifecycle TagsConfig."""
@@ -539,17 +541,22 @@ class TagsConfigWithChangelog(object):
class JiraIssueCreator(object):
+ """JiraIssueCreator class."""
+
_LABEL = "test-lifecycle"
_PROJECT = "TIGBOT"
_MAX_DESCRIPTION_SIZE = 32767
- def __init__(self, server=None, username=None, password=None, access_token=None,
- access_token_secret=None, consumer_key=None, key_cert=None):
+ def __init__( # pylint: disable=too-many-arguments
+ self, server=None, username=None, password=None, access_token=None,
+ access_token_secret=None, consumer_key=None, key_cert=None):
+ """Initialize JiraIssueCreator."""
self._client = jiraclient.JiraClient(
server=server, username=username, password=password, access_token=access_token,
access_token_secret=access_token_secret, consumer_key=consumer_key, key_cert=key_cert)
- def create_issue(self, evg_project, mongo_revision, model_config, added, removed, cleaned_up):
+ def create_issue( # pylint: disable=too-many-arguments
+ self, evg_project, mongo_revision, model_config, added, removed, cleaned_up):
"""Create a JIRA issue for the test lifecycle tag update."""
summary = self._get_jira_summary(evg_project)
description = self._get_jira_description(evg_project, mongo_revision, model_config, added,
@@ -587,7 +594,8 @@ class JiraIssueCreator(object):
return desc
@staticmethod
- def _get_jira_description(project, mongo_revision, model_config, added, removed, cleaned_up):
+ def _get_jira_description( # pylint: disable=too-many-arguments
+ project, mongo_revision, model_config, added, removed, cleaned_up):
mono = JiraIssueCreator._monospace
config_desc = _config_as_options(model_config)
added_desc = JiraIssueCreator._make_updated_tags_description(added)
@@ -624,8 +632,7 @@ class JiraIssueCreator(object):
tags_lines.append("--- {0} ({1:.2f})".format(mono(tag), coefficient))
if tags_lines:
return "\n".join(tags_lines)
- else:
- return "_None_"
+ return "_None_"
@staticmethod
def _make_tags_cleaned_up_description(cleaned_up):
@@ -645,15 +652,16 @@ class JiraIssueCreator(object):
tags_cleaned_up_lines.append("--- {0}".format(mono(tag)))
if tags_cleaned_up_lines:
return "\n".join(tags_cleaned_up_lines)
- else:
- return "_None_"
+ return "_None_"
-class LifecycleTagsFile(object):
+class LifecycleTagsFile(object): # pylint: disable=too-many-instance-attributes
"""Represent a test lifecycle tags file that can be written and committed."""
- def __init__(self, project, lifecycle_file, metadata_repo_url=None, references_file=None,
- jira_issue_creator=None, git_info=None, model_config=None):
+ def __init__( # pylint: disable=too-many-arguments
+ self, project, lifecycle_file, metadata_repo_url=None, references_file=None,
+ jira_issue_creator=None, git_info=None,
+ model_config=None): # noqa: D214,D401,D405,D406,D407,D411,D413
"""Initalize the LifecycleTagsFile.
Arguments:
@@ -815,9 +823,8 @@ class LifecycleTagsFile(object):
if pushed:
self.jira_issue_creator.close_fix_issue(issue_key)
return True
- else:
- self.jira_issue_creator.close_wontfix_issue(issue_key)
- return False
+ self.jira_issue_creator.close_wontfix_issue(issue_key)
+ return False
def make_lifecycle_tags_file(options, model_config):
@@ -842,10 +849,10 @@ def make_lifecycle_tags_file(options, model_config):
return lifecycle_tags_file
-def main():
- """
- Utility for updating a resmoke.py tag file based on computing test failure rates from the
- Evergreen API.
+def main(): # pylint: disable=too-many-branches,too-many-locals,too-many-statements
+ """Exexcute utility to update a resmoke.py tag file.
+
+ This is based on computing test failure rates from the Evergreen API.
"""
parser = optparse.OptionParser(
@@ -1058,10 +1065,11 @@ def main():
distros = options.distros.split(",") if options.distros else []
config = Config(
- test_fail_rates=Rates(*options.test_fail_rates), task_fail_rates=Rates(
- *options.task_fail_rates), variant_fail_rates=Rates(
- *options.variant_fail_rates), distro_fail_rates=Rates(
- *options.distro_fail_rates), reliable_min_runs=options.reliable_test_min_runs,
+ test_fail_rates=Rates(*options.test_fail_rates),
+ task_fail_rates=Rates(*options.task_fail_rates),
+ variant_fail_rates=Rates(*options.variant_fail_rates),
+ distro_fail_rates=Rates(*options.distro_fail_rates),
+ reliable_min_runs=options.reliable_test_min_runs,
reliable_time_period=datetime.timedelta(days=options.reliable_days),
unreliable_min_runs=options.unreliable_test_min_runs,
unreliable_time_period=datetime.timedelta(days=options.unreliable_days))
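The new should_be_removed() signature above passes 'components' and 'reliable_combinations' explicitly instead of closing over the enclosing loop's variables (pylint's cell-var-from-loop warning). A minimal illustration of the late-binding behavior this sidesteps:

    funcs = [lambda: i for i in range(3)]
    print([f() for f in funcs])  # [2, 2, 2] -- every closure sees the final i

    funcs = [lambda i=i: i for i in range(3)]
    print([f() for f in funcs])  # [0, 1, 2] -- defaults bind the value per iteration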
diff --git a/buildscripts/utils.py b/buildscripts/utils.py
index a710de822be..5073b26ad85 100644
--- a/buildscripts/utils.py
+++ b/buildscripts/utils.py
@@ -8,7 +8,8 @@ import subprocess
import sys
-def getAllSourceFiles(arr=None, prefix="."):
+def get_all_source_files(arr=None, prefix="."):
+ """Return source files."""
if arr is None:
arr = []
@@ -17,14 +18,17 @@ def getAllSourceFiles(arr=None, prefix="."):
arr.append(prefix)
return arr
- for x in os.listdir(prefix):
- if (x.startswith(".") or x.startswith("pcre-") or x.startswith("32bit")
- or x.startswith("mongodb-") or x.startswith("debian")
- or x.startswith("mongo-cxx-driver") or x.startswith("sqlite") or "gotools" in x
- or x.find("mozjs") != -1):
+ for fx in os.listdir(prefix):
+ # pylint: disable=too-many-boolean-expressions
+ if (fx.startswith(".") or fx.startswith("pcre-") or fx.startswith("32bit")
+ or fx.startswith("mongodb-") or fx.startswith("debian")
+ or fx.startswith("mongo-cxx-driver") or fx.startswith("sqlite") or "gotools" in fx
+ or fx.find("mozjs") != -1):
continue
+ # pylint: enable=too-many-boolean-expressions
- def isFollowableDir(prefix, full):
+ def is_followable_dir(prefix, full):
+ """Return True if 'full' is a followable directory."""
if not os.path.isdir(full):
return False
if not os.path.islink(full):
@@ -34,9 +38,9 @@ def getAllSourceFiles(arr=None, prefix="."):
return True
return False
- full = prefix + "/" + x
- if isFollowableDir(prefix, full):
- getAllSourceFiles(arr, full)
+ full = prefix + "/" + fx
+ if is_followable_dir(prefix, full):
+ get_all_source_files(arr, full)
else:
if full.endswith(".cpp") or full.endswith(".h") or full.endswith(".c"):
full = full.replace("//", "/")
@@ -45,7 +49,8 @@ def getAllSourceFiles(arr=None, prefix="."):
return arr
-def getGitBranch():
+def get_git_branch():
+ """Return the git branch version."""
if not os.path.exists(".git") or not os.path.isdir(".git"):
return None
@@ -57,23 +62,25 @@ def getGitBranch():
return version
-def getGitBranchString(prefix="", postfix=""):
- t = re.compile("[/\\\]").split(os.getcwd())
- if len(t) > 2 and t[len(t) - 1] == "mongo":
- par = t[len(t) - 2]
- m = re.compile(".*_([vV]\d+\.\d+)$").match(par)
- if m is not None:
- return prefix + m.group(1).lower() + postfix
+def get_git_branch_string(prefix="", postfix=""):
+ """Return the git branch name."""
+ tt = re.compile(r"[/\\]").split(os.getcwd())
+ if len(tt) > 2 and tt[len(tt) - 1] == "mongo":
+ par = tt[len(tt) - 2]
+ mt = re.compile(r".*_([vV]\d+\.\d+)$").match(par)
+ if mt is not None:
+ return prefix + mt.group(1).lower() + postfix
if par.find("Nightly") > 0:
return ""
- b = getGitBranch()
- if b is None or b == "master":
+ branch = get_git_branch()
+ if branch is None or branch == "master":
return ""
- return prefix + b + postfix
+ return prefix + branch + postfix
-def getGitVersion():
+def get_git_version():
+ """Return the git version."""
if not os.path.exists(".git") or not os.path.isdir(".git"):
return "nogitversion"
@@ -81,13 +88,14 @@ def getGitVersion():
if not version.startswith("ref: "):
return version
version = version[5:]
- f = ".git/" + version
- if not os.path.exists(f):
+ git_ver = ".git/" + version
+ if not os.path.exists(git_ver):
return version
- return open(f, "r").read().strip()
+ return open(git_ver, "r").read().strip()
-def getGitDescribe():
+def get_git_describe():
+ """Return 'git describe'."""
with open(os.devnull, "r+") as devnull:
proc = subprocess.Popen("git describe", stdout=subprocess.PIPE, stderr=devnull,
stdin=devnull, shell=True)
@@ -95,16 +103,17 @@ def getGitDescribe():
def execsys(args):
- import subprocess
+ """Execute a subprocess of 'args'."""
if isinstance(args, str):
- r = re.compile("\s+")
- args = r.split(args)
- p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- r = p.communicate()
- return r
+ rc = re.compile(r"\s+")
+ args = rc.split(args)
+ proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ res = proc.communicate()
+ return res
def which(executable):
+ """Return full path of 'executable'."""
if sys.platform == "win32":
paths = os.environ.get("Path", "").split(";")
else:
@@ -122,6 +131,7 @@ def which(executable):
def find_python(min_version=(2, 5)):
+ """Return path of python."""
try:
if sys.version_info >= min_version:
return sys.executable
@@ -141,19 +151,20 @@ def find_python(min_version=(2, 5)):
versiontuple = tuple(map(int, match.group(1).split(".")))
if versiontuple >= min_version:
return which(binary)
- except:
+ except Exception: # pylint: disable=broad-except
pass
raise Exception("could not find suitable Python (version >= %s)" % ".".join(
str(v) for v in min_version))
-# unicode is a pain. some strings cannot be unicode()'d
-# but we want to just preserve the bytes in a human-readable
-# fashion. this codec error handler will substitute the
-# repr() of the offending bytes into the decoded string
-# at the position they occurred
def replace_with_repr(unicode_error):
+ """Codec error handler replacement."""
+ # Unicode is a pain, some strings cannot be unicode()'d
+ # but we want to just preserve the bytes in a human-readable
+ # fashion. This codec error handler will substitute the
+ # repr() of the offending bytes into the decoded string
+ # at the position they occurred
offender = unicode_error.object[unicode_error.start:unicode_error.end]
return (unicode(repr(offender).strip("'").strip('"')), unicode_error.end)
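replace_with_repr() is the kind of handler installed via codecs.register_error(); a hedged Python 2 usage sketch (the handler name "repr_replace" is an assumption, this patch itself registers nothing):

    import codecs

    def replace_with_repr(unicode_error):
        # Mirrors buildscripts/utils.py: substitute repr() of the bad bytes.
        offender = unicode_error.object[unicode_error.start:unicode_error.end]
        return (unicode(repr(offender).strip("'").strip('"')), unicode_error.end)

    codecs.register_error("repr_replace", replace_with_repr)
    print("caf\xe9".decode("utf-8", "repr_replace"))  # prints: caf\xe9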
diff --git a/buildscripts/yaml_key_value.py b/buildscripts/yaml_key_value.py
index 353e87df7ea..27e975433d4 100755
--- a/buildscripts/yaml_key_value.py
+++ b/buildscripts/yaml_key_value.py
@@ -9,7 +9,7 @@ import yaml
def get_yaml_value(yaml_file, yaml_key):
- """Return string value for 'yaml_key' from 'yaml_file.'"""
+ """Return string value for 'yaml_key' from 'yaml_file'."""
with open(yaml_file, "r") as ystream:
yaml_dict = yaml.safe_load(ystream)
return str(yaml_dict.get(yaml_key, ""))
@@ -20,8 +20,8 @@ def main():
parser = optparse.OptionParser(description=__doc__)
parser.add_option("--yamlFile", dest="yaml_file", default=None, help="YAML file to read")
- parser.add_option(
- "--yamlKey", dest="yaml_key", default=None, help="Top level YAML key to provide the value")
+ parser.add_option("--yamlKey", dest="yaml_key", default=None,
+ help="Top level YAML key to provide the value")
(options, _) = parser.parse_args()
if not options.yaml_file:
@@ -31,5 +31,6 @@ def main():
print(get_yaml_value(options.yaml_file, options.yaml_key))
+
if __name__ == "__main__":
main()
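A quick usage sketch for get_yaml_value() above; the file name and key are hypothetical:

    # Given a config.yml containing "version: 3.7", this prints "3.7".
    print(get_yaml_value("config.yml", "version"))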
diff --git a/etc/evergreen.yml b/etc/evergreen.yml
index fb8002171fa..948ef8a4655 100644
--- a/etc/evergreen.yml
+++ b/etc/evergreen.yml
@@ -681,11 +681,15 @@ functions:
set -o verbose
python_loc=$(which ${python|/opt/mongodbtoolchain/v2/bin/python2})
+ python3_loc=$(which ${python3|/opt/mongodbtoolchain/v2/bin/python3})
if [ "Windows_NT" = "$OS" ]; then
python_loc=$(cygpath -w $python_loc)
+ python3_loc=$(cygpath -w c:/python/Python36/python.exe)
fi
# Set up virtualenv in ${workdir}
virtualenv --python "$python_loc" --system-site-packages "${workdir}/venv"
+ # Add virtualenv for python3 in ${workdir}
+ virtualenv --python "$python3_loc" --system-site-packages "${workdir}/venv_3"
"run tests" :
- command: expansions.update
@@ -1190,7 +1194,7 @@ functions:
if [ ! -z "${virtualenv_requirements}" ]; then
${activate_virtualenv}
- pip install ${virtualenv_requirements}
+ pip install -r pytests/requirements.txt
fi
- command: shell.exec
@@ -1495,7 +1499,8 @@ functions:
set -o errexit
${activate_virtualenv}
- pip install boto3==1.5.27
+ pip install -r buildscripts/requirements.txt
+ pip install -r pytests/requirements.txt
if [ ! -z "${subnet_id}" ]; then
subnet_id="-n ${subnet_id}"
@@ -2015,7 +2020,29 @@ functions:
else
python=${python|/opt/mongodbtoolchain/v2/bin/python2}
fi
- echo "python set to $(which python)"
+ echo "python set to $(which $python)"
+ - key: activate_virtualenv_3
+ value: |
+ # check if virtualenv for python3 is set up
+ if [ -d "${workdir}/venv_3" ]; then
+ if [ "Windows_NT" = "$OS" ]; then
+ # Need to quote the path on Windows to preserve the separator.
+ . "${workdir}/venv_3/Scripts/activate" 2> /tmp/activate_error.log
+ else
+ . ${workdir}/venv_3/bin/activate 2> /tmp/activate_error.log
+ fi
+ if [ $? -ne 0 ]; then
+ echo "Failed to activate virtualenv: $(cat /tmp/activate_error.log)"
+ fi
+ python=python
+ else
+ if [ "Windows_NT" = "$OS" ]; then
+ python=/cygdrive/c/python/Python36/python
+ else
+ python=${python3|/opt/mongodbtoolchain/v2/bin/python3}
+ fi
+ fi
+ echo "python set to $(which $python)"
- key: posix_workdir
value: eval 'if [ "Windows_NT" = "$OS" ]; then echo $(cygpath -u "${workdir}"); else echo ${workdir}; fi'
# For ssh disable the options GSSAPIAuthentication, CheckHostIP, StrictHostKeyChecking
@@ -2969,10 +2996,19 @@ tasks:
set -o errexit
set -o verbose
+ ### TODO: Remove python3 when mypy 0.580 is installed in the toolchain.
+ # Since mypy requires python3, we need to activate the venv_3
+ ${activate_virtualenv_3}
+ pip install -r buildscripts/requirements.txt
+ updated_mypy=$(which mypy)
+ deactivate
+ ###
${activate_virtualenv}
# TODO: Remove once the linters have been updated on the variants.
pip install -r buildscripts/requirements.txt
- ${compile_env|} $python ./buildscripts/scons.py ${compile_flags|} --stack-size=1024 lint
+ # The linters require the modules to be installed.
+ pip install -r pytests/requirements.txt
+ MYPY=$updated_mypy ${compile_env|} $python ./buildscripts/scons.py ${compile_flags|} --stack-size=1024 lint
- <<: *task_template
name: burn_in_tests
@@ -5618,7 +5654,7 @@ buildvariants:
max_jobs: 8 # Avoid starting too many mongod's on ARM test servers
test_flags: --excludeWithAnyTags=requires_mmapv1
has_packages: true
- packager_script: packager-enterprise.py
+ packager_script: packager_enterprise.py
packager_arch: arm64
packager_distro: ubuntu1604
repo_edition: enterprise
@@ -5740,7 +5776,7 @@ buildvariants:
num_jobs_available: 2
test_flags: --excludeWithAnyTags=requires_mmapv1
has_packages: true
- packager_script: packager-enterprise.py
+ packager_script: packager_enterprise.py
packager_arch: ppc64le
packager_distro: ubuntu1604
repo_edition: enterprise
@@ -6092,7 +6128,7 @@ buildvariants:
compile_flags: --ssl MONGO_DISTMOD=amazon2 --release -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_gcc.vars
num_jobs_available: $(grep -c ^processor /proc/cpuinfo)
has_packages: true
- packager_script: packager-enterprise.py
+ packager_script: packager_enterprise.py
packager_arch: x86_64
packager_distro: amazon2
repo_edition: enterprise
@@ -7185,7 +7221,7 @@ buildvariants:
multiversion_edition: enterprise
num_jobs_available: $(grep -c ^processor /proc/cpuinfo)
has_packages: true
- packager_script: packager-enterprise.py
+ packager_script: packager_enterprise.py
packager_arch: x86_64
packager_distro: rhel62
repo_edition: enterprise
@@ -7503,7 +7539,7 @@ buildvariants:
compile_flags: --ssl MONGO_DISTMOD=rhel70 -j$(grep -c ^processor /proc/cpuinfo) --release --variables-files=etc/scons/mongodbtoolchain_gcc.vars
num_jobs_available: $(grep -c ^processor /proc/cpuinfo)
has_packages: true
- packager_script: packager-enterprise.py
+ packager_script: packager_enterprise.py
packager_arch: x86_64
packager_distro: rhel70
repo_edition: enterprise
@@ -7768,7 +7804,7 @@ buildvariants:
num_jobs_available: $(echo "$(grep -c processor /proc/cpuinfo)/4" | bc)
test_flags: --excludeWithAnyTags=requires_mmapv1
has_packages: true
- packager_script: packager-enterprise.py
+ packager_script: packager_enterprise.py
packager_arch: ppc64le
packager_distro: rhel71
push_path: linux
@@ -7874,7 +7910,7 @@ buildvariants:
num_jobs_available: 2
test_flags: --excludeWithAnyTags=requires_mmapv1
has_packages: true
- packager_script: packager-enterprise.py
+ packager_script: packager_enterprise.py
packager_arch: s390x
packager_distro: rhel72
push_path: linux
@@ -7980,7 +8016,7 @@ buildvariants:
num_jobs_available: $(grep -c ^processor /proc/cpuinfo)
test_flags: --excludeWithAnyTags=requires_mmapv1
has_packages: true
- packager_script: packager-enterprise.py
+ packager_script: packager_enterprise.py
packager_arch: s390x
packager_distro: rhel67
push_path: linux
@@ -8084,7 +8120,7 @@ buildvariants:
compile_flags: --ssl MONGO_DISTMOD=ubuntu1404 --release -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_gcc.vars
num_jobs_available: $(grep -c ^processor /proc/cpuinfo)
has_packages: true
- packager_script: packager-enterprise.py
+ packager_script: packager_enterprise.py
packager_arch: x86_64
packager_distro: ubuntu1404
repo_edition: enterprise
@@ -8162,7 +8198,7 @@ buildvariants:
compile_flags: --ssl MONGO_DISTMOD=ubuntu1604 --release -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_gcc.vars
num_jobs_available: $(grep -c ^processor /proc/cpuinfo)
has_packages: true
- packager_script: packager-enterprise.py
+ packager_script: packager_enterprise.py
packager_arch: x86_64
packager_distro: ubuntu1604
repo_edition: enterprise
@@ -8247,7 +8283,7 @@ buildvariants:
compile_flags: --ssl MONGO_DISTMOD=suse12 --release -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_gcc.vars
num_jobs_available: $(grep -c ^processor /proc/cpuinfo)
has_packages: true
- packager_script: packager-enterprise.py
+ packager_script: packager_enterprise.py
packager_arch: x86_64
packager_distro: suse12
repo_edition: enterprise
@@ -8304,7 +8340,7 @@ buildvariants:
num_jobs_available: $(grep -c ^processor /proc/cpuinfo)
test_flags: --excludeWithAnyTags=requires_mmapv1
has_packages: true
- packager_script: packager-enterprise.py
+ packager_script: packager_enterprise.py
packager_arch: s390x
packager_distro: suse12
repo_edition: enterprise
@@ -8498,7 +8534,7 @@ buildvariants:
compile_flags: --ssl MONGO_DISTMOD=debian71 --release -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_gcc.vars
num_jobs_available: $(grep -c ^processor /proc/cpuinfo)
has_packages: true
- packager_script: packager-enterprise.py
+ packager_script: packager_enterprise.py
packager_arch: x86_64
packager_distro: debian71
repo_edition: enterprise
@@ -8554,7 +8590,7 @@ buildvariants:
compile_flags: --ssl MONGO_DISTMOD=debian81 --release -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_gcc.vars
num_jobs_available: $(grep -c ^processor /proc/cpuinfo)
has_packages: true
- packager_script: packager-enterprise.py
+ packager_script: packager_enterprise.py
packager_arch: x86_64
packager_distro: debian81
repo_edition: enterprise
@@ -8785,7 +8821,7 @@ buildvariants:
compile_flags: --ssl MONGO_DISTMOD=debian92 --release -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_gcc.vars
num_jobs_available: $(grep -c ^processor /proc/cpuinfo)
has_packages: true
- packager_script: packager-enterprise.py
+ packager_script: packager_enterprise.py
packager_arch: x86_64
packager_distro: debian92
repo_edition: enterprise
@@ -9920,7 +9956,7 @@ buildvariants:
multiversion_edition: enterprise
num_jobs_available: $(grep -c ^processor /proc/cpuinfo)
has_packages: true
- packager_script: packager-enterprise.py
+ packager_script: packager_enterprise.py
packager_arch: x86_64
packager_distro: rhel62
repo_edition: enterprise
diff --git a/mypy.ini b/mypy.ini
new file mode 100644
index 00000000000..7d3f3753832
--- /dev/null
+++ b/mypy.ini
@@ -0,0 +1,20 @@
+[mypy]
+# Check Python 2 code for type annotations in comments.
+python_version = 2.7
+
+[mypy-*]
+disallow_untyped_defs = False
+# Do not error on imported files since all imported files may not be mypy clean.
+follow_imports = silent
+# Do not error if imports are not found.
+# This can be a problem with standalone scripts and relative imports.
+# This limits effectiveness but avoids mypy errors for imports it cannot resolve.
+ignore_missing_imports = True
+
+[mypy-idl*]
+# Error if any code is missing type annotations.
+disallow_untyped_defs = True
+
+[mypy-lint*]
+# Error if any code is missing type annotations.
+disallow_untyped_defs = True
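With python_version = 2.7 above, mypy reads PEP 484 type comments; a minimal sketch of the annotated style that disallow_untyped_defs requires in the idl* and lint* packages (the function itself is hypothetical):

    from typing import List

    def split_flags(raw):
        # type: (str) -> List[str]
        """Split a whitespace-separated flag string into a list."""
        return raw.split()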
diff --git a/pytests/powertest.py b/pytests/powertest.py
index 2047f176aa8..2ae5acdd0f3 100755
--- a/pytests/powertest.py
+++ b/pytests/powertest.py
@@ -1,7 +1,19 @@
#!/usr/bin/env python
-"""Powercycle test
+"""Powercycle test.
Tests robustness of mongod to survive multiple powercycle events.
+
+Client & server side powercycle test script.
+
+This script can be run against any host which is reachable via ssh.
+Note - the remote hosts should be running bash shell (this script may fail otherwise).
+No assumptions are made about the current MongoDB deployment on the server.
+For Windows the assumption is that Cygwin is installed.
+The server needs these utilities:
+ - python 2.7 or higher
+ - sshd
+ - rsync
+This script will either download a MongoDB tarball or use an existing setup.
"""
from __future__ import print_function
@@ -10,7 +22,7 @@ import atexit
import collections
import copy
import datetime
-import distutils.spawn
+import distutils.spawn # pylint: disable=no-name-in-module
import json
import importlib
import logging
@@ -50,7 +62,7 @@ if os.name == "posix" and sys.version_info[0] == 2:
" available. When using the subprocess module, a child process may"
" trigger an invalid free(). See SERVER-22219 for more details."),
RuntimeWarning)
- import subprocess
+ import subprocess # type: ignore
else:
import subprocess
@@ -65,8 +77,10 @@ _IS_DARWIN = sys.platform == "darwin"
def _try_import(module, name=None):
- """Attempts to import a module and add it as a global variable.
- If the import fails, then this function doesn't trigger an exception."""
+ """Attempt to import a module and add it as a global variable.
+
+ If the import fails, then this function doesn't trigger an exception.
+ """
try:
module_name = module if not name else name
globals()[module_name] = importlib.import_module(module)
@@ -92,39 +106,27 @@ if _IS_WINDOWS:
_try_import("win32service")
_try_import("win32serviceutil")
+# pylint: disable=too-many-lines
+
__version__ = "0.1"
LOGGER = logging.getLogger(__name__)
-_report_json_success = False
-_report_json = {}
-_report_json_file = ""
-""" Client & server side powercycle test script.
-
- This script can be run against any host which is reachable via ssh.
- Note - the remote hosts should be running bash shell (this script may fail otherwise).
- There are no assumptions on the server what is the current deployment of MongoDB.
- For Windows the assumption is that Cygwin is installed.
- The server needs these utilities:
- - python 2.7 or higher
- - sshd
- - rsync
- This script will either download a MongoDB tarball or use an existing setup. """
+REPORT_JSON = {} # type: ignore
+REPORT_JSON_FILE = ""
+REPORT_JSON_SUCCESS = False
def exit_handler():
- """Exit handler actions:
- - Generate report.json
- - Kill spawned processes
- - Delete all named temporary files
- """
- if _report_json:
- LOGGER.debug("Exit handler: Updating report file %s", _report_json_file)
+ """Exit handler to generate report.json, kill spawned processes, delete temporary files."""
+
+ if REPORT_JSON:
+ LOGGER.debug("Exit handler: Updating report file %s", REPORT_JSON_FILE)
try:
- test_start = _report_json["results"][0]["start"]
+ test_start = REPORT_JSON["results"][0]["start"]
test_end = int(time.time())
test_time = test_end - test_start
- if _report_json_success:
+ if REPORT_JSON_SUCCESS:
failures = 0
status = "pass"
exit_code = 0
@@ -132,35 +134,35 @@ def exit_handler():
failures = 1
status = "fail"
exit_code = 1
- _report_json["failures"] = failures
- _report_json["results"][0]["status"] = status
- _report_json["results"][0]["exit_code"] = exit_code
- _report_json["results"][0]["end"] = test_end
- _report_json["results"][0]["elapsed"] = test_time
- with open(_report_json_file, "w") as jstream:
- json.dump(_report_json, jstream)
- LOGGER.debug("Exit handler: report file contents %s", _report_json)
- except:
+ REPORT_JSON["failures"] = failures
+ REPORT_JSON["results"][0]["status"] = status
+ REPORT_JSON["results"][0]["exit_code"] = exit_code
+ REPORT_JSON["results"][0]["end"] = test_end
+ REPORT_JSON["results"][0]["elapsed"] = test_time
+ with open(REPORT_JSON_FILE, "w") as jstream:
+ json.dump(REPORT_JSON, jstream)
+ LOGGER.debug("Exit handler: report file contents %s", REPORT_JSON)
+ except: # pylint: disable=bare-except
pass
LOGGER.debug("Exit handler: Killing processes")
try:
Processes.kill_all()
- except:
+ except: # pylint: disable=bare-except
pass
LOGGER.debug("Exit handler: Cleaning up temporary files")
try:
NamedTempFile.delete_all()
- except:
+ except: # pylint: disable=bare-except
pass
def register_signal_handler(handler):
+ """Register the signal handler."""
+
def _handle_set_event(event_handle, handler):
- """
- Windows event object handler that will dump the stacks of all threads.
- """
+ """Event object handler that will dump the stacks of all threads for Windows."""
while True:
try:
# Wait for task time out to dump stacks.
@@ -203,14 +205,12 @@ def register_signal_handler(handler):
signal.signal(signal_num, handler)
-def dump_stacks_and_exit(signum, frame):
- """
- Handler that will dump the stacks of all threads.
- """
+def dump_stacks_and_exit(signum, frame): # pylint: disable=unused-argument
+ """Provide a handler that will dump the stacks of all threads."""
LOGGER.info("Dumping stacks!")
sb = []
- frames = sys._current_frames()
+ frames = sys._current_frames() # pylint: disable=protected-access
sb.append("Total threads: {}\n".format(len(frames)))
sb.append("")
@@ -223,13 +223,13 @@ def dump_stacks_and_exit(signum, frame):
if _IS_WINDOWS:
exit_handler()
- os._exit(1)
+ os._exit(1) # pylint: disable=protected-access
else:
sys.exit(1)
def child_processes(parent_pid):
- """Returns a list of all child processes for a pid."""
+ """Return a list of all child processes for a pid."""
# The child processes cannot be obtained from the parent on Windows from psutil. See
# https://stackoverflow.com/questions/30220732/python-psutil-not-showing-all-child-processes
child_procs = []
@@ -282,12 +282,12 @@ def kill_processes(procs, kill_children=True):
def get_extension(filename):
- """Returns the extension of a file."""
+ """Return the extension of a file."""
return os.path.splitext(filename)[-1]
def abs_path(path):
- """Returns absolute path for 'path'. Raises an exception on failure."""
+ """Return absolute path for 'path'. Raises an exception on failure."""
if _IS_WINDOWS:
# Get the Windows absolute path.
cmd = "cygpath -wa {}".format(path)
@@ -300,15 +300,16 @@ def abs_path(path):
def symlink_dir(source_dir, dest_dir):
- """Symlinks the 'dest_dir' to 'source_dir'."""
+ """Symlink the 'dest_dir' to 'source_dir'."""
if _IS_WINDOWS:
- win32file.CreateSymbolicLink(dest_dir, source_dir, win32file.SYMBOLIC_LINK_FLAG_DIRECTORY)
+ win32file.CreateSymbolicLink( # pylint: disable=undefined-variable
+ dest_dir, source_dir, win32file.SYMBOLIC_LINK_FLAG_DIRECTORY) # pylint: disable=undefined-variable
else:
os.symlink(source_dir, dest_dir)
def get_bin_dir(root_dir):
- """Locates the 'bin' directory within 'root_dir' tree."""
+ """Locate the 'bin' directory within 'root_dir' tree."""
for root, dirs, _ in os.walk(root_dir):
if "bin" in dirs:
return os.path.join(root, "bin")
@@ -316,7 +317,7 @@ def get_bin_dir(root_dir):
def create_temp_executable_file(cmds):
- """Creates an executable temporary file containing 'cmds'. Returns file name."""
+ """Create an executable temporary file containing 'cmds'. Returns file name."""
temp_file_name = NamedTempFile.create(suffix=".sh", directory="tmp")
with NamedTempFile.get(temp_file_name) as temp_file:
temp_file.write(cmds)
@@ -326,7 +327,7 @@ def create_temp_executable_file(cmds):
def start_cmd(cmd, use_file=False):
- """Starts command and returns proc instance from Popen"""
+ """Start command and returns proc instance from Popen."""
orig_cmd = ""
# Multi-commands need to be written to a temporary file to execute on Windows.
@@ -353,7 +354,7 @@ def start_cmd(cmd, use_file=False):
def execute_cmd(cmd, use_file=False):
- """Executes command and returns return_code, output from command"""
+ """Execute command and returns return_code, output from command."""
orig_cmd = ""
# Multi-commands need to be written to a temporary file to execute on Windows.
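A hedged sketch of the start/execute split these two helpers implement, using only the standard library; the real functions above additionally route multi-command strings through a temporary file on Windows:

import shlex
import subprocess

def start_cmd_sketch(cmd):
    # Start without waiting; the caller keeps the Popen handle.
    return subprocess.Popen(shlex.split(cmd))

def execute_cmd_sketch(cmd):
    # Run to completion and return (return_code, combined_output).
    proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    output, _ = proc.communicate()
    return proc.returncode, output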
@@ -387,17 +388,18 @@ def execute_cmd(cmd, use_file=False):
def get_user_host(user_host):
- """ Returns a tuple (user, host) from the user_host string. """
+ """Return a tuple (user, host) from the user_host string."""
if "@" in user_host:
return tuple(user_host.split("@"))
return None, user_host
def parse_options(options):
- """ Parses options and returns a dict.
+ """Parse options and returns a dict.
- Since there are options which can be specifed with a short('-') or long
- ('--') form, we preserve that in key map as {option_name: (value, form)}."""
+ Since there are options which can be specified with a short ('-') or long
+ ('--') form, we preserve that in the key map as {option_name: (value, form)}.
+ """
options_map = collections.defaultdict(list)
opts = shlex.split(options)
for opt in opts:
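An illustrative sketch of the {option_name: (value, form)} mapping the docstring describes; the real implementation above differs in detail (helper name hypothetical):

import shlex

def parse_options_sketch(options):
    options_map = {}
    opts = shlex.split(options)
    i = 0
    while i < len(opts):
        form = "--" if opts[i].startswith("--") else "-"
        name = opts[i].lstrip("-")
        value = None
        # Pair the next token with this option unless it is another flag.
        if i + 1 < len(opts) and not opts[i + 1].startswith("-"):
            value = opts[i + 1]
            i += 1
        options_map[name] = (value, form)
        i += 1
    return options_map

# parse_options_sketch("--port 27017 -v") ->
#     {"port": ("27017", "--"), "v": (None, "-")}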
@@ -419,7 +421,7 @@ def parse_options(options):
def download_file(url, file_name, download_retries=5):
- """Returns True if download was successful. Raises error if download fails."""
+ """Return True if download was successful, raise error if download fails."""
LOGGER.info("Downloading %s to %s", url, file_name)
while download_retries > 0:
@@ -459,7 +461,7 @@ def download_file(url, file_name, download_retries=5):
def install_tarball(tarball, root_dir):
- """ Unzip and install 'tarball' into 'root_dir'."""
+ """Unzip and install 'tarball' into 'root_dir'."""
LOGGER.info("Installing %s to %s", tarball, root_dir)
output = ""
@@ -503,7 +505,8 @@ def install_tarball(tarball, root_dir):
fi ;
done ;
popd ;
- """.format(tarball=tarball, tmp_dir=tmp_dir, root_dir=root_dir)
+ """.format( # pylint: disable=bad-continuation
+ tarball=tarball, tmp_dir=tmp_dir, root_dir=root_dir)
ret, output = execute_cmd(cmds, use_file=True)
shutil.rmtree(tmp_dir)
else:
@@ -516,7 +519,7 @@ def install_tarball(tarball, root_dir):
def chmod_x_binaries(bin_dir):
- """ Change all file permissions in 'bin_dir' to executable for everyone. """
+ """Change all file permissions in 'bin_dir' to executable for everyone."""
files = os.listdir(bin_dir)
LOGGER.debug("chmod +x %s %s", bin_dir, files)
@@ -527,25 +530,27 @@ def chmod_x_binaries(bin_dir):
def chmod_w_file(chmod_file):
- """ Change the permission for 'chmod_file' to '+w' for everyone. """
+ """Change the permission for 'chmod_file' to '+w' for everyone."""
LOGGER.debug("chmod +w %s", chmod_file)
if _IS_WINDOWS:
# The os package cannot set the directory to '+w', so we use win32security.
# See https://stackoverflow.com/
# questions/12168110/setting-folder-permissions-in-windows-using-python
+ # pylint: disable=undefined-variable,unused-variable
user, domain, sec_type = win32security.LookupAccountName("", "Everyone")
file_sd = win32security.GetFileSecurity(chmod_file, win32security.DACL_SECURITY_INFORMATION)
dacl = file_sd.GetSecurityDescriptorDacl()
dacl.AddAccessAllowedAce(win32security.ACL_REVISION, ntsecuritycon.FILE_GENERIC_WRITE, user)
file_sd.SetSecurityDescriptorDacl(1, dacl, 0)
win32security.SetFileSecurity(chmod_file, win32security.DACL_SECURITY_INFORMATION, file_sd)
+ # pylint: enable=undefined-variable,unused-variable
else:
os.chmod(chmod_file, os.stat(chmod_file).st_mode | stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
def set_windows_bootstatuspolicy():
- """ For Windows hosts that are physical, this prevents boot to prompt after failure."""
+ """For Windows hosts that are physical, this prevents boot to prompt after failure."""
LOGGER.info("Setting bootstatuspolicy to ignoreallfailures & boot timeout to 5 seconds")
cmds = """
@@ -558,13 +563,14 @@ def set_windows_bootstatuspolicy():
def install_mongod(bin_dir=None, tarball_url="latest", root_dir=None):
- """Sets up 'root_dir'/bin to contain MongoDB binaries.
+ """Set up 'root_dir'/bin to contain MongoDB binaries.
- If 'bin_dir' is specified, then symlink it to 'root_dir'/bin.
- Otherwise, download 'tarball_url' and symlink it's bin to 'root_dir'/bin.
+ If 'bin_dir' is specified, skip the download and symlink 'bin_dir' to 'root_dir'/bin.
+ Otherwise, download 'tarball_url' and symlink its bin directory to 'root_dir'/bin.
- If 'bin_dir' is specified, skip download and create symlink
- from 'bin_dir' to 'root_dir'/bin.
+ """
LOGGER.debug("install_mongod: %s %s %s", bin_dir, tarball_url, root_dir)
# Create 'root_dir', if it does not exist.
@@ -601,7 +607,7 @@ def install_mongod(bin_dir=None, tarball_url="latest", root_dir=None):
def print_uptime():
- """Prints the last time the system was booted, and the uptime (in seconds). """
+ """Print the last time the system was booted, and the uptime (in seconds)."""
boot_time_epoch = psutil.boot_time()
boot_time = datetime.datetime.fromtimestamp(boot_time_epoch).strftime('%Y-%m-%d %H:%M:%S.%f')
uptime = int(time.time() - boot_time_epoch)
@@ -609,31 +615,31 @@ def print_uptime():
def call_remote_operation(local_ops, remote_python, script_name, client_args, operation):
- """ Call the remote operation and returns tuple (ret, ouput). """
+ """Call the remote operation and returns tuple (ret, ouput)."""
client_call = "{} {} {} {}".format(remote_python, script_name, client_args, operation)
ret, output = local_ops.shell(client_call)
return ret, output
def is_instance_running(ret, aws_status):
- """ Return true if instance is in a running state. """
+ """Return true if instance is in a running state."""
return ret == 0 and aws_status.state["Name"] == "running"
class Processes(object):
"""Class to create and kill spawned processes."""
- _PROC_LIST = []
+ _PROC_LIST = [] # type: ignore
@classmethod
def create(cls, cmds):
- """Creates a spawned process."""
+ """Create a spawned process."""
proc = start_cmd(cmds, use_file=True)
cls._PROC_LIST.append(proc)
@classmethod
def kill(cls, proc):
- """Kills a spawned process and all it's children."""
+ """Kill a spawned process and all it's children."""
kill_processes([proc], kill_children=True)
cls._PROC_LIST.remove(proc)
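Usage sketch for the registry above, using the names defined in this file:

Processes.create("sleep 30")  # spawn and track a process
Processes.kill_all()          # reap everything, as the exit handler does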
@@ -648,12 +654,12 @@ class Processes(object):
class NamedTempFile(object):
"""Class to control temporary files."""
- _FILE_MAP = {}
- _DIR_LIST = []
+ _FILE_MAP = {} # type: ignore
+ _DIR_LIST = [] # type: ignore
@classmethod
def create(cls, directory=None, suffix=""):
- """Creates a temporary file, and optional directory, and returns the file name."""
+ """Create a temporary file, and optional directory, and returns the file name."""
if directory and not os.path.isdir(directory):
LOGGER.debug("Creating temporary directory %s", directory)
os.makedirs(directory)
@@ -664,14 +670,14 @@ class NamedTempFile(object):
@classmethod
def get(cls, name):
- """Gets temporary file object. Raises an exception if the file is unknown."""
+ """Get temporary file object. Raises an exception if the file is unknown."""
if name not in cls._FILE_MAP:
raise Exception("Unknown temporary file {}.".format(name))
return cls._FILE_MAP[name]
@classmethod
def delete(cls, name):
- """Deletes temporary file. Raises an exception if the file is unknown."""
+ """Delete temporary file. Raises an exception if the file is unknown."""
if name not in cls._FILE_MAP:
raise Exception("Unknown temporary file {}.".format(name))
if not os.path.exists(name):
@@ -687,7 +693,7 @@ class NamedTempFile(object):
@classmethod
def delete_dir(cls, directory):
- """Deletes temporary directory. Raises an exception if the directory is unknown."""
+ """Delete temporary directory. Raises an exception if the directory is unknown."""
if directory not in cls._DIR_LIST:
raise Exception("Unknown temporary directory {}.".format(directory))
if not os.path.exists(directory):
@@ -703,7 +709,7 @@ class NamedTempFile(object):
@classmethod
def delete_all(cls):
- """Deletes all temporary files and directories."""
+ """Delete all temporary files and directories."""
for name in list(cls._FILE_MAP):
cls.delete(name)
for directory in cls._DIR_LIST:
@@ -711,10 +717,11 @@ class NamedTempFile(object):
class ProcessControl(object):
- """ Process control class.
+ """Process control class.
- Control processes either by name or a list of pids. If name is supplied, then
- all matching pids are controlled."""
+ Control processes either by name or a list of pids. If name is supplied, then
+ all matching pids are controlled.
+ """
def __init__(self, name=None, pids=None):
"""Provide either 'name' or 'pids' to control the process."""
@@ -727,7 +734,7 @@ class ProcessControl(object):
self.procs = []
def get_pids(self):
- """ Returns list of process ids for process 'self.name'."""
+ """Return list of process ids for process 'self.name'."""
if not self.name:
return self.pids
self.pids = []
@@ -737,7 +744,7 @@ class ProcessControl(object):
return self.pids
def get_name(self):
- """ Returns process name or name of first running process from pids."""
+ """Return process name or name of first running process from pids."""
if not self.name:
for pid in self.get_pids():
proc = psutil.Process(pid)
@@ -747,21 +754,21 @@ class ProcessControl(object):
return self.name
def get_procs(self):
- """ Returns a list of 'proc' for the associated pids."""
+ """Return a list of 'proc' for the associated pids."""
procs = []
for pid in self.get_pids():
procs.append(psutil.Process(pid))
return procs
def is_running(self):
- """ Returns true if any process is running that either matches on name or pids."""
+ """Return true if any process is running that either matches on name or pids."""
for pid in self.get_pids():
if psutil.pid_exists(pid):
return True
return False
def kill(self):
- """ Kills all running processes that match the list of pids. """
+ """Kill all running processes that match the list of pids."""
if self.is_running():
for proc in self.get_procs():
try:
@@ -771,10 +778,12 @@ class ProcessControl(object):
proc.pid)
+# pylint: disable=undefined-variable,unused-variable
class WindowsService(object):
- """ Windows service control class."""
+ """Windows service control class."""
def __init__(self, name, bin_path, bin_options, start_type=None):
+ """Initialize WindowsService."""
self.name = name
self.bin_name = os.path.basename(bin_path)
@@ -796,7 +805,7 @@ class WindowsService(object):
}
def create(self):
- """ Create service, if not installed. Returns (code, output) tuple. """
+ """Create service, if not installed. Return (code, output) tuple."""
if self.status() in self._states.values():
return 1, "Service '{}' already installed, status: {}".format(self.name, self.status())
try:
@@ -812,7 +821,7 @@ class WindowsService(object):
return ret, output
def update(self):
- """ Update installed service. Returns (code, output) tuple. """
+ """Update installed service. Return (code, output) tuple."""
if self.status() not in self._states.values():
return 1, "Service update '{}' status: {}".format(self.name, self.status())
try:
@@ -828,7 +837,7 @@ class WindowsService(object):
return ret, output
def delete(self):
- """ Delete service. Returns (code, output) tuple. """
+ """Delete service. Return (code, output) tuple."""
if self.status() not in self._states.values():
return 1, "Service delete '{}' status: {}".format(self.name, self.status())
try:
@@ -842,7 +851,7 @@ class WindowsService(object):
return ret, output
def start(self):
- """ Start service. Returns (code, output) tuple. """
+ """Start service. Return (code, output) tuple."""
if self.status() not in self._states.values():
return 1, "Service start '{}' status: {}".format(self.name, self.status())
try:
@@ -859,7 +868,7 @@ class WindowsService(object):
return ret, output
def stop(self):
- """ Stop service. Returns (code, output) tuple. """
+ """Stop service. Return (code, output) tuple."""
self.pids = []
if self.status() not in self._states.values():
return 1, "Service '{}' status: {}".format(self.name, self.status())
@@ -874,7 +883,7 @@ class WindowsService(object):
return ret, output
def status(self):
- """ Returns state of the service as a string. """
+ """Return state of the service as a string."""
try:
# QueryServiceStatus returns a tuple:
# (scvType, svcState, svcControls, err, svcErr, svcCP, svcWH)
@@ -888,37 +897,42 @@ class WindowsService(object):
return "not installed"
def get_pids(self):
- """ Return list of pids for service. """
+ """Return list of pids for service."""
return self.pids
+# pylint: enable=undefined-variable,unused-variable
+
+
class PosixService(object):
- """ Service control on POSIX systems.
+ """Service control on POSIX systems.
- Simulates service control for background processes which fork themselves,
- i.e., mongod with '--fork'."""
+ Simulates service control for background processes which fork themselves,
+ i.e., mongod with '--fork'.
+ """
def __init__(self, name, bin_path, bin_options):
+ """Initialize PosixService."""
self.name = name
self.bin_path = bin_path
self.bin_name = os.path.basename(bin_path)
self.bin_options = bin_options
self.pids = []
- def create(self):
- """ Simulates create service. Returns (code, output) tuple. """
+ def create(self): # pylint: disable=no-self-use
+ """Simulate create service. Returns (code, output) tuple."""
return 0, None
- def update(self):
- """ Simulates update service. Returns (code, output) tuple. """
+ def update(self): # pylint: disable=no-self-use
+ """Simulate update service. Returns (code, output) tuple."""
return 0, None
- def delete(self):
- """ Simulates delete service. Returns (code, output) tuple. """
+ def delete(self): # pylint: disable=no-self-use
+ """Simulate delete service. Returns (code, output) tuple."""
return 0, None
def start(self):
- """ Start process. Returns (code, output) tuple. """
+ """Start process. Returns (code, output) tuple."""
cmd = "{} {}".format(self.bin_path, self.bin_options)
ret, output = execute_cmd(cmd)
if not ret:
@@ -927,27 +941,29 @@ class PosixService(object):
return ret, output
def stop(self):
- """ Stop process. Returns (code, output) tuple. """
+ """Stop process. Returns (code, output) tuple."""
proc = ProcessControl(name=self.bin_name)
proc.kill()
self.pids = []
return 0, None
def status(self):
- """ Returns status of service. """
+ """Return status of service."""
if self.get_pids():
return "running"
return "stopped"
def get_pids(self):
- """ Return list of pids for process. """
+ """Return list of pids for process."""
return self.pids
-class MongodControl(object):
- """ Control mongod process. """
+class MongodControl(object): # pylint: disable=too-many-instance-attributes
+ """Control mongod process."""
- def __init__(self, bin_dir, db_path, log_path, port, options=None):
+ def __init__( # pylint: disable=too-many-arguments
+ self, bin_dir, db_path, log_path, port, options=None):
+ """Initialize MongodControl."""
extension = ".exe" if _IS_WINDOWS else ""
self.process_name = "mongod{}".format(extension)
@@ -979,19 +995,19 @@ class MongodControl(object):
self.service = self._service("mongod-powertest", self.bin_path, self.mongod_options())
def set_mongod_option(self, option, option_value=None, option_form="--"):
- """ Sets mongod command line option. """
+ """Set mongod command line option."""
self.options_map[option] = (option_value, option_form)
def get_mongod_option(self, option):
- """ Returns tuple of (value, form). """
+ """Return tuple of (value, form)."""
return self.options_map[option]
def get_mongod_service(self):
- """ Returns the service object used to control mongod. """
+ """Return the service object used to control mongod."""
return self.service
def mongod_options(self):
- """ Returns string of mongod options, which can be used when invoking mongod. """
+ """Return string of mongod options, which can be used when invoking mongod."""
opt_string = ""
for opt_name in self.options_map:
opt_val, opt_form = self.options_map[opt_name]
@@ -1001,7 +1017,7 @@ class MongodControl(object):
return opt_string
def install(self, root_dir, tarball_url):
- """ Returns tuple (ret, ouput). """
+ """Return tuple (ret, ouput)."""
# Install mongod, if 'root_dir' does not exist.
if os.path.isdir(root_dir):
LOGGER.warning("Root dir %s already exists", root_dir)
@@ -1020,62 +1036,67 @@ class MongodControl(object):
return ret, output
def uninstall(self):
- """ Returns tuple (ret, ouput). """
+ """Return tuple (ret, ouput)."""
return self.service.delete()
- def cleanup(self, root_dir):
- """ Returns tuple (ret, ouput). """
+ @staticmethod
+ def cleanup(root_dir):
+ """Return tuple (ret, ouput)."""
shutil.rmtree(root_dir, ignore_errors=True)
return 0, None
def start(self):
- """ Returns tuple (ret, ouput). """
+ """Return tuple (ret, ouput)."""
return self.service.start()
def update(self):
- """ Returns tuple (ret, ouput). """
+ """Return tuple (ret, ouput)."""
return self.service.update()
def stop(self):
- """ Returns tuple (ret, ouput). """
+ """Return tuple (ret, ouput)."""
return self.service.stop()
def get_pids(self):
- """ Return list of pids for process. """
+ """Return list of pids for process."""
return self.service.get_pids()
class LocalToRemoteOperations(object):
- """ Local operations handler class for sending commands to the remote host.
+ """Local operations handler class for sending commands to the remote host.
- Returns (return code, output). """
+ Each method returns a tuple (return code, output).
+ """
- def __init__(self, user_host, ssh_connection_options=None, ssh_options=None,
- shell_binary="/bin/bash", use_shell=False):
+ def __init__( # pylint: disable=too-many-arguments
+ self, user_host, ssh_connection_options=None, ssh_options=None,
+ shell_binary="/bin/bash", use_shell=False):
+ """Initialize LocalToRemoteOperations."""
- self.remote_op = remote_operations.RemoteOperations(
+ self.remote_op = remote_operations.RemoteOperations( # pylint: disable=undefined-variable
user_host=user_host, ssh_connection_options=ssh_connection_options,
ssh_options=ssh_options, retries=10, retry_sleep=10, debug=True,
shell_binary=shell_binary, use_shell=use_shell)
def shell(self, cmds, remote_dir=None):
- """ Returns tuple (ret, output) from performing remote shell operation. """
+ """Return tuple (ret, output) from performing remote shell operation."""
return self.remote_op.shell(cmds, remote_dir)
def copy_from(self, files, remote_dir=None):
- """ Returns tuple (ret, output) from performing remote copy_to operation. """
+ """Return tuple (ret, output) from performing remote copy_to operation."""
return self.remote_op.copy_from(files, remote_dir)
def copy_to(self, files, remote_dir=None):
- """ Returns tuple (ret, output) from performing remote copy_from operation. """
+ """Return tuple (ret, output) from performing remote copy_from operation."""
return self.remote_op.copy_to(files, remote_dir)
-def remote_handler(options, operations):
- """ Remote operations handler executes all remote operations on the remote host.
+def remote_handler(options, operations): # pylint: disable=too-many-branches,too-many-locals,too-many-statements
+ """Remote operations handler executes all remote operations on the remote host.
- These operations are invoked on the remote host's copy of this script.
- Only one operation can be performed at a time. """
+ These operations are invoked on the remote host's copy of this script.
+ Only one operation can be performed at a time.
+ """
# Set 'root_dir' to absolute path.
root_dir = abs_path(options.root_dir)
@@ -1222,7 +1243,7 @@ def remote_handler(options, operations):
def rsync(src_dir, dest_dir, exclude_files=None):
- """ Rsync 'src_dir' to 'dest_dir'. """
+ """Rsync 'src_dir' to 'dest_dir'."""
# Note rsync on Windows requires a Unix-style directory.
exclude_options = ""
exclude_str = ""
@@ -1242,7 +1263,7 @@ def rsync(src_dir, dest_dir, exclude_files=None):
def internal_crash(use_sudo=False, crash_option=None):
- """ Internally crash the host this excutes on. """
+ """Internally crash the host this excutes on."""
# Windows can use NotMyFault to immediately crash itself, if it's been installed.
# See https://docs.microsoft.com/en-us/sysinternals/downloads/notmyfault
@@ -1274,9 +1295,9 @@ def internal_crash(use_sudo=False, crash_option=None):
return 1, "Crash did not occur"
-def crash_server(options, crash_canary, canary_port, local_ops, script_name, client_args):
- """ Crashes server and optionally writes canary doc before crash.
- Returns tuple (ret, output). """
+def crash_server( # pylint: disable=too-many-arguments
+ options, crash_canary, canary_port, local_ops, script_name, client_args):
+ """Crash server and optionally writes canary doc before crash. Return tuple (ret, output)."""
crash_wait_time = options.crash_wait_time + random.randint(0, options.crash_wait_time_jitter)
LOGGER.info("Crashing server in %d seconds", crash_wait_time)
@@ -1312,7 +1333,7 @@ def crash_server(options, crash_canary, canary_port, local_ops, script_name, cli
]
elif options.crash_method == "aws_ec2":
- ec2 = aws_ec2.AwsEc2()
+ ec2 = aws_ec2.AwsEc2() # pylint: disable=undefined-variable
crash_func = ec2.control_instance
crash_args = ["force-stop", options.instance_id, 600, True]
@@ -1330,9 +1351,7 @@ def crash_server(options, crash_canary, canary_port, local_ops, script_name, cli
def wait_for_mongod_shutdown(data_dir, timeout=120):
- """ Waits for for mongod to shutdown.
-
- Returns 0 if shutdown occurs within 'timeout', else 1. """
+ """Wait for for mongod to shutdown; return 0 if shutdown occurs within 'timeout', else 1."""
lock_file = os.path.join(data_dir, "mongod.lock")
LOGGER.info("Waiting for mongod to release lockfile %s", lock_file)
@@ -1346,12 +1365,13 @@ def wait_for_mongod_shutdown(data_dir, timeout=120):
return 0
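A hedged sketch of the lockfile polling this function performs; mongod truncates mongod.lock to zero bytes on a clean shutdown, so an empty or missing file signals release (helper name hypothetical):

import os
import time

def wait_for_lockfile_release(lock_file, timeout=120):
    while os.path.exists(lock_file) and os.stat(lock_file).st_size > 0:
        if timeout <= 0:
            return 1
        time.sleep(1)
        timeout -= 1
    return 0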
-def get_mongo_client_args(host=None, port=None, options=None, serverSelectionTimeoutMS=600000,
- socketTimeoutMS=600000):
- """ Returns keyword arg dict used in PyMongo client. """
+def get_mongo_client_args(host=None, port=None, options=None, server_selection_timeout_ms=600000,
+ socket_timeout_ms=600000):
+ """Return keyword arg dict used in PyMongo client."""
# Set the default serverSelectionTimeoutMS & socketTimeoutMS to 10 minutes.
mongo_args = {
- "serverSelectionTimeoutMS": serverSelectionTimeoutMS, "socketTimeoutMS": socketTimeoutMS
+ "serverSelectionTimeoutMS": server_selection_timeout_ms,
+ "socketTimeoutMS": socket_timeout_ms
}
if host:
mongo_args["host"] = host
@@ -1366,12 +1386,11 @@ def get_mongo_client_args(host=None, port=None, options=None, serverSelectionTim
return mongo_args
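Usage sketch: the kwargs dict built above unpacks straight into pymongo.MongoClient, and the connection is lazy, so constructing the client does not require a live server:

import pymongo

mongo = pymongo.MongoClient(**get_mongo_client_args(
    host="localhost", port=27017, server_selection_timeout_ms=60000))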
-def mongo_shell(mongo_path, work_dir, host_port, mongo_cmds, retries=5, retry_sleep=5):
- """Starts mongo_path from work_dir, connecting to host_port and executes mongo_cmds."""
- cmds = ("""
- cd {};
- echo {} | {} {}""".format(
- pipes.quote(work_dir), pipes.quote(mongo_cmds), pipes.quote(mongo_path), host_port))
+def mongo_shell( # pylint: disable=too-many-arguments
+ mongo_path, work_dir, host_port, mongo_cmds, retries=5, retry_sleep=5):
+ """Start mongo_path from work_dir, connecting to host_port and executes mongo_cmds."""
+ cmds = "cd {}; echo {} | {} {}".format(
+ pipes.quote(work_dir), pipes.quote(mongo_cmds), pipes.quote(mongo_path), host_port)
attempt_num = 0
while True:
ret, output = execute_cmd(cmds, use_file=True)
@@ -1385,8 +1404,7 @@ def mongo_shell(mongo_path, work_dir, host_port, mongo_cmds, retries=5, retry_sl
def mongod_wait_for_primary(mongo, timeout=60, sleep_interval=3):
- """ Return True if the mongod primary is available in replica set,
- within the specified timeout."""
+ """Return True if mongod primary is available in replica set, within the specified timeout."""
start = time.time()
while not mongo.admin.command("isMaster")["ismaster"]:
@@ -1397,7 +1415,7 @@ def mongod_wait_for_primary(mongo, timeout=60, sleep_interval=3):
def mongo_reconfig_replication(mongo, host_port, repl_set):
- """ Reconfigure the mongod replica set. Return 0 if successful."""
+ """Reconfigure the mongod replica set. Return 0 if successful."""
# TODO: Rework reconfig logic as follows:
# 1. Start up mongod in standalone
@@ -1441,10 +1459,10 @@ def mongo_reconfig_replication(mongo, host_port, repl_set):
def mongo_seed_docs(mongo, db_name, coll_name, num_docs):
- """ Seed a collection with random document values. """
+ """Seed a collection with random document values."""
def rand_string(max_length=1024):
- """Returns random string of random length. """
+ """Return random string of random length."""
return ''.join(random.choice(string.letters) for _ in range(random.randint(1, max_length)))
LOGGER.info("Seeding DB '%s' collection '%s' with %d documents, %d already exist", db_name,
@@ -1466,7 +1484,7 @@ def mongo_seed_docs(mongo, db_name, coll_name, num_docs):
def mongo_validate_collections(mongo):
- """ Validates the mongo collections. Returns 0 if all are valid. """
+ """Validate the mongo collections, return 0 if all are valid."""
LOGGER.info("Validating all collections")
invalid_colls = []
@@ -1489,13 +1507,13 @@ def mongo_validate_collections(mongo):
def mongo_validate_canary(mongo, db_name, coll_name, doc):
- """ Validates a canary document. Returns 0 if the document exists. """
+ """Validate a canary document, return 0 if the document exists."""
LOGGER.info("Validating canary document %s", doc)
return 0 if not doc or mongo[db_name][coll_name].find_one(doc) else 1
def mongo_insert_canary(mongo, db_name, coll_name, doc):
- """ Inserts a canary document with 'j' True. Returns 0 if successful. """
+ """Insert a canary document with 'j' True, return 0 if successful."""
LOGGER.info("Inserting canary document %s to DB %s Collection %s", doc, db_name, coll_name)
coll = mongo[db_name][coll_name].with_options(
write_concern=pymongo.write_concern.WriteConcern(j=True))
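An illustrative canary round-trip with the two helpers above, given an existing pymongo client 'mongo'; database and collection names are hypothetical:

import time

canary_doc = {"x": time.time()}
mongo_insert_canary(mongo, "power", "cycle", canary_doc)
# ... power-cycle the host and restart mongod ...
assert mongo_validate_canary(mongo, "power", "cycle", canary_doc) == 0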
@@ -1504,7 +1522,7 @@ def mongo_insert_canary(mongo, db_name, coll_name, doc):
def new_resmoke_config(config_file, new_config_file, test_data, eval_str=""):
- """ Creates 'new_config_file', from 'config_file', with an update from 'test_data'. """
+ """Create 'new_config_file', from 'config_file', with an update from 'test_data'."""
new_config = {
"executor": {
"config": {"shell_options": {"eval": eval_str, "global_vars": {"TestData": test_data}}}
@@ -1517,9 +1535,10 @@ def new_resmoke_config(config_file, new_config_file, test_data, eval_str=""):
yaml.safe_dump(config, yaml_stream)
-def resmoke_client(work_dir, mongo_path, host_port, js_test, resmoke_suite, repeat_num=1,
- no_wait=False, log_file=None):
- """Starts resmoke client from work_dir, connecting to host_port and executes js_test."""
+def resmoke_client( # pylint: disable=too-many-arguments
+ work_dir, mongo_path, host_port, js_test, resmoke_suite, repeat_num=1, no_wait=False,
+ log_file=None):
+ """Start resmoke client from work_dir, connecting to host_port and executes js_test."""
log_output = ">> {} 2>&1".format(log_file) if log_file else ""
cmds = ("cd {}; "
"python buildscripts/resmoke.py"
@@ -1540,12 +1559,14 @@ def resmoke_client(work_dir, mongo_path, host_port, js_test, resmoke_suite, repe
return ret, output
-def main():
- """ Main program. """
+def main(): # pylint: disable=too-many-branches,too-many-locals,too-many-statements
+ """Execute Main program."""
- global _report_json_success
- global _report_json
- global _report_json_file
+ # pylint: disable=global-statement
+ global REPORT_JSON
+ global REPORT_JSON_FILE
+ global REPORT_JSON_SUCCESS
+ # pylint: enable=global-statement
atexit.register(exit_handler)
register_signal_handler(dump_stacks_and_exit)
@@ -1592,10 +1613,10 @@ Examples:
" -o StrictHostKeyChecking=no"
" -o ConnectTimeout=30"
" -o ConnectionAttempts=25")
- test_options.add_option(
- "--sshConnection", dest="ssh_connection_options",
- help=("Server ssh additional connection options, i.e., '-i ident.pem'"
- " which are added to '{}'".format(default_ssh_connection_options)), default=None)
+ test_options.add_option("--sshConnection", dest="ssh_connection_options",
+ help="Server ssh additional connection options, i.e., '-i ident.pem'"
+ " which are added to '{}'".format(default_ssh_connection_options),
+ default=None)
test_options.add_option("--testLoops", dest="num_loops",
help="Number of powercycle loops to run [default: %default]",
@@ -1614,26 +1635,26 @@ Examples:
default=None)
test_options.add_option("--backupPathBefore", dest="backup_path_before",
- help=("Path where the db_path is backed up before crash recovery,"
- " defaults to '<rootDir>/data-beforerecovery'"), default=None)
+ help="Path where the db_path is backed up before crash recovery,"
+ " defaults to '<rootDir>/data-beforerecovery'", default=None)
test_options.add_option("--backupPathAfter", dest="backup_path_after",
- help=("Path where the db_path is backed up after crash recovery,"
- " defaults to '<rootDir>/data-afterrecovery'"), default=None)
+ help="Path where the db_path is backed up after crash recovery,"
+ " defaults to '<rootDir>/data-afterrecovery'", default=None)
validate_locations = ["local", "remote"]
test_options.add_option("--validate", dest="validate_collections",
- help=("Run validate on all collections after mongod restart after"
- " a powercycle. Choose from {} to specify where the"
- " validate runs.".format(validate_locations)),
+ help="Run validate on all collections after mongod restart after"
+ " a powercycle. Choose from {} to specify where the"
+ " validate runs.".format(validate_locations),
choices=validate_locations, default=None)
canary_locations = ["local", "remote"]
test_options.add_option("--canary", dest="canary",
- help=("Generate and validate canary document between powercycle"
- " events. Choose from {} to specify where the canary is"
- " generated from. If the 'crashMethod' is not 'internal"
- " then this option must be 'local'.".format(canary_locations)),
+ help="Generate and validate canary document between powercycle"
+ " events. Choose from {} to specify where the canary is"
+ " generated from. If the 'crashMethod' is not 'internal"
+ " then this option must be 'local'.".format(canary_locations),
choices=canary_locations, default=None)
test_options.add_option("--docForCanary", dest="canary_doc", help=optparse.SUPPRESS_HELP,
@@ -1650,12 +1671,12 @@ Examples:
default="cycle")
test_options.add_option("--writeConcern", dest="write_concern",
- help=("mongo (shell) CRUD client writeConcern, i.e.,"
- " '{\"w\": \"majority\"}' [default: '%default']"), default="{}")
+ help="mongo (shell) CRUD client writeConcern, i.e.,"
+ " '{\"w\": \"majority\"}' [default: '%default']", default="{}")
test_options.add_option("--readConcernLevel", dest="read_concern_level",
- help=("mongo (shell) CRUD client readConcernLevel, i.e.,"
- "'majority'"), default=None)
+ help="mongo (shell) CRUD client readConcernLevel, i.e.,"
+ "'majority'", default=None)
# Crash options
crash_methods = ["aws_ec2", "internal", "mpower"]
@@ -1667,28 +1688,27 @@ Examples:
"private_ip_address", "public_ip_address", "private_dns_name", "public_dns_name"
]
crash_options.add_option("--crashOption", dest="crash_option",
- help=("Secondary argument for the following --crashMethod:"
- " 'aws_ec2': specify EC2 'address_type', which is one of {} and"
- " defaults to 'public_ip_address'."
- " 'mpower': specify output<num> to turn"
- " off/on, i.e., 'output1' (REQUIRED)."
- " 'internal': for Windows, optionally specify a crash method,"
- " i.e., 'notmyfault/notmyfaultc64.exe"
- " -accepteula crash 1'".format(aws_address_types)), default=None)
-
- crash_options.add_option(
- "--instanceId", dest="instance_id",
- help=("The instance ID of an AWS EC2 host. If specified, this instance"
- " will be started after a crash, if it is not in a running state."
- " This is required if --crashOption is 'aws_ec2'."), default=None)
+ help="Secondary argument for the following --crashMethod:"
+ " 'aws_ec2': specify EC2 'address_type', which is one of {} and"
+ " defaults to 'public_ip_address'."
+ " 'mpower': specify output<num> to turn"
+ " off/on, i.e., 'output1' (REQUIRED)."
+ " 'internal': for Windows, optionally specify a crash method,"
+ " i.e., 'notmyfault/notmyfaultc64.exe"
+ " -accepteula crash 1'".format(aws_address_types), default=None)
+
+ crash_options.add_option("--instanceId", dest="instance_id",
+ help="The instance ID of an AWS EC2 host. If specified, this instance"
+ " will be started after a crash, if it is not in a running state."
+ " This is required if --crashOption is 'aws_ec2'.", default=None)
crash_options.add_option("--crashWaitTime", dest="crash_wait_time",
- help=("Time, in seconds, to wait before issuing crash [default:"
- " %default]"), type="int", default=30)
+ help="Time, in seconds, to wait before issuing crash [default:"
+ " %default]", type="int", default=30)
crash_options.add_option("--jitterForCrashWaitTime", dest="crash_wait_time_jitter",
- help=("The maximum time, in seconds, to be added to --crashWaitTime,"
- " as a uniform distributed random value, [default: %default]"),
+ help="The maximum time, in seconds, to be added to --crashWaitTime,"
+ " as a uniform distributed random value, [default: %default]",
type="int", default=10)
crash_options.add_option("--sshCrashUserHost", dest="ssh_crash_user_host",
@@ -1701,30 +1721,30 @@ Examples:
# MongoDB options
mongodb_options.add_option("--downloadUrl", dest="tarball_url",
- help=("URL of tarball to test, if unspecifed latest tarball will be"
- " used"), default="latest")
+ help="URL of tarball to test, if unspecifed latest tarball will be"
+ " used", default="latest")
mongodb_options.add_option("--rootDir", dest="root_dir",
- help=("Root directory, on remote host, to install tarball and data"
- " directory [default: 'mongodb-powertest-<epochSecs>']"),
+ help="Root directory, on remote host, to install tarball and data"
+ " directory [default: 'mongodb-powertest-<epochSecs>']",
default=None)
mongodb_options.add_option("--mongodbBinDir", dest="mongodb_bin_dir",
- help=("Directory, on remote host, containing mongoDB binaries,"
- " overrides bin from tarball in --downloadUrl"), default=None)
+ help="Directory, on remote host, containing mongoDB binaries,"
+ " overrides bin from tarball in --downloadUrl", default=None)
mongodb_options.add_option("--dbPath", dest="db_path",
- help=("Data directory to use, on remote host, if unspecified"
- " it will be '<rootDir>/data/db'"), default=None)
+ help="Data directory to use, on remote host, if unspecified"
+ " it will be '<rootDir>/data/db'", default=None)
mongodb_options.add_option("--logPath", dest="log_path",
- help=("Log path, on remote host, if unspecified"
- " it will be '<rootDir>/log/mongod.log'"), default=None)
+ help="Log path, on remote host, if unspecified"
+ " it will be '<rootDir>/log/mongod.log'", default=None)
# mongod options
mongod_options.add_option("--replSet", dest="repl_set",
- help=("Name of mongod single node replica set, if unpsecified mongod"
- " defaults to standalone node"), default=None)
+ help="Name of mongod single node replica set, if unpsecified mongod"
+ " defaults to standalone node", default=None)
# The current port used to start and connect to mongod. Not meant to be specified
# by the user.
@@ -1736,8 +1756,8 @@ Examples:
# The ports used on the 'server' side when in standard or secret mode.
mongod_options.add_option("--mongodUsablePorts", dest="usable_ports", nargs=2,
- help=("List of usable ports to be used by mongod for"
- " standard and secret modes, [default: %default]"), type="int",
+ help="List of usable ports to be used by mongod for"
+ " standard and secret modes, [default: %default]", type="int",
default=[27017, 37017])
mongod_options.add_option("--mongodOptions", dest="mongod_options",
@@ -1747,88 +1767,86 @@ Examples:
help="Set the FeatureCompatibilityVersion of mongod.", default=None)
mongod_options.add_option("--removeLockFile", dest="remove_lock_file",
- help=("If specified, the mongod.lock file will be deleted after a"
- " powercycle event, before mongod is started. This is a"
- " workaround for mongod failing start with MMAPV1 (See"
- " SERVER-15109)."), action="store_true", default=False)
+ help="If specified, the mongod.lock file will be deleted after a"
+ " powercycle event, before mongod is started. This is a"
+ " workaround for mongod failing start with MMAPV1 (See"
+ " SERVER-15109).", action="store_true", default=False)
# Client options
mongo_path = distutils.spawn.find_executable("mongo",
os.getcwd() + os.pathsep + os.environ["PATH"])
client_options.add_option("--mongoPath", dest="mongo_path",
- help=("Path to mongo (shell) executable, if unspecifed, mongo client"
- " is launched from the current directory."), default=mongo_path)
+ help="Path to mongo (shell) executable, if unspecifed, mongo client"
+ " is launched from the current directory.", default=mongo_path)
client_options.add_option("--mongoRepoRootDir", dest="mongo_repo_root_dir",
- help=("Root directory of mongoDB repository, defaults to current"
- " directory."), default=None)
+ help="Root directory of mongoDB repository, defaults to current"
+ " directory.", default=None)
client_options.add_option("--crudClient", dest="crud_client",
- help=("The path to the CRUD client script on the local host"
- " [default: '%default']."),
- default="jstests/hooks/crud_client.js")
+ help="The path to the CRUD client script on the local host"
+ " [default: '%default'].", default="jstests/hooks/crud_client.js")
with_external_server = "buildscripts/resmokeconfig/suites/with_external_server.yml"
- client_options.add_option(
- "--configCrudClient", dest="config_crud_client",
- help=("The path to the CRUD client configuration YML file on the"
- " local host. This is the resmoke.py suite file. If unspecified,"
- " a default configuration YML file (%default) will be used that"
- " provides a mongo (shell) DB connection to a running mongod."),
- default=with_external_server)
+ client_options.add_option("--configCrudClient", dest="config_crud_client",
+ help="The path to the CRUD client configuration YML file on the"
+ " local host. This is the resmoke.py suite file. If unspecified,"
+ " a default configuration YML file (%default) will be used that"
+ " provides a mongo (shell) DB connection to a running mongod.",
+ default=with_external_server)
client_options.add_option("--numCrudClients", dest="num_crud_clients",
- help=("The number of concurrent CRUD clients to run"
- " [default: '%default']."), type="int", default=1)
+ help="The number of concurrent CRUD clients to run"
+ " [default: '%default'].", type="int", default=1)
client_options.add_option("--numFsmClients", dest="num_fsm_clients",
- help=("The number of concurrent FSM clients to run"
- " [default: '%default']."), type="int", default=0)
+ help="The number of concurrent FSM clients to run"
+ " [default: '%default'].", type="int", default=0)
client_options.add_option("--fsmWorkloadFiles", dest="fsm_workload_files",
- help=("A list of the FSM workload files to execute. More than one"
- " file can be specified either in a comma-delimited string,"
- " or by specifying this option more than once. If unspecified,"
- " then all FSM workload files are executed."), action="append",
+ help="A list of the FSM workload files to execute. More than one"
+ " file can be specified either in a comma-delimited string,"
+ " or by specifying this option more than once. If unspecified,"
+ " then all FSM workload files are executed.", action="append",
default=[])
client_options.add_option("--fsmWorkloadBlacklistFiles", dest="fsm_workload_blacklist_files",
- help=("A list of the FSM workload files to blacklist. More than one"
- " file can be specified either in a comma-delimited string,"
- " or by specifying this option more than once. Note the"
- " file name is the basename, i.e., 'distinct.js'."),
- action="append", default=[])
+ help="A list of the FSM workload files to blacklist. More than one"
+ " file can be specified either in a comma-delimited string,"
+ " or by specifying this option more than once. Note the"
+ " file name is the basename, i.e., 'distinct.js'.", action="append",
+ default=[])
# Program options
program_options.add_option("--configFile", dest="config_file",
- help=("YAML configuration file of program options."
- " Option values are mapped to command line option names."
- " The command line option overrides any specified options"
- " from this file."), default=None)
+ help="YAML configuration file of program options."
+ " Option values are mapped to command line option names."
+ " The command line option overrides any specified options"
+ " from this file.", default=None)
program_options.add_option("--saveConfigOptions", dest="save_config_options",
- help=("Save the program options to a YAML configuration file."
- " If this options is specified the program only saves"
- " the configuration file and exits."), default=None)
+ help="Save the program options to a YAML configuration file."
+ " If this options is specified the program only saves"
+ " the configuration file and exits.", default=None)
- program_options.add_option("--reportJsonFile", dest="report_json_file",
- help=("Create or update the specified report file upon program"
- " exit."), default=None)
+ program_options.add_option("--reportJsonFile", dest="REPORT_JSON_FILE",
+ help="Create or update the specified report file upon program"
+ " exit.", default=None)
program_options.add_option("--remotePython", dest="remote_python",
- help=("The python intepreter to use on the remote host"
- " [default: '%default']."
- " To be able to use a python virtual environment,"
- " which has already been provisioned on the remote"
- " host, specify something similar to this:"
- " 'source venv/bin/activate; python'"), default="python")
+ help="The python intepreter to use on the remote host"
+ " [default: '%default']."
+ " To be able to use a python virtual environment,"
+ " which has already been provisioned on the remote"
+ " host, specify something similar to this:"
+ " 'source venv/bin/activate; python'", default="python")
program_options.add_option("--remoteSudo", dest="remote_sudo",
- help=("Use sudo on the remote host for priveleged operations."
- " [default: %default]."
- " For non-Windows systems, in order to perform privileged"
- " operations on the remote host, specify this, if the"
- " remote user is not able to perform root operations."),
+ help="Use sudo on the remote host for priveleged operations."
+ " [default: %default]."
+ " For non-Windows systems, in order to perform privileged"
+ " operations on the remote host, specify this, if the"
+ " remote user is not able to perform root operations.",
action="store_true", default=False)
log_levels = ["debug", "info", "warning", "error"]
@@ -1901,22 +1919,22 @@ Examples:
print("{}:{}".format(script_name, __version__))
sys.exit(0)
- if options.report_json_file:
- _report_json_file = options.report_json_file
- if _report_json_file and os.path.exists(_report_json_file):
- with open(_report_json_file) as jstream:
- _report_json = json.load(jstream)
+ if options.report_json_file:
+ REPORT_JSON_FILE = options.report_json_file
+ if REPORT_JSON_FILE and os.path.exists(REPORT_JSON_FILE):
+ with open(REPORT_JSON_FILE) as jstream:
+ REPORT_JSON = json.load(jstream)
else:
- _report_json = {
+ REPORT_JSON = {
"failures":
0, "results": [{
"status": "fail", "test_file": __name__, "exit_code": 0, "elapsed": 0,
"start": int(time.time()), "end": int(time.time())
}]
}
- LOGGER.debug("Updating/creating report JSON %s", _report_json)
+ LOGGER.debug("Updating/creating report JSON %s", REPORT_JSON)
# Disable this option such that the remote side does not generate report.json
- options.report_json_file = None
+ options.report_json_file = None
# Setup the crash options
if options.crash_method == "mpower" and options.crash_option is None:
@@ -2065,7 +2083,7 @@ Examples:
# Establish EC2 connection if an instance_id is specified.
if options.instance_id:
- ec2 = aws_ec2.AwsEc2()
+ ec2 = aws_ec2.AwsEc2() # pylint: disable=undefined-variable
# Determine address_type if not using 'aws_ec2' crash_method.
if options.crash_method != "aws_ec2":
address_type = "public_ip_address"
@@ -2090,7 +2108,7 @@ Examples:
# Pass client_args to the remote script invocation.
client_args = ""
- for option in parser._get_all_options():
+ for option in parser._get_all_options(): # pylint: disable=protected-access
if option.dest:
option_value = getattr(options, option.dest, None)
if option_value != option.default:
@@ -2191,8 +2209,8 @@ Examples:
# Optionally validate canary document locally.
if validate_canary_local:
mongo = pymongo.MongoClient(**get_mongo_client_args(
- host=mongod_host, port=secret_port, serverSelectionTimeoutMS=one_hour_ms,
- socketTimeoutMS=one_hour_ms))
+ host=mongod_host, port=secret_port, server_selection_timeout_ms=one_hour_ms,
+ socket_timeout_ms=one_hour_ms))
ret = mongo_validate_canary(mongo, options.db_name, options.collection_name, canary_doc)
LOGGER.info("Local canary validation: %d", ret)
if ret:
@@ -2280,15 +2298,15 @@ Examples:
canary_doc = {"x": time.time()}
orig_canary_doc = copy.deepcopy(canary_doc)
mongo = pymongo.MongoClient(**get_mongo_client_args(
- host=mongod_host, port=standard_port, serverSelectionTimeoutMS=one_hour_ms,
- socketTimeoutMS=one_hour_ms))
+ host=mongod_host, port=standard_port, server_selection_timeout_ms=one_hour_ms,
+ socket_timeout_ms=one_hour_ms))
crash_canary["function"] = mongo_insert_canary
crash_canary["args"] = [mongo, options.db_name, options.collection_name, canary_doc]
ret, output = crash_server(options, crash_canary, standard_port, local_ops, script_name,
client_args)
# For internal crashes 'ret' is non-zero, because the ssh session unexpectedly terminates.
if options.crash_method != "internal" and ret:
- raise Exception("Crash of server failed: {}", format(output))
+ raise Exception("Crash of server failed: {}".format(output))
# Wait a bit after sending command to crash the server to avoid connecting to the
# server before the actual crash occurs.
time.sleep(10)
@@ -2331,7 +2349,7 @@ Examples:
if loop_num == options.num_loops or test_time >= options.test_time:
break
- _report_json_success = True
+ REPORT_JSON_SUCCESS = True
sys.exit(0)
diff --git a/pytests/requirements.txt b/pytests/requirements.txt
new file mode 100644
index 00000000000..3bb636e9613
--- /dev/null
+++ b/pytests/requirements.txt
@@ -0,0 +1,3 @@
+# powertest
+boto3 == 1.5.27
+psutil == 5.4.3