author     Jonathan Abrahams <jonathan@mongodb.com>    2018-03-26 11:25:04 -0400
committer  Jonathan Abrahams <jonathan@mongodb.com>    2018-03-26 13:04:25 -0400
commit     36148ad8bbdb94162b2926f4700d935ee4dc5994 (patch)
tree       1d893c4ca0b0afa407f7724c7942dfbf643560af /buildscripts
parent     d62d631f0ca40c5199fdfae2980080ca0cc982b5 (diff)
download   mongo-36148ad8bbdb94162b2926f4700d935ee4dc5994.tar.gz
SERVER-23312 Format Python files with yapf
Diffstat (limited to 'buildscripts')
-rw-r--r--  buildscripts/.pylintrc | 3
-rw-r--r--  buildscripts/.style.yapf | 6
-rw-r--r--  buildscripts/aggregate_tracefiles.py | 17
-rwxr-xr-x  buildscripts/aws_ec2.py | 187
-rw-r--r--  buildscripts/burn_in_tests.py | 57
-rw-r--r--  buildscripts/bypass_compile_and_fetch_binaries.py | 53
-rw-r--r--  buildscripts/ciconfig/__init__.py | 1
-rw-r--r--  buildscripts/ciconfig/evergreen.py | 14
-rw-r--r--  buildscripts/ciconfig/tags.py | 10
-rwxr-xr-x  buildscripts/clang_format.py | 113
-rwxr-xr-x  buildscripts/collect_resource_info.py | 19
-rwxr-xr-x  buildscripts/combine_reports.py | 15
-rwxr-xr-x  buildscripts/errorcodes.py | 94
-rwxr-xr-x  buildscripts/eslint.py | 31
-rwxr-xr-x  buildscripts/evergreen_run_tests.py | 70
-rwxr-xr-x  buildscripts/fetch_test_lifecycle.py | 50
-rw-r--r--  buildscripts/gdb/mongo.py | 5
-rw-r--r--  buildscripts/gdb/mongo_lock.py | 28
-rw-r--r--  buildscripts/gdb/mongo_printers.py | 19
-rwxr-xr-x  buildscripts/generate_compile_expansions_shared_cache.py | 11
-rw-r--r--  buildscripts/git.py | 23
-rwxr-xr-x  buildscripts/hang_analyzer.py | 151
-rw-r--r--  buildscripts/idl/idl/ast.py | 4
-rw-r--r--  buildscripts/idl/idl/bson.py | 90
-rw-r--r--  buildscripts/idl/idl/cpp_types.py | 84
-rw-r--r--  buildscripts/idl/idl/enum_types.py | 51
-rw-r--r--  buildscripts/idl/idl/errors.py | 83
-rw-r--r--  buildscripts/idl/idl/generator.py | 126
-rw-r--r--  buildscripts/idl/idl/parser.py | 103
-rw-r--r--  buildscripts/idl/idl/struct_types.py | 97
-rw-r--r--  buildscripts/idl/idl/syntax.py | 8
-rw-r--r--  buildscripts/idl/idlc.py | 20
-rw-r--r--  buildscripts/idl/tests/test_generator.py | 3
-rw-r--r--  buildscripts/idl/tests/test_import.py | 43
-rw-r--r--  buildscripts/idl/tests/test_parser.py | 20
-rw-r--r--  buildscripts/jiraclient.py | 27
-rw-r--r--  buildscripts/lint.py | 129
-rw-r--r--  buildscripts/linter/yapf.py | 2
-rwxr-xr-x  buildscripts/make_archive.py | 34
-rw-r--r--  buildscripts/make_vcxproj.py | 27
-rw-r--r--  buildscripts/moduleconfig.py | 13
-rwxr-xr-x  buildscripts/mongosymb.py | 41
-rw-r--r--  buildscripts/msitrim.py | 95
-rwxr-xr-x  buildscripts/packager-enterprise.py | 180
-rwxr-xr-x  buildscripts/packager.py | 436
-rw-r--r--  buildscripts/promote_silent_failures.py | 12
-rw-r--r--  buildscripts/prune_check.py | 11
-rwxr-xr-x  buildscripts/pylinters.py | 20
-rwxr-xr-x  buildscripts/remote_operations.py | 257
-rw-r--r--  buildscripts/requirements.txt | 2
-rwxr-xr-x  buildscripts/resmoke.py | 19
-rw-r--r--  buildscripts/resmokeconfig/loggers/__init__.py | 1
-rw-r--r--  buildscripts/resmokeconfig/suites/__init__.py | 1
-rw-r--r--  buildscripts/resmokelib/__init__.py | 2
-rw-r--r--  buildscripts/resmokelib/config.py | 28
-rw-r--r--  buildscripts/resmokelib/core/network.py | 6
-rw-r--r--  buildscripts/resmokelib/core/process.py | 23
-rw-r--r--  buildscripts/resmokelib/logging/buildlogger.py | 40
-rw-r--r--  buildscripts/resmokelib/logging/flush.py | 1
-rw-r--r--  buildscripts/resmokelib/logging/formatters.py | 2
-rw-r--r--  buildscripts/resmokelib/logging/handlers.py | 8
-rw-r--r--  buildscripts/resmokelib/logging/loggers.py | 18
-rw-r--r--  buildscripts/resmokelib/parser.py | 63
-rw-r--r--  buildscripts/resmokelib/selector.py | 72
-rw-r--r--  buildscripts/resmokelib/sighandler.py | 7
-rw-r--r--  buildscripts/resmokelib/suitesconfig.py | 10
-rw-r--r--  buildscripts/resmokelib/testing/executor.py | 42
-rw-r--r--  buildscripts/resmokelib/testing/fixtures/__init__.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/fixtures/interface.py | 4
-rw-r--r--  buildscripts/resmokelib/testing/fixtures/replicaset.py | 46
-rw-r--r--  buildscripts/resmokelib/testing/fixtures/shardedcluster.py | 82
-rw-r--r--  buildscripts/resmokelib/testing/fixtures/standalone.py | 15
-rw-r--r--  buildscripts/resmokelib/testing/hook_test_archival.py | 24
-rw-r--r--  buildscripts/resmokelib/testing/hooks/__init__.py | 4
-rw-r--r--  buildscripts/resmokelib/testing/hooks/cleanup.py | 4
-rw-r--r--  buildscripts/resmokelib/testing/hooks/combine_benchmark_results.py | 13
-rw-r--r--  buildscripts/resmokelib/testing/hooks/dbhash.py | 7
-rw-r--r--  buildscripts/resmokelib/testing/hooks/initialsync.py | 28
-rw-r--r--  buildscripts/resmokelib/testing/hooks/interface.py | 1
-rw-r--r--  buildscripts/resmokelib/testing/hooks/jsfile.py | 10
-rw-r--r--  buildscripts/resmokelib/testing/hooks/oplog.py | 7
-rw-r--r--  buildscripts/resmokelib/testing/hooks/periodic_kill_secondaries.py | 56
-rw-r--r--  buildscripts/resmokelib/testing/hooks/stepdown.py | 23
-rw-r--r--  buildscripts/resmokelib/testing/hooks/validate.py | 7
-rw-r--r--  buildscripts/resmokelib/testing/job.py | 4
-rw-r--r--  buildscripts/resmokelib/testing/report.py | 4
-rw-r--r--  buildscripts/resmokelib/testing/suite.py | 37
-rw-r--r--  buildscripts/resmokelib/testing/summary.py | 7
-rw-r--r--  buildscripts/resmokelib/testing/testcases/__init__.py | 1
-rw-r--r--  buildscripts/resmokelib/testing/testcases/benchmark_test.py | 14
-rw-r--r--  buildscripts/resmokelib/testing/testcases/cpp_integration_test.py | 8
-rw-r--r--  buildscripts/resmokelib/testing/testcases/cpp_unittest.py | 9
-rw-r--r--  buildscripts/resmokelib/testing/testcases/dbtest.py | 12
-rw-r--r--  buildscripts/resmokelib/testing/testcases/fsm_workload_test.py | 14
-rw-r--r--  buildscripts/resmokelib/testing/testcases/interface.py | 5
-rw-r--r--  buildscripts/resmokelib/testing/testcases/json_schema_test.py | 14
-rw-r--r--  buildscripts/resmokelib/testing/testcases/jsrunnerfile.py | 13
-rw-r--r--  buildscripts/resmokelib/testing/testcases/jstest.py | 33
-rw-r--r--  buildscripts/resmokelib/testing/testcases/mongos_test.py | 7
-rw-r--r--  buildscripts/resmokelib/testing/testcases/sleeptest.py | 4
-rw-r--r--  buildscripts/resmokelib/utils/archival.py | 86
-rw-r--r--  buildscripts/resmokelib/utils/globstar.py | 1
-rw-r--r--  buildscripts/resmokelib/utils/jscomment.py | 5
-rw-r--r--  buildscripts/resmokelib/utils/queue.py | 1
-rw-r--r--  buildscripts/resmokelib/utils/registry.py | 7
-rwxr-xr-x  buildscripts/scons.py | 2
-rw-r--r--  buildscripts/scons_cache_prune.py | 12
-rwxr-xr-x  buildscripts/setup_multiversion_mongodb.py | 97
-rwxr-xr-x  buildscripts/test_failures.py | 126
-rw-r--r--  buildscripts/tests/__init__.py | 1
-rw-r--r--  buildscripts/tests/ciconfig/__init__.py | 1
-rw-r--r--  buildscripts/tests/ciconfig/test_evergreen.py | 32
-rw-r--r--  buildscripts/tests/ciconfig/test_tags.py | 3
-rw-r--r--  buildscripts/tests/resmokelib/__init__.py | 1
-rw-r--r--  buildscripts/tests/resmokelib/logging/test_buildlogger.py | 13
-rw-r--r--  buildscripts/tests/resmokelib/test_archival.py | 81
-rw-r--r--  buildscripts/tests/resmokelib/test_selector.py | 288
-rw-r--r--  buildscripts/tests/resmokelib/testing/__init__.py | 1
-rw-r--r--  buildscripts/tests/resmokelib/testing/fixtures/test_interface.py | 1
-rw-r--r--  buildscripts/tests/resmokelib/testing/hooks/__init__.py | 1
-rwxr-xr-x  buildscripts/tests/resmokelib/testing/hooks/test_combine_benchmark_results.py | 56
-rwxr-xr-x  buildscripts/tests/test_aws_ec2.py | 125
-rw-r--r--  buildscripts/tests/test_fetch_test_lifecycle.py | 57
-rw-r--r--  buildscripts/tests/test_git.py | 1
-rwxr-xr-x  buildscripts/tests/test_remote_operations.py | 32
-rw-r--r--  buildscripts/tests/test_test_failures.py | 759
-rw-r--r--  buildscripts/tests/test_update_test_lifecycle.py | 323
-rwxr-xr-x  buildscripts/update_test_lifecycle.py | 304
-rw-r--r--  buildscripts/utils.py | 39
129 files changed, 2803 insertions, 3510 deletions
diff --git a/buildscripts/.pylintrc b/buildscripts/.pylintrc
index 686543fb789..23cbf07e3cf 100644
--- a/buildscripts/.pylintrc
+++ b/buildscripts/.pylintrc
@@ -1,10 +1,11 @@
# See https://www.pylint.org/
[MESSAGES CONTROL]
# C0301 - line-too-long - some of the type annotations are longer then 100 columns
+# C0330 - bad-continuation - ignore conflicts produced by yapf formatting
# E0401 - import-error - ignore imports that fail to load
# I0011 - locally-disabled - ignore warnings about disable pylint checks
# R0903 - too-few-public-method - pylint does not always know best
# W0511 - fixme - ignore TODOs in comments
# W0611 - unused-import - typing module is needed for mypy
-disable=fixme,import-error,line-too-long,locally-disabled,too-few-public-methods,unused-import
+disable=bad-continuation,fixme,import-error,line-too-long,locally-disabled,too-few-public-methods,unused-import
diff --git a/buildscripts/.style.yapf b/buildscripts/.style.yapf
deleted file mode 100644
index 12534bdeead..00000000000
--- a/buildscripts/.style.yapf
+++ /dev/null
@@ -1,6 +0,0 @@
-# See https://github.com/google/yapf
-[style]
-based_on_style = pep8
-column_limit = 100
-indent_dictionary_value = True
-
diff --git a/buildscripts/aggregate_tracefiles.py b/buildscripts/aggregate_tracefiles.py
index 8f1db7851c9..8ff46194ebc 100644
--- a/buildscripts/aggregate_tracefiles.py
+++ b/buildscripts/aggregate_tracefiles.py
@@ -2,11 +2,12 @@ import subprocess
import os
import sys
from optparse import OptionParser
-
""" This script aggregates several tracefiles into one tracefile
All but the last argument are input tracefiles or .txt files which list tracefiles.
The last argument is the tracefile to which the output will be written
"""
+
+
def aggregate(inputs, output):
"""Aggregates the tracefiles given in inputs to a tracefile given by output"""
args = ['lcov']
@@ -17,18 +18,20 @@ def aggregate(inputs, output):
args += ['-o', output]
print ' '.join(args)
-
- return subprocess.call(args)
+
+ return subprocess.call(args)
+
def getfilesize(path):
if not os.path.isfile(path):
return 0
return os.path.getsize(path)
-def main ():
+
+def main():
inputs = []
- usage = "usage: %prog input1.info input2.info ... output.info"
+ usage = "usage: %prog input1.info input2.info ... output.info"
parser = OptionParser(usage=usage)
(options, args) = parser.parse_args()
@@ -43,12 +46,12 @@ def main ():
inputs.append(path)
elif ext == '.txt':
- inputs += [line.strip() for line in open(path)
- if getfilesize(line.strip()) > 0]
+ inputs += [line.strip() for line in open(path) if getfilesize(line.strip()) > 0]
else:
return "unrecognized file type"
return aggregate(inputs, args[-1])
+
if __name__ == '__main__':
sys.exit(main())
diff --git a/buildscripts/aws_ec2.py b/buildscripts/aws_ec2.py
index 4a92e5c9633..6e36d0ee055 100755
--- a/buildscripts/aws_ec2.py
+++ b/buildscripts/aws_ec2.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-
"""AWS EC2 instance launcher and controller."""
from __future__ import print_function
@@ -21,16 +20,9 @@ class AwsEc2(object):
"""Class to support controlling AWS EC2 istances."""
InstanceStatus = collections.namedtuple("InstanceStatus", [
- "instance_id",
- "image_id",
- "instance_type",
- "state",
- "private_ip_address",
- "public_ip_address",
- "private_dns_name",
- "public_dns_name",
- "tags"
- ])
+ "instance_id", "image_id", "instance_type", "state", "private_ip_address",
+ "public_ip_address", "private_dns_name", "public_dns_name", "tags"
+ ])
def __init__(self):
try:
@@ -46,8 +38,7 @@ class AwsEc2(object):
"""Wait up to 'wait_time_secs' for instance to be in 'state'.
Return 0 if 'state' reached, 1 otherwise."""
if show_progress:
- print("Waiting for instance {} to reach '{}' state".format(instance, state),
- end="",
+ print("Waiting for instance {} to reach '{}' state".format(instance, state), end="",
file=sys.stdout)
reached_state = False
end_time = time.time() + wait_time_secs
@@ -83,8 +74,7 @@ class AwsEc2(object):
def control_instance(self, mode, image_id, wait_time_secs=0, show_progress=False):
"""Controls an AMI instance. Returns 0 & status information, if successful."""
if mode not in _MODES:
- raise ValueError(
- "Invalid mode '{}' specified, choose from {}.".format(mode, _MODES))
+ raise ValueError("Invalid mode '{}' specified, choose from {}.".format(mode, _MODES))
sys.stdout.flush()
instance = self.connection.Instance(image_id)
@@ -112,23 +102,17 @@ class AwsEc2(object):
ret = 0
if wait_time_secs > 0:
- ret = self.wait_for_state(
- instance=instance,
- state=state,
- wait_time_secs=wait_time_secs,
- show_progress=show_progress)
+ ret = self.wait_for_state(instance=instance, state=state, wait_time_secs=wait_time_secs,
+ show_progress=show_progress)
try:
# Always provide status after executing command.
status = self.InstanceStatus(
- getattr(instance, "instance_id", None),
- getattr(instance, "image_id", None),
- getattr(instance, "instance_type", None),
- getattr(instance, "state", None),
+ getattr(instance, "instance_id", None), getattr(instance, "image_id", None),
+ getattr(instance, "instance_type", None), getattr(instance, "state", None),
getattr(instance, "private_ip_address", None),
getattr(instance, "public_ip_address", None),
getattr(instance, "private_dns_name", None),
- getattr(instance, "public_dns_name", None),
- getattr(instance, "tags", None))
+ getattr(instance, "public_dns_name", None), getattr(instance, "tags", None))
except botocore.exceptions.ClientError as err:
return 1, err.message
@@ -151,18 +135,9 @@ class AwsEc2(object):
time.sleep(i + 1)
instance.create_tags(Tags=tags)
- def launch_instance(self,
- ami,
- instance_type,
- block_devices=None,
- key_name=None,
- security_group_ids=None,
- security_groups=None,
- subnet_id=None,
- tags=None,
- wait_time_secs=0,
- show_progress=False,
- **kwargs):
+ def launch_instance(self, ami, instance_type, block_devices=None, key_name=None,
+ security_group_ids=None, security_groups=None, subnet_id=None, tags=None,
+ wait_time_secs=0, show_progress=False, **kwargs):
"""Launches and tags an AMI instance.
Returns the tuple (0, status_information), if successful."""
@@ -187,22 +162,15 @@ class AwsEc2(object):
kwargs["KeyName"] = key_name
try:
- instances = self.connection.create_instances(
- ImageId=ami,
- InstanceType=instance_type,
- MaxCount=1,
- MinCount=1,
- **kwargs)
+ instances = self.connection.create_instances(ImageId=ami, InstanceType=instance_type,
+ MaxCount=1, MinCount=1, **kwargs)
except (botocore.exceptions.ClientError, botocore.exceptions.ParamValidationError) as err:
return 1, err.message
instance = instances[0]
if wait_time_secs > 0:
- self.wait_for_state(
- instance=instance,
- state="running",
- wait_time_secs=wait_time_secs,
- show_progress=show_progress)
+ self.wait_for_state(instance=instance, state="running", wait_time_secs=wait_time_secs,
+ show_progress=show_progress)
self.tag_instance(instance.instance_id, tags)
@@ -218,93 +186,60 @@ def main():
control_options = optparse.OptionGroup(parser, "Control options")
create_options = optparse.OptionGroup(parser, "Create options")
- parser.add_option("--mode",
- dest="mode",
- choices=_MODES,
- default="status",
- help="Operations to perform on an EC2 instance, choose one of"
- " '{}', defaults to '%default'.".format(", ".join(_MODES)))
+ parser.add_option("--mode", dest="mode", choices=_MODES, default="status",
+ help=("Operations to perform on an EC2 instance, choose one of"
+ " '{}', defaults to '%default'.".format(", ".join(_MODES))))
- control_options.add_option("--imageId",
- dest="image_id",
- default=None,
+ control_options.add_option("--imageId", dest="image_id", default=None,
help="EC2 image_id to perform operation on [REQUIRED for control].")
- control_options.add_option("--waitTimeSecs",
- dest="wait_time_secs",
- type=int,
- default=5 * 60,
- help="Time to wait for EC2 instance to reach it's new state,"
- " defaults to '%default'.")
+ control_options.add_option("--waitTimeSecs", dest="wait_time_secs", type=int, default=5 * 60,
+ help=("Time to wait for EC2 instance to reach it's new state,"
+ " defaults to '%default'."))
- create_options.add_option("--ami",
- dest="ami",
- default=None,
+ create_options.add_option("--ami", dest="ami", default=None,
help="EC2 AMI to launch [REQUIRED for create].")
- create_options.add_option("--blockDevice",
- dest="block_devices",
- metavar="DEVICE-NAME DEVICE-SIZE-GB",
- action="append",
- default=[],
+ create_options.add_option("--blockDevice", dest="block_devices",
+ metavar="DEVICE-NAME DEVICE-SIZE-GB", action="append", default=[],
nargs=2,
- help="EBS device name and volume size in GiB."
- " More than one device can be attached, by specifying"
- " this option more than once."
- " The device will be deleted on termination of the instance.")
-
- create_options.add_option("--instanceType",
- dest="instance_type",
- default="t1.micro",
+ help=("EBS device name and volume size in GiB."
+ " More than one device can be attached, by specifying"
+ " this option more than once."
+ " The device will be deleted on termination of the instance."))
+
+ create_options.add_option("--instanceType", dest="instance_type", default="t1.micro",
help="EC2 instance type to launch, defaults to '%default'.")
- create_options.add_option("--keyName",
- dest="key_name",
- default=None,
+ create_options.add_option("--keyName", dest="key_name", default=None,
help="EC2 key name [REQUIRED for create].")
- create_options.add_option("--securityGroupIds",
- dest="security_group_ids",
- action="append",
+ create_options.add_option("--securityGroupIds", dest="security_group_ids", action="append",
default=[],
- help="EC2 security group ids. More than one security group id can be"
- " added, by specifying this option more than once.")
+ help=("EC2 security group ids. More than one security group id can be"
+ " added, by specifying this option more than once."))
- create_options.add_option("--securityGroup",
- dest="security_groups",
- action="append",
+ create_options.add_option("--securityGroup", dest="security_groups", action="append",
default=[],
- help="EC2 security group. More than one security group can be added,"
- " by specifying this option more than once.")
+ help=("EC2 security group. More than one security group can be added,"
+ " by specifying this option more than once."))
- create_options.add_option("--subnetId",
- dest="subnet_id",
- default=None,
+ create_options.add_option("--subnetId", dest="subnet_id", default=None,
help="EC2 subnet id to use in VPC.")
- create_options.add_option("--tagExpireHours",
- dest="tag_expire_hours",
- type=int,
- default=2,
+ create_options.add_option("--tagExpireHours", dest="tag_expire_hours", type=int, default=2,
help="EC2 tag expire time in hours, defaults to '%default'.")
- create_options.add_option("--tagName",
- dest="tag_name",
- default="",
+ create_options.add_option("--tagName", dest="tag_name", default="",
help="EC2 tag and instance name.")
- create_options.add_option("--tagOwner",
- dest="tag_owner",
- default="",
- help="EC2 tag owner.")
+ create_options.add_option("--tagOwner", dest="tag_owner", default="", help="EC2 tag owner.")
- create_options.add_option("--extraArgs",
- dest="extra_args",
- metavar="{key1: value1, key2: value2, ..., keyN: valueN}",
- default=None,
- help="EC2 create instance keyword args. The argument is specified as"
- " bracketed YAML - i.e. JSON with support for single quoted"
- " and unquoted keys. Example, '{DryRun: True}'")
+ create_options.add_option(
+ "--extraArgs", dest="extra_args", metavar="{key1: value1, key2: value2, ..., keyN: valueN}",
+ default=None, help=("EC2 create instance keyword args. The argument is specified as"
+ " bracketed YAML - i.e. JSON with support for single quoted"
+ " and unquoted keys. Example, '{DryRun: True}'"))
parser.add_option_group(control_options)
parser.add_option_group(create_options)
@@ -331,34 +266,25 @@ def main():
# The 'expire-on' key is a UTC time.
expire_dt = datetime.datetime.utcnow() + datetime.timedelta(hours=options.tag_expire_hours)
tags = [{"Key": "expire-on", "Value": expire_dt.strftime("%Y-%m-%d %H:%M:%S")},
- {"Key": "Name", "Value": options.tag_name},
- {"Key": "owner", "Value": options.tag_owner}]
+ {"Key": "Name",
+ "Value": options.tag_name}, {"Key": "owner", "Value": options.tag_owner}]
my_kwargs = {}
if options.extra_args is not None:
my_kwargs = yaml.safe_load(options.extra_args)
(ret_code, instance_status) = aws_ec2.launch_instance(
- ami=options.ami,
- instance_type=options.instance_type,
- block_devices=block_devices,
- key_name=options.key_name,
- security_group_ids=options.security_group_ids,
- security_groups=options.security_groups,
- subnet_id=options.subnet_id,
- tags=tags,
- wait_time_secs=options.wait_time_secs,
- show_progress=True,
- **my_kwargs)
+ ami=options.ami, instance_type=options.instance_type, block_devices=block_devices,
+ key_name=options.key_name, security_group_ids=options.security_group_ids,
+ security_groups=options.security_groups, subnet_id=options.subnet_id, tags=tags,
+ wait_time_secs=options.wait_time_secs, show_progress=True, **my_kwargs)
else:
if not getattr(options, "image_id", None):
parser.print_help()
parser.error("Missing required control option")
(ret_code, instance_status) = aws_ec2.control_instance(
- mode=options.mode,
- image_id=options.image_id,
- wait_time_secs=options.wait_time_secs,
+ mode=options.mode, image_id=options.image_id, wait_time_secs=options.wait_time_secs,
show_progress=True)
print("Return code: {}, Instance status:".format(ret_code))
@@ -370,5 +296,6 @@ def main():
sys.exit(ret_code)
+
if __name__ == "__main__":
main()
diff --git a/buildscripts/burn_in_tests.py b/buildscripts/burn_in_tests.py
index fa37a810190..87a4098b87b 100644
--- a/buildscripts/burn_in_tests.py
+++ b/buildscripts/burn_in_tests.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-
"""
Command line utility for determining what jstests have been added or modified
"""
@@ -19,14 +18,12 @@ import sys
import urlparse
import yaml
-
# Get relative imports to work when the package is not installed on the PYTHONPATH.
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from buildscripts import resmokelib
from buildscripts.ciconfig import evergreen
-
API_SERVER_DEFAULT = "https://evergreen.mongodb.com"
@@ -43,12 +40,12 @@ def parse_command_line():
help="The base commit to compare to for determining changes.")
parser.add_option("--buildVariant", dest="buildvariant",
- help="The buildvariant the tasks will execute on. \
- Required when generating the JSON file with test executor information")
+ help=("The buildvariant the tasks will execute on. Required when"
+ " generating the JSON file with test executor information"))
parser.add_option("--checkEvergreen", dest="check_evergreen", action="store_true",
- help="Checks Evergreen for the last commit that was scheduled. \
- This way all the tests that haven't been burned in will be run.")
+ help=("Checks Evergreen for the last commit that was scheduled."
+ " This way all the tests that haven't been burned in will be run."))
parser.add_option("--noExec", dest="no_exec", action="store_true",
help="Do not run resmoke loop on new tests.")
@@ -64,18 +61,10 @@ def parse_command_line():
# The executor_file and suite_files defaults are required to make the
# suite resolver work correctly.
- parser.set_defaults(base_commit=None,
- branch="master",
- buildvariant=None,
- check_evergreen=False,
- evergreen_file="etc/evergreen.yml",
- selector_file="etc/burn_in_tests.yml",
- max_revisions=25,
- no_exec=False,
- executor_file=None,
- report_file="report.json",
- suite_files="with_server",
- test_list_file=None,
+ parser.set_defaults(base_commit=None, branch="master", buildvariant=None, check_evergreen=False,
+ evergreen_file="etc/evergreen.yml", selector_file="etc/burn_in_tests.yml",
+ max_revisions=25, no_exec=False, executor_file=None,
+ report_file="report.json", suite_files="with_server", test_list_file=None,
test_list_outfile=None)
# This disables argument parsing on the first unrecognized parameter. This allows us to pass
@@ -96,7 +85,8 @@ def read_evg_config():
file_list = [
"./.evergreen.yml",
os.path.expanduser("~/.evergreen.yml"),
- os.path.expanduser("~/cli_bin/.evergreen.yml")]
+ os.path.expanduser("~/cli_bin/.evergreen.yml")
+ ]
for filename in file_list:
if os.path.isfile(filename):
@@ -153,8 +143,8 @@ def find_changed_tests(branch_name, base_commit, max_revisions, buildvariant, ch
# The current commit will be activated in Evergreen; we use --skip to start at the
# previous commit when trying to find the most recent preceding commit that has been
# activated.
- revs_to_check = callo(["git", "rev-list", base_commit,
- "--max-count=200", "--skip=1"]).splitlines()
+ revs_to_check = callo(["git", "rev-list", base_commit, "--max-count=200",
+ "--skip=1"]).splitlines()
last_activated = find_last_activated_task(revs_to_check, buildvariant, branch_name)
if last_activated is None:
# When the current commit is the first time 'buildvariant' has run, there won't be a
@@ -210,8 +200,8 @@ def find_exclude_tests(selector_file):
try:
js_test = yml['selector']['js_test']
except KeyError:
- raise Exception("The selector file " + selector_file +
- " is missing the 'selector.js_test' key")
+ raise Exception(
+ "The selector file " + selector_file + " is missing the 'selector.js_test' key")
return (resmokelib.utils.default_if_none(js_test.get("exclude_suites"), []),
resmokelib.utils.default_if_none(js_test.get("exclude_tasks"), []),
@@ -299,10 +289,7 @@ def create_task_list(evergreen_conf, buildvariant, suites, exclude_tasks):
for task_name, task_arg in variant_task_args.items():
# Find the resmoke_args for matching suite names.
if re.compile('--suites=' + suite + '(?:\s+|$)').match(task_arg):
- tasks_to_run[task_name] = {
- "resmoke_args": task_arg,
- "tests": suites[suite]
- }
+ tasks_to_run[task_name] = {"resmoke_args": task_arg, "tests": suites[suite]}
return tasks_to_run
@@ -371,11 +358,8 @@ def main():
"\t", "\n\t".join(sorted(evergreen_conf.variant_names))
sys.exit(1)
- changed_tests = find_changed_tests(values.branch,
- values.base_commit,
- values.max_revisions,
- values.buildvariant,
- values.check_evergreen)
+ changed_tests = find_changed_tests(values.branch, values.base_commit, values.max_revisions,
+ values.buildvariant, values.check_evergreen)
exclude_suites, exclude_tasks, exclude_tests = find_exclude_tests(values.selector_file)
changed_tests = filter_tests(changed_tests, exclude_tests)
# If there are no changed tests, exit cleanly.
@@ -385,12 +369,9 @@ def main():
_write_report_file({}, values.test_list_outfile)
sys.exit(0)
suites = resmokelib.suitesconfig.get_suites(
- suite_files=values.suite_files.split(","),
- test_files=changed_tests)
+ suite_files=values.suite_files.split(","), test_files=changed_tests)
tests_by_executor = create_executor_list(suites, exclude_suites)
- tests_by_task = create_task_list(evergreen_conf,
- values.buildvariant,
- tests_by_executor,
+ tests_by_task = create_task_list(evergreen_conf, values.buildvariant, tests_by_executor,
exclude_tasks)
if values.test_list_outfile is not None:
_write_report_file(tests_by_task, values.test_list_outfile)
diff --git a/buildscripts/bypass_compile_and_fetch_binaries.py b/buildscripts/bypass_compile_and_fetch_binaries.py
index ca184e1e046..0612367d498 100644
--- a/buildscripts/bypass_compile_and_fetch_binaries.py
+++ b/buildscripts/bypass_compile_and_fetch_binaries.py
@@ -85,17 +85,17 @@ def generate_bypass_expansions(project, build_variant, revision, build_id):
# With compile bypass we need to update the URL to point to the correct name of the base commit
# binaries.
expansions["mongo_binaries"] = (archive_name("{}/{}/{}/binaries/mongo-{}".format(
- project, build_variant, revision, build_id)))
+ project, build_variant, revision, build_id)))
# With compile bypass we need to update the URL to point to the correct name of the base commit
# debug symbols.
expansions["mongo_debugsymbols"] = (archive_name("{}/{}/{}/debugsymbols/debugsymbols-{}".format(
- project, build_variant, revision, build_id)))
+ project, build_variant, revision, build_id)))
# With compile bypass we need to update the URL to point to the correct name of the base commit
# mongo shell.
expansions["mongo_shell"] = (archive_name("{}/{}/{}/binaries/mongo-shell-{}".format(
- project, build_variant, revision, build_id)))
+ project, build_variant, revision, build_id)))
# Enable bypass compile
expansions["bypass_compile"] = True
@@ -155,9 +155,8 @@ def should_bypass_compile():
if os.path.isdir(filename):
continue
- if (filename in requires_compile_files
- or any(filename.startswith(directory)
- for directory in requires_compile_directories)):
+ if (filename in requires_compile_files or any(
+ filename.startswith(directory) for directory in requires_compile_directories)):
print("Compile bypass disabled after detecting {} as being modified because"
" it is a file known to affect compilation.".format(filename))
return False
@@ -173,28 +172,21 @@ def should_bypass_compile():
def parse_args():
parser = argparse.ArgumentParser()
- parser.add_argument("--project",
- required=True,
+ parser.add_argument("--project", required=True,
help="The Evergreen project. e.g mongodb-mongo-master")
- parser.add_argument("--buildVariant",
- required=True,
+ parser.add_argument("--buildVariant", required=True,
help="The build variant. e.g enterprise-rhel-62-64-bit")
- parser.add_argument("--revision",
- required=True,
- help="The base commit hash.")
+ parser.add_argument("--revision", required=True, help="The base commit hash.")
- parser.add_argument("--patchFile",
- required=True,
+ parser.add_argument("--patchFile", required=True,
help="A list of all files modified in patch build.")
- parser.add_argument("--outFile",
- required=True,
+ parser.add_argument("--outFile", required=True,
help="The YAML file to write out the macro expansions.")
- parser.add_argument("--jsonArtifact",
- required=True,
+ parser.add_argument("--jsonArtifact", required=True,
help="The JSON file to write out the metadata of files to attach to task.")
return parser.parse_args()
@@ -224,7 +216,7 @@ def main():
api_server = "{url.scheme}://{url.netloc}".format(
url=urlparse(evg_config.get("api_server_host")))
revision_url = "{}/rest/v1/projects/{}/revisions/{}".format(api_server, args.project,
- args.revision)
+ args.revision)
revisions = requests_get_json(revision_url)
match = None
@@ -240,7 +232,7 @@ def main():
break
else:
print("Could not find build id for revision {} on project {}."
- " Default compile bypass to false.".format(args.revision, args.project))
+ " Default compile bypass to false.".format(args.revision, args.project))
return
# Generate the compile task id.
@@ -270,16 +262,20 @@ def main():
return
# Need to extract certain files from the pre-existing artifacts.tgz.
- extract_files = [executable_name("dbtest"), executable_name("mongobridge"),
- "build/integration_tests.txt"]
+ extract_files = [
+ executable_name("dbtest"),
+ executable_name("mongobridge"),
+ "build/integration_tests.txt",
+ ]
with tarfile.open(filename, "r:gz") as tar:
# The repo/ directory contains files needed by the package task. May
# need to add other files that would otherwise be generated by SCons
# if we did not bypass compile.
- subdir = [tarinfo for tarinfo in tar.getmembers()
- if tarinfo.name.startswith("build/integration_tests/")
- or tarinfo.name.startswith("repo/")
- or tarinfo.name in extract_files]
+ subdir = [
+ tarinfo for tarinfo in tar.getmembers()
+ if tarinfo.name.startswith("build/integration_tests/")
+ or tarinfo.name.startswith("repo/") or tarinfo.name in extract_files
+ ]
print("Extracting the following files from {0}...\n{1}".format(
filename, "\n".join(tarinfo.name for tarinfo in subdir)))
tar.extractall(members=subdir)
@@ -318,8 +314,9 @@ def main():
# Need to apply these expansions for bypassing SCons.
expansions = generate_bypass_expansions(args.project, args.buildVariant, args.revision,
- build_id)
+ build_id)
write_out_bypass_compile_expansions(args.outFile, **expansions)
+
if __name__ == "__main__":
main()
diff --git a/buildscripts/ciconfig/__init__.py b/buildscripts/ciconfig/__init__.py
index e69de29bb2d..4b7a2bb941b 100644
--- a/buildscripts/ciconfig/__init__.py
+++ b/buildscripts/ciconfig/__init__.py
@@ -0,0 +1 @@
+"""Empty."""
diff --git a/buildscripts/ciconfig/evergreen.py b/buildscripts/ciconfig/evergreen.py
index 9a4f7a9ed99..3c41d6e2012 100644
--- a/buildscripts/ciconfig/evergreen.py
+++ b/buildscripts/ciconfig/evergreen.py
@@ -20,8 +20,10 @@ class EvergreenProjectConfig(object):
self.path = path
self.tasks = [Task(task_dict) for task_dict in self._conf["tasks"]]
self._tasks_by_name = {task.name: task for task in self.tasks}
- self.variants = [Variant(variant_dict, self._tasks_by_name)
- for variant_dict in self._conf["buildvariants"]]
+ self.variants = [
+ Variant(variant_dict, self._tasks_by_name)
+ for variant_dict in self._conf["buildvariants"]
+ ]
self._variants_by_name = {variant.name: variant for variant in self.variants}
self.distro_names = set()
for variant in self.variants:
@@ -106,8 +108,10 @@ class Variant(object):
def __init__(self, conf_dict, task_map):
self.raw = conf_dict
run_on = self.run_on
- self.tasks = [VariantTask(task_map.get(t["name"]), t.get("distros", run_on), self)
- for t in conf_dict["tasks"]]
+ self.tasks = [
+ VariantTask(task_map.get(t["name"]), t.get("distros", run_on), self)
+ for t in conf_dict["tasks"]
+ ]
self.distro_names = set(run_on)
for task in self.tasks:
self.distro_names.update(task.run_on)
@@ -176,6 +180,7 @@ class Variant(object):
class VariantTask(Task):
"""Represent a task definition in the context of a build variant."""
+
def __init__(self, task, run_on, variant):
Task.__init__(self, task.raw)
self.run_on = run_on
@@ -199,7 +204,6 @@ class VariantTask(Task):
class ResmokeArgs(object):
-
@staticmethod
def get_arg(resmoke_args, name):
"""Return the value of the option --'name' in the 'resmoke_args' string or
diff --git a/buildscripts/ciconfig/tags.py b/buildscripts/ciconfig/tags.py
index 418d0e3c61b..7e9688714f5 100644
--- a/buildscripts/ciconfig/tags.py
+++ b/buildscripts/ciconfig/tags.py
@@ -14,7 +14,9 @@ import yaml
def _represent_dict_order(self, data):
return self.represent_mapping("tag:yaml.org,2002:map", data.items())
+
yaml.add_representer(collections.OrderedDict, _represent_dict_order)
+
# End setup
@@ -108,11 +110,8 @@ class TagsConfig(object):
"""
with open(filename, "w") as fstream:
if preamble:
- print(textwrap.fill(preamble,
- width=100,
- initial_indent="# ",
- subsequent_indent="# "),
- file=fstream)
+ print(textwrap.fill(preamble, width=100, initial_indent="# ",
+ subsequent_indent="# "), file=fstream)
# We use yaml.safe_dump() in order avoid having strings being written to the file as
# "!!python/unicode ..." and instead have them written as plain 'str' instances.
@@ -138,4 +137,3 @@ def setdefault(doc, key, default):
else:
doc[key] = default
return default
-
diff --git a/buildscripts/clang_format.py b/buildscripts/clang_format.py
index c3821722ff1..82496a07953 100755
--- a/buildscripts/clang_format.py
+++ b/buildscripts/clang_format.py
@@ -52,7 +52,9 @@ CLANG_FORMAT_HTTP_LINUX_CACHE = "https://s3.amazonaws.com/boxes.10gen.com/build/
CLANG_FORMAT_HTTP_DARWIN_CACHE = "https://s3.amazonaws.com/boxes.10gen.com/build/clang%2Bllvm-3.8.0-x86_64-apple-darwin.tar.xz"
# Path in the tarball to the clang-format binary
-CLANG_FORMAT_SOURCE_TAR_BASE = string.Template("clang+llvm-$version-$tar_path/bin/" + CLANG_FORMAT_PROGNAME)
+CLANG_FORMAT_SOURCE_TAR_BASE = string.Template(
+ "clang+llvm-$version-$tar_path/bin/" + CLANG_FORMAT_PROGNAME)
+
##############################################################################
def callo(args):
@@ -60,18 +62,18 @@ def callo(args):
"""
return subprocess.check_output(args)
+
def get_tar_path(version, tar_path):
""" Get the path to clang-format in the llvm tarball
"""
- return CLANG_FORMAT_SOURCE_TAR_BASE.substitute(
- version=version,
- tar_path=tar_path)
+ return CLANG_FORMAT_SOURCE_TAR_BASE.substitute(version=version, tar_path=tar_path)
+
def extract_clang_format(tar_path):
# Extract just the clang-format binary
# On OSX, we shell out to tar because tarfile doesn't support xz compression
if sys.platform == 'darwin':
- subprocess.call(['tar', '-xzf', tar_path, '*clang-format*'])
+ subprocess.call(['tar', '-xzf', tar_path, '*clang-format*'])
# Otherwise we use tarfile because some versions of tar don't support wildcards without
# a special flag
else:
@@ -81,6 +83,7 @@ def extract_clang_format(tar_path):
tarfp.extract(name)
tarfp.close()
+
def get_clang_format_from_cache_and_extract(url, tarball_ext):
"""Get clang-format from mongodb's cache
and extract the tarball
@@ -89,8 +92,8 @@ def get_clang_format_from_cache_and_extract(url, tarball_ext):
temp_tar_file = os.path.join(dest_dir, "temp.tar" + tarball_ext)
# Download from file
- print("Downloading clang-format %s from %s, saving to %s" % (CLANG_FORMAT_VERSION,
- url, temp_tar_file))
+ print("Downloading clang-format %s from %s, saving to %s" % (CLANG_FORMAT_VERSION, url,
+ temp_tar_file))
# Retry download up to 5 times.
num_tries = 5
@@ -98,7 +101,7 @@ def get_clang_format_from_cache_and_extract(url, tarball_ext):
try:
resp = urllib2.urlopen(url)
with open(temp_tar_file, 'wb') as f:
- f.write(resp.read())
+ f.write(resp.read())
break
except urllib2.URLError:
if attempt == num_tries - 1:
@@ -107,6 +110,7 @@ def get_clang_format_from_cache_and_extract(url, tarball_ext):
extract_clang_format(temp_tar_file)
+
def get_clang_format_from_darwin_cache(dest_file):
"""Download clang-format from llvm.org, unpack the tarball,
and put clang-format in the specified place
@@ -116,6 +120,7 @@ def get_clang_format_from_darwin_cache(dest_file):
# Destination Path
shutil.move(get_tar_path(CLANG_FORMAT_VERSION, "x86_64-apple-darwin"), dest_file)
+
def get_clang_format_from_linux_cache(dest_file):
"""Get clang-format from mongodb's cache
"""
@@ -124,10 +129,12 @@ def get_clang_format_from_linux_cache(dest_file):
# Destination Path
shutil.move("build/bin/clang-format", dest_file)
+
class ClangFormat(object):
"""Class encapsulates finding a suitable copy of clang-format,
and linting/formating an individual file
"""
+
def __init__(self, path, cache_dir):
self.path = None
clang_format_progname_ext = ""
@@ -154,10 +161,10 @@ class ClangFormat(object):
# Check for various versions staring with binaries with version specific suffixes in the
# user's path
programs = [
- CLANG_FORMAT_PROGNAME + "-" + CLANG_FORMAT_VERSION,
- CLANG_FORMAT_PROGNAME + "-" + CLANG_FORMAT_SHORT_VERSION,
- CLANG_FORMAT_PROGNAME,
- ]
+ CLANG_FORMAT_PROGNAME + "-" + CLANG_FORMAT_VERSION,
+ CLANG_FORMAT_PROGNAME + "-" + CLANG_FORMAT_SHORT_VERSION,
+ CLANG_FORMAT_PROGNAME,
+ ]
if sys.platform == "win32":
for i in range(len(programs)):
@@ -178,7 +185,7 @@ class ClangFormat(object):
programfiles = [
os.environ["ProgramFiles"],
os.environ["ProgramFiles(x86)"],
- ]
+ ]
for programfile in programfiles:
win32bin = os.path.join(programfile, "LLVM\\bin\\clang-format.exe")
@@ -191,7 +198,9 @@ class ClangFormat(object):
if not os.path.isdir(cache_dir):
os.makedirs(cache_dir)
- self.path = os.path.join(cache_dir, CLANG_FORMAT_PROGNAME + "-" + CLANG_FORMAT_VERSION + clang_format_progname_ext)
+ self.path = os.path.join(
+ cache_dir,
+ CLANG_FORMAT_PROGNAME + "-" + CLANG_FORMAT_VERSION + clang_format_progname_ext)
# Download a new version if the cache is empty or stale
if not os.path.isfile(self.path) or not self._validate_version():
@@ -201,7 +210,7 @@ class ClangFormat(object):
get_clang_format_from_darwin_cache(self.path)
else:
print("ERROR: clang-format.py does not support downloading clang-format " +
- " on this platform, please install clang-format " + CLANG_FORMAT_VERSION)
+ " on this platform, please install clang-format " + CLANG_FORMAT_VERSION)
# Validate we have the correct version
# We only can fail here if the user specified a clang-format binary and it is the wrong
@@ -220,8 +229,8 @@ class ClangFormat(object):
if CLANG_FORMAT_VERSION in cf_version:
return True
- print("WARNING: clang-format found in path, but incorrect version found at " +
- self.path + " with version: " + cf_version)
+ print("WARNING: clang-format found in path, but incorrect version found at " + self.path +
+ " with version: " + cf_version)
return False
@@ -243,8 +252,8 @@ class ClangFormat(object):
# Take a lock to ensure diffs do not get mixed when printed to the screen
with self.print_lock:
print("ERROR: Found diff for " + file_name)
- print("To fix formatting errors, run %s --style=file -i %s" %
- (self.path, file_name))
+ print("To fix formatting errors, run %s --style=file -i %s" % (self.path,
+ file_name))
for line in result:
print(line.rstrip())
@@ -275,8 +284,10 @@ class ClangFormat(object):
return formatted
+
files_re = re.compile('\\.(h|hpp|ipp|cpp|js)$')
+
def is_interesting_file(file_name):
""""Return true if this file should be checked
"""
@@ -284,16 +295,19 @@ def is_interesting_file(file_name):
and not file_name.startswith("src/third_party/")
and not file_name.startswith("src/mongo/gotools/")) and files_re.search(file_name)
+
def get_list_from_lines(lines):
""""Convert a string containing a series of lines into a list of strings
"""
return [line.rstrip() for line in lines.splitlines()]
+
def _get_build_dir():
"""Get the location of the scons' build directory in case we need to download clang-format
"""
return os.path.join(git.get_base_dir(), "build")
+
def _lint_files(clang_format, files):
"""Lint a list of files with clang-format
"""
@@ -305,6 +319,7 @@ def _lint_files(clang_format, files):
print("ERROR: Code Style does not match coding style")
sys.exit(1)
+
def lint_patch(clang_format, infile):
"""Lint patch command entry point
"""
@@ -314,6 +329,7 @@ def lint_patch(clang_format, infile):
if files:
_lint_files(clang_format, files)
+
def lint(clang_format):
"""Lint files command entry point
"""
@@ -323,6 +339,7 @@ def lint(clang_format):
return True
+
def lint_all(clang_format):
"""Lint files command entry point based on working tree
"""
@@ -332,18 +349,20 @@ def lint_all(clang_format):
return True
+
def _format_files(clang_format, files):
"""Format a list of files with clang-format
"""
clang_format = ClangFormat(clang_format, _get_build_dir())
format_clean = parallel.parallel_process([os.path.abspath(f) for f in files],
- clang_format.format)
+ clang_format.format)
if not format_clean:
print("ERROR: failed to format files")
sys.exit(1)
+
def format_func(clang_format):
"""Format files command entry point
"""
@@ -351,6 +370,7 @@ def format_func(clang_format):
_format_files(clang_format, files)
+
def reformat_branch(clang_format, commit_prior_to_reformat, commit_after_reformat):
"""Reformat a branch made before a clang-format run
"""
@@ -367,15 +387,16 @@ def reformat_branch(clang_format, commit_prior_to_reformat, commit_after_reforma
# Validate that user passes valid commits
if not repo.is_commit(commit_prior_to_reformat):
raise ValueError("Commit Prior to Reformat '%s' is not a valid commit in this repo" %
- commit_prior_to_reformat)
+ commit_prior_to_reformat)
if not repo.is_commit(commit_after_reformat):
- raise ValueError("Commit After Reformat '%s' is not a valid commit in this repo" %
- commit_after_reformat)
+ raise ValueError(
+ "Commit After Reformat '%s' is not a valid commit in this repo" % commit_after_reformat)
if not repo.is_ancestor(commit_prior_to_reformat, commit_after_reformat):
raise ValueError(("Commit Prior to Reformat '%s' is not a valid ancestor of Commit After" +
- " Reformat '%s' in this repo") % (commit_prior_to_reformat, commit_after_reformat))
+ " Reformat '%s' in this repo") % (commit_prior_to_reformat,
+ commit_after_reformat))
# Validate the user is on a local branch that has the right merge base
if repo.is_detached():
@@ -383,27 +404,36 @@ def reformat_branch(clang_format, commit_prior_to_reformat, commit_after_reforma
# Validate the user has no pending changes
if repo.is_working_tree_dirty():
- raise ValueError("Your working tree has pending changes. You must have a clean working tree before proceeding.")
+ raise ValueError(
+ "Your working tree has pending changes. You must have a clean working tree before proceeding."
+ )
merge_base = repo.get_merge_base(commit_prior_to_reformat)
if not merge_base == commit_prior_to_reformat:
- raise ValueError("Please rebase to '%s' and resolve all conflicts before running this script" % (commit_prior_to_reformat))
+ raise ValueError(
+ "Please rebase to '%s' and resolve all conflicts before running this script" %
+ (commit_prior_to_reformat))
# We assume the target branch is master, it could be a different branch if needed for testing
merge_base = repo.get_merge_base("master")
if not merge_base == commit_prior_to_reformat:
- raise ValueError("This branch appears to already have advanced too far through the merge process")
+ raise ValueError(
+ "This branch appears to already have advanced too far through the merge process")
# Everything looks good so lets start going through all the commits
branch_name = repo.get_branch_name()
new_branch = "%s-reformatted" % branch_name
if repo.does_branch_exist(new_branch):
- raise ValueError("The branch '%s' already exists. Please delete the branch '%s', or rename the current branch." % (new_branch, new_branch))
+ raise ValueError(
+ "The branch '%s' already exists. Please delete the branch '%s', or rename the current branch."
+ % (new_branch, new_branch))
- commits = get_list_from_lines(repo.log(["--reverse", "--pretty=format:%H", "%s..HEAD" % commit_prior_to_reformat]))
+ commits = get_list_from_lines(
+ repo.log(["--reverse", "--pretty=format:%H",
+ "%s..HEAD" % commit_prior_to_reformat]))
previous_commit_base = commit_after_reformat
@@ -423,8 +453,8 @@ def reformat_branch(clang_format, commit_prior_to_reformat, commit_after_reforma
# Format each file needed if it was not deleted
if not os.path.exists(commit_file):
- print("Skipping file '%s' since it has been deleted in commit '%s'" % (
- commit_file, commit_hash))
+ print("Skipping file '%s' since it has been deleted in commit '%s'" % (commit_file,
+ commit_hash))
deleted_files.append(commit_file)
continue
@@ -432,11 +462,11 @@ def reformat_branch(clang_format, commit_prior_to_reformat, commit_after_reforma
clang_format.format(commit_file)
else:
print("Skipping file '%s' since it is not a file clang_format should format" %
- commit_file)
+ commit_file)
# Check if anything needed reformatting, and if so amend the commit
if not repo.is_working_tree_dirty():
- print ("Commit %s needed no reformatting" % commit_hash)
+ print("Commit %s needed no reformatting" % commit_hash)
else:
repo.commit(["--all", "--amend", "--no-edit"])
@@ -448,8 +478,8 @@ def reformat_branch(clang_format, commit_prior_to_reformat, commit_after_reforma
repo.checkout(["--quiet", previous_commit_base])
# Copy each file from the reformatted commit on top of the post reformat
- diff_files = get_list_from_lines(repo.diff(["%s~..%s" % (previous_commit, previous_commit),
- "--name-only"]))
+ diff_files = get_list_from_lines(
+ repo.diff(["%s~..%s" % (previous_commit, previous_commit), "--name-only"]))
for diff_file in diff_files:
# If the file was deleted in the commit we are reformatting, we need to delete it again
@@ -478,7 +508,8 @@ def reformat_branch(clang_format, commit_prior_to_reformat, commit_after_reforma
repo.checkout(["-b", new_branch])
print("reformat-branch is done running.\n")
- print("A copy of your branch has been made named '%s', and formatted with clang-format.\n" % new_branch)
+ print("A copy of your branch has been made named '%s', and formatted with clang-format.\n" %
+ new_branch)
print("The original branch has been left unchanged.")
print("The next step is to rebase the new branch on 'master'.")
@@ -486,7 +517,10 @@ def reformat_branch(clang_format, commit_prior_to_reformat, commit_after_reforma
def usage():
"""Print usage
"""
- print("clang-format.py supports 5 commands [ lint, lint-all, lint-patch, format, reformat-branch].")
+ print(
+ "clang-format.py supports 5 commands [ lint, lint-all, lint-patch, format, reformat-branch]."
+ )
+
def main():
"""Main entry point
@@ -510,7 +544,9 @@ def main():
elif command == "reformat-branch":
if len(args) < 3:
- print("ERROR: reformat-branch takes two parameters: commit_prior_to_reformat commit_after_reformat")
+ print(
+ "ERROR: reformat-branch takes two parameters: commit_prior_to_reformat commit_after_reformat"
+ )
return
reformat_branch(options.clang_format, args[2], args[3])
@@ -519,5 +555,6 @@ def main():
else:
usage()
+
if __name__ == "__main__":
main()
diff --git a/buildscripts/collect_resource_info.py b/buildscripts/collect_resource_info.py
index 56bbb073fca..211d00e3235 100755
--- a/buildscripts/collect_resource_info.py
+++ b/buildscripts/collect_resource_info.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-
"""
Collect system resource information on processes running in Evergreen on a given interval.
"""
@@ -16,29 +15,23 @@ import time
from bson.json_util import dumps
import requests
-
# Get relative imports to work when the package is not installed on the PYTHONPATH.
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from buildscripts.resmokelib import utils
-
+
def main():
usage = "usage: %prog [options]"
parser = optparse.OptionParser(description=__doc__, usage=usage)
- parser.add_option("-i", "--interval",
- dest="interval",
- default=5,
- type="int",
+ parser.add_option("-i", "--interval", dest="interval", default=5, type="int",
help="Collect system resource information every <interval> seconds. "
- "Default is every 5 seconds.")
- parser.add_option("-o", "--output-file",
- dest="outfile",
- default="-",
+ "Default is every 5 seconds.")
+ parser.add_option("-o", "--output-file", dest="outfile", default="-",
help="If '-', then the file is written to stdout."
- " Any other value is treated as the output file name. By default,"
- " output is written to stdout.")
+ " Any other value is treated as the output file name. By default,"
+ " output is written to stdout.")
(options, _) = parser.parse_args()
diff --git a/buildscripts/combine_reports.py b/buildscripts/combine_reports.py
index cbfbaf9d65d..e59cc29f172 100755
--- a/buildscripts/combine_reports.py
+++ b/buildscripts/combine_reports.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-
"""
Combines JSON report files used in Evergreen
"""
@@ -53,15 +52,11 @@ def check_error(input_count, output_count):
def main():
usage = "usage: %prog [options] report1.json report2.json ..."
parser = OptionParser(description=__doc__, usage=usage)
- parser.add_option("-o", "--output-file",
- dest="outfile",
- default="-",
- help="If '-', then the combined report file is written to stdout."
- " Any other value is treated as the output file name. By default,"
- " output is written to stdout.")
- parser.add_option("-x", "--no-report-exit",
- dest="report_exit",
- default=True,
+ parser.add_option("-o", "--output-file", dest="outfile", default="-",
+ help=("If '-', then the combined report file is written to stdout."
+ " Any other value is treated as the output file name. By default,"
+ " output is written to stdout."))
+ parser.add_option("-x", "--no-report-exit", dest="report_exit", default=True,
action="store_false",
help="Do not exit with a non-zero code if any test in the report fails.")
diff --git a/buildscripts/errorcodes.py b/buildscripts/errorcodes.py
index cc46789907f..17c59badde3 100755
--- a/buildscripts/errorcodes.py
+++ b/buildscripts/errorcodes.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-
"""Produces a report of all assertions in the MongoDB server codebase.
Parses .cpp files for assertions and verifies assertion codes are distinct.
@@ -19,49 +18,50 @@ except ImportError:
print("*** Run 'pip2 install --user regex' to speed up error code checking")
import re
-ASSERT_NAMES = [ "uassert" , "massert", "fassert", "fassertFailed" ]
+ASSERT_NAMES = ["uassert", "massert", "fassert", "fassertFailed"]
MINIMUM_CODE = 10000
codes = []
# Each AssertLocation identifies the C++ source location of an assertion
-AssertLocation = namedtuple( "AssertLocation", ['sourceFile', 'byteOffset', 'lines', 'code'] )
+AssertLocation = namedtuple("AssertLocation", ['sourceFile', 'byteOffset', 'lines', 'code'])
list_files = False
+
# Of historical interest only
def assignErrorCodes():
cur = MINIMUM_CODE
for root in ASSERT_NAMES:
for x in utils.getAllSourceFiles():
- print( x )
+ print(x)
didAnything = False
fixed = ""
- for line in open( x ):
- s = line.partition( root + "(" )
- if s[1] == "" or line.startswith( "#define " + root):
+ for line in open(x):
+ s = line.partition(root + "(")
+ if s[1] == "" or line.startswith("#define " + root):
fixed += line
continue
- fixed += s[0] + root + "( " + str( cur ) + " , " + s[2]
+ fixed += s[0] + root + "( " + str(cur) + " , " + s[2]
cur = cur + 1
didAnything = True
if didAnything:
- out = open( x , 'w' )
- out.write( fixed )
+ out = open(x, 'w')
+ out.write(fixed)
out.close()
-def parseSourceFiles( callback ):
+def parseSourceFiles(callback):
"""Walks MongoDB sourcefiles and invokes callback for each AssertLocation found."""
quick = ["assert", "Exception", "ErrorCodes::Error"]
patterns = [
- re.compile( r"(?:u|m(?:sg)?)asser(?:t|ted)(?:NoTrace)?\s*\(\s*(\d+)", re.MULTILINE ) ,
- re.compile( r"(?:DB|Assertion)Exception\s*[({]\s*(\d+)", re.MULTILINE ),
- re.compile( r"fassert(?:Failed)?(?:WithStatus)?(?:NoTrace)?(?:StatusOK)?\s*\(\s*(\d+)",
- re.MULTILINE ),
- re.compile( r"ErrorCodes::Error\s*[({]\s*(\d+)", re.MULTILINE )
+ re.compile(r"(?:u|m(?:sg)?)asser(?:t|ted)(?:NoTrace)?\s*\(\s*(\d+)", re.MULTILINE),
+ re.compile(r"(?:DB|Assertion)Exception\s*[({]\s*(\d+)", re.MULTILINE),
+ re.compile(r"fassert(?:Failed)?(?:WithStatus)?(?:NoTrace)?(?:StatusOK)?\s*\(\s*(\d+)",
+ re.MULTILINE),
+ re.compile(r"ErrorCodes::Error\s*[({]\s*(\d+)", re.MULTILINE)
]
for sourceFile in utils.getAllSourceFiles(prefix='src/mongo/'):
@@ -83,12 +83,11 @@ def parseSourceFiles( callback ):
# Note that this will include the text of the full match but will report the
# position of the beginning of the code portion rather than the beginning of the
# match. This is to position editors on the spot that needs to change.
- thisLoc = AssertLocation(sourceFile,
- codeOffset,
- text[match.start():match.end()],
- code)
+ thisLoc = AssertLocation(sourceFile, codeOffset,
+ text[match.start():match.end()], code)
+
+ callback(thisLoc)
- callback( thisLoc )
# Converts an absolute position in a file into a line number.
def getLineAndColumnForPosition(loc, _file_cache={}):
@@ -105,7 +104,8 @@ def getLineAndColumnForPosition(loc, _file_cache={}):
column = loc.byteOffset - _file_cache[loc.sourceFile][line - 1] + 1
return (line, column)
-def isTerminated( lines ):
+
+def isTerminated(lines):
"""Given .cpp/.h source lines as text, determine if assert is terminated."""
x = " ".join(lines)
return ';' in x \
@@ -121,8 +121,7 @@ def getNextCode():
if not len(codes) > 0:
readErrorCodes()
- highest = reduce( lambda x, y: max(int(x), int(y)),
- (loc.code for loc in codes) )
+ highest = reduce(lambda x, y: max(int(x), int(y)), (loc.code for loc in codes))
return highest + 1
@@ -130,7 +129,7 @@ def checkErrorCodes():
"""SConstruct expects a boolean response from this function.
"""
(codes, errors) = readErrorCodes()
- return len( errors ) == 0
+ return len(errors) == 0
def readErrorCodes():
@@ -142,8 +141,8 @@ def readErrorCodes():
dups = defaultdict(list)
# define callback
- def checkDups( assertLoc ):
- codes.append( assertLoc )
+ def checkDups(assertLoc):
+ codes.append(assertLoc)
code = assertLoc.code
if not code in seen:
@@ -151,32 +150,32 @@ def readErrorCodes():
else:
if not code in dups:
# on first duplicate, add original to dups, errors
- dups[code].append( seen[code] )
- errors.append( seen[code] )
+ dups[code].append(seen[code])
+ errors.append(seen[code])
- dups[code].append( assertLoc )
- errors.append( assertLoc )
+ dups[code].append(assertLoc)
+ errors.append(assertLoc)
- parseSourceFiles( checkDups )
+ parseSourceFiles(checkDups)
if seen.has_key("0"):
code = "0"
bad = seen[code]
- errors.append( bad )
+ errors.append(bad)
line, col = getLineAndColumnForPosition(bad)
- print( "ZERO_CODE:" )
- print( " %s:%d:%d:%s" % (bad.sourceFile, line, col, bad.lines) )
+ print("ZERO_CODE:")
+ print(" %s:%d:%d:%s" % (bad.sourceFile, line, col, bad.lines))
for code, locations in dups.items():
- print( "DUPLICATE IDS: %s" % code )
+ print("DUPLICATE IDS: %s" % code)
for loc in locations:
line, col = getLineAndColumnForPosition(loc)
- print( " %s:%d:%d:%s" % (loc.sourceFile, line, col, loc.lines) )
+ print(" %s:%d:%d:%s" % (loc.sourceFile, line, col, loc.lines))
return (codes, errors)
-def replaceBadCodes( errors, nextCode ):
+def replaceBadCodes(errors, nextCode):
"""Modifies C++ source files to replace invalid assertion codes.
For now, we only modify zero codes.
@@ -189,8 +188,7 @@ def replaceBadCodes( errors, nextCode ):
for loc in skip_errors:
line, col = getLineAndColumnForPosition(loc)
- print ("SKIPPING NONZERO code=%s: %s:%d:%d"
- % (loc.code, loc.sourceFile, line, col))
+ print("SKIPPING NONZERO code=%s: %s:%d:%d" % (loc.code, loc.sourceFile, line, col))
# Dedupe, sort, and reverse so we don't have to update offsets as we go.
for assertLoc in reversed(sorted(set(zero_errors))):
@@ -209,14 +207,14 @@ def replaceBadCodes( errors, nextCode ):
f.seek(0)
f.write(text[:byteOffset])
f.write(str(nextCode))
- f.write(text[byteOffset+1:])
+ f.write(text[byteOffset + 1:])
f.seek(0)
print "LINE_%d_AFTER :%s" % (lineNum, f.readlines()[ln].rstrip())
nextCode += 1
-def getBestMessage( lines , codeStr ):
+def getBestMessage(lines, codeStr):
"""Extracts message from one AssertionLocation.lines entry
Args:
@@ -225,7 +223,7 @@ def getBestMessage( lines , codeStr ):
"""
line = lines if isinstance(lines, str) else " ".join(lines)
- err = line.partition( codeStr )[2]
+ err = line.partition(codeStr)[2]
if not err:
return ""
@@ -249,16 +247,14 @@ def getBestMessage( lines , codeStr ):
return err.strip()
+
def main():
parser = OptionParser(description=__doc__.strip())
- parser.add_option("--fix", dest="replace",
- action="store_true", default=False,
+ parser.add_option("--fix", dest="replace", action="store_true", default=False,
help="Fix zero codes in source files [default: %default]")
- parser.add_option("-q", "--quiet", dest="quiet",
- action="store_true", default=False,
+ parser.add_option("-q", "--quiet", dest="quiet", action="store_true", default=False,
help="Suppress output on success [default: %default]")
- parser.add_option("--list-files", dest="list_files",
- action="store_true", default=False,
+ parser.add_option("--list-files", dest="list_files", action="store_true", default=False,
help="Print the name of each file as it is scanned [default: %default]")
(options, args) = parser.parse_args()
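
For context on the duplicate-detection logic being reformatted above, here is a minimal, self-contained sketch of the same idea: scan C++ text for assertion codes and report any code that occurs more than once. The ErrorCodes regex is copied from the hunk above; the uassert/massert pattern, the helper name, and the sample input are illustrative assumptions, not the buildscripts module itself.

import re
from collections import defaultdict

# Second pattern is copied from the hunk above; the first is an assumed stand-in
# for the uassert/massert-style patterns the real script also scans for.
ASSERT_CODE_PATTERNS = [
    re.compile(r"(?:uassert|massert|fassert)\s*\(\s*(\d+)", re.MULTILINE),
    re.compile(r"ErrorCodes::Error\s*[({]\s*(\d+)", re.MULTILINE),
]


def find_duplicate_codes(cpp_text):
    """Return {code: [byte offsets]} for assertion codes that occur more than once."""
    seen = defaultdict(list)
    for pattern in ASSERT_CODE_PATTERNS:
        for match in pattern.finditer(cpp_text):
            seen[match.group(1)].append(match.start(1))
    return {code: offsets for code, offsets in seen.items() if len(offsets) > 1}


if __name__ == "__main__":
    sample = 'uassert(40001, "a", ok); massert(40001, "b", ok);'
    print(find_duplicate_codes(sample))  # {'40001': [8, 33]}
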
diff --git a/buildscripts/eslint.py b/buildscripts/eslint.py
index c1ab04fbab9..8a68220d5cd 100755
--- a/buildscripts/eslint.py
+++ b/buildscripts/eslint.py
@@ -52,11 +52,13 @@ ESLINT_HTTP_DARWIN_CACHE = "https://s3.amazonaws.com/boxes.10gen.com/build/eslin
# Path in the tarball to the ESLint binary.
ESLINT_SOURCE_TAR_BASE = string.Template(ESLINT_PROGNAME + "-$platform-$arch")
+
def callo(args):
"""Call a program, and capture its output
"""
return subprocess.check_output(args)
+
def extract_eslint(tar_path, target_file):
tarfp = tarfile.open(tar_path)
for name in tarfp.getnames():
@@ -64,6 +66,7 @@ def extract_eslint(tar_path, target_file):
tarfp.extract(name)
tarfp.close()
+
def get_eslint_from_cache(dest_file, platform, arch):
"""Get ESLint binary from mongodb's cache
"""
@@ -79,8 +82,7 @@ def get_eslint_from_cache(dest_file, platform, arch):
temp_tar_file = os.path.join(dest_dir, "temp.tar.gz")
# Download the file
- print("Downloading ESLint %s from %s, saving to %s" % (ESLINT_VERSION,
- url, temp_tar_file))
+ print("Downloading ESLint %s from %s, saving to %s" % (ESLINT_VERSION, url, temp_tar_file))
urllib.urlretrieve(url, temp_tar_file)
eslint_distfile = ESLINT_SOURCE_TAR_BASE.substitute(platform=platform, arch=arch)
@@ -91,6 +93,7 @@ def get_eslint_from_cache(dest_file, platform, arch):
class ESLint(object):
"""Class encapsulates finding a suitable copy of ESLint, and linting an individual file
"""
+
def __init__(self, path, cache_dir):
eslint_progname = ESLINT_PROGNAME
@@ -155,8 +158,8 @@ class ESLint(object):
return True
if warn:
- print("WARNING: eslint found in path, but incorrect version found at " +
- self.path + " with version: " + esl_version)
+ print("WARNING: eslint found in path, but incorrect version found at " + self.path +
+ " with version: " + esl_version)
return False
def _lint(self, file_name, print_diff):
@@ -189,17 +192,20 @@ class ESLint(object):
"""
return not subprocess.call([self.path, "--fix", file_name])
+
def is_interesting_file(file_name):
""""Return true if this file should be checked
"""
return ((file_name.startswith("src/mongo") or file_name.startswith("jstests"))
and file_name.endswith(".js"))
+
def _get_build_dir():
"""Get the location of the scons build directory in case we need to download ESLint
"""
return os.path.join(git.get_base_dir(), "build")
+
def _lint_files(eslint, files):
"""Lint a list of files with ESLint
"""
@@ -214,6 +220,7 @@ def _lint_files(eslint, files):
return True
+
def lint_patch(eslint, infile):
"""Lint patch command entry point
"""
@@ -224,6 +231,7 @@ def lint_patch(eslint, infile):
return _lint_files(eslint, files)
return True
+
def lint(eslint, dirmode, glob):
"""Lint files command entry point
"""
@@ -236,6 +244,7 @@ def lint(eslint, dirmode, glob):
return True
+
def _autofix_files(eslint, files):
"""Auto-fix the specified files with ESLint.
"""
@@ -247,6 +256,7 @@ def _autofix_files(eslint, files):
print("ERROR: failed to auto-fix files")
return False
+
def autofix_func(eslint, dirmode, glob):
"""Auto-fix files command entry point
"""
@@ -268,11 +278,16 @@ def main():
"provided patch file (for upload.py). "\
"fix runs ESLint with --fix on provided patterns "\
"or files under jstests/ and src/mongo."
- epilog ="*Unless you specify -d a separate ESLint process will be launched for every file"
+ epilog = "*Unless you specify -d a separate ESLint process will be launched for every file"
parser = OptionParser()
parser = OptionParser(usage=usage, description=description, epilog=epilog)
- parser.add_option("-e", "--eslint", type="string", dest="eslint",
- help="Fully qualified path to eslint executable",)
+ parser.add_option(
+ "-e",
+ "--eslint",
+ type="string",
+ dest="eslint",
+ help="Fully qualified path to eslint executable",
+ )
parser.add_option("-d", "--dirmode", action="store_true", default=True, dest="dirmode",
help="Considers the glob patterns as directories and runs ESLint process " \
"against each pattern",)
@@ -301,5 +316,7 @@ def main():
parser.print_help()
sys.exit(0 if success else 1)
+
+
if __name__ == "__main__":
main()
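
As a reading aid for the eslint.py changes above, this is a small sketch of the per-file linting path (the behaviour the epilog describes when -d is not given): filter to the JS files the script considers interesting, then launch one ESLint process per file. The eslint_path value and the candidate list are illustrative; the real script resolves the binary through its ESLint class.

import subprocess


def is_interesting_file(file_name):
    """Same predicate as in the diff: .js files under src/mongo or jstests."""
    return ((file_name.startswith("src/mongo") or file_name.startswith("jstests"))
            and file_name.endswith(".js"))


def lint_files_one_by_one(eslint_path, candidates):
    """Run a separate ESLint process for each interesting file; return the failures."""
    failures = []
    for file_name in filter(is_interesting_file, candidates):
        if subprocess.call([eslint_path, file_name]) != 0:
            failures.append(file_name)
    return failures


# Example (paths are hypothetical):
#   lint_files_one_by_one("/usr/local/bin/eslint", ["jstests/core/find1.js", "README.md"])
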
diff --git a/buildscripts/evergreen_run_tests.py b/buildscripts/evergreen_run_tests.py
index 2b30f19443c..3ed822287b3 100755
--- a/buildscripts/evergreen_run_tests.py
+++ b/buildscripts/evergreen_run_tests.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-
"""
Command line utility for executing MongoDB tests in Evergreen.
"""
@@ -17,7 +16,6 @@ if __name__ == "__main__" and __package__ is None:
from buildscripts import resmoke
from buildscripts import resmokelib
-
_TagInfo = collections.namedtuple("_TagInfo", ["tag_name", "evergreen_aware", "suite_options"])
@@ -27,23 +25,18 @@ class Main(resmoke.Main):
additional options for running unreliable tests in Evergreen.
"""
- UNRELIABLE_TAG = _TagInfo(tag_name="unreliable",
- evergreen_aware=True,
+ UNRELIABLE_TAG = _TagInfo(tag_name="unreliable", evergreen_aware=True,
suite_options=resmokelib.config.SuiteOptions.ALL_INHERITED._replace(
report_failure_status="silentfail"))
RESOURCE_INTENSIVE_TAG = _TagInfo(
- tag_name="resource_intensive",
- evergreen_aware=False,
+ tag_name="resource_intensive", evergreen_aware=False,
suite_options=resmokelib.config.SuiteOptions.ALL_INHERITED._replace(num_jobs=1))
RETRY_ON_FAILURE_TAG = _TagInfo(
- tag_name="retry_on_failure",
- evergreen_aware=True,
+ tag_name="retry_on_failure", evergreen_aware=True,
suite_options=resmokelib.config.SuiteOptions.ALL_INHERITED._replace(
- fail_fast=False,
- num_repeats=2,
- report_failure_status="silentfail"))
+ fail_fast=False, num_repeats=2, report_failure_status="silentfail"))
def _make_evergreen_aware_tags(self, tag_name):
"""
@@ -61,11 +54,11 @@ class Main(resmoke.Main):
if resmokelib.config.EVERGREEN_DISTRO_ID is not None:
tags_format.append("{tag_name}|{task_name}|{variant_name}|{distro_id}")
- return [tag.format(tag_name=tag_name,
- task_name=resmokelib.config.EVERGREEN_TASK_NAME,
- variant_name=resmokelib.config.EVERGREEN_VARIANT_NAME,
- distro_id=resmokelib.config.EVERGREEN_DISTRO_ID)
- for tag in tags_format]
+ return [
+ tag.format(tag_name=tag_name, task_name=resmokelib.config.EVERGREEN_TASK_NAME,
+ variant_name=resmokelib.config.EVERGREEN_VARIANT_NAME,
+ distro_id=resmokelib.config.EVERGREEN_DISTRO_ID) for tag in tags_format
+ ]
@classmethod
def _make_tag_combinations(cls):
@@ -77,31 +70,28 @@ class Main(resmoke.Main):
combinations = []
if resmokelib.config.EVERGREEN_PATCH_BUILD:
- combinations.append((
- "unreliable and resource intensive",
- ((cls.UNRELIABLE_TAG, True), (cls.RESOURCE_INTENSIVE_TAG, True))))
- combinations.append((
- "unreliable and not resource intensive",
- ((cls.UNRELIABLE_TAG, True), (cls.RESOURCE_INTENSIVE_TAG, False))))
- combinations.append((
- "reliable and resource intensive",
- ((cls.UNRELIABLE_TAG, False), (cls.RESOURCE_INTENSIVE_TAG, True))))
- combinations.append((
- "reliable and not resource intensive",
- ((cls.UNRELIABLE_TAG, False), (cls.RESOURCE_INTENSIVE_TAG, False))))
+ combinations.append(("unreliable and resource intensive",
+ ((cls.UNRELIABLE_TAG, True), (cls.RESOURCE_INTENSIVE_TAG, True))))
+ combinations.append(("unreliable and not resource intensive",
+ ((cls.UNRELIABLE_TAG, True), (cls.RESOURCE_INTENSIVE_TAG, False))))
+ combinations.append(("reliable and resource intensive",
+ ((cls.UNRELIABLE_TAG, False), (cls.RESOURCE_INTENSIVE_TAG, True))))
+ combinations.append(("reliable and not resource intensive",
+ ((cls.UNRELIABLE_TAG, False), (cls.RESOURCE_INTENSIVE_TAG,
+ False))))
else:
- combinations.append((
- "retry on failure and resource intensive",
- ((cls.RETRY_ON_FAILURE_TAG, True), (cls.RESOURCE_INTENSIVE_TAG, True))))
- combinations.append((
- "retry on failure and not resource intensive",
- ((cls.RETRY_ON_FAILURE_TAG, True), (cls.RESOURCE_INTENSIVE_TAG, False))))
- combinations.append((
- "run once and resource intensive",
- ((cls.RETRY_ON_FAILURE_TAG, False), (cls.RESOURCE_INTENSIVE_TAG, True))))
- combinations.append((
- "run once and not resource intensive",
- ((cls.RETRY_ON_FAILURE_TAG, False), (cls.RESOURCE_INTENSIVE_TAG, False))))
+ combinations.append(("retry on failure and resource intensive",
+ ((cls.RETRY_ON_FAILURE_TAG, True), (cls.RESOURCE_INTENSIVE_TAG,
+ True))))
+ combinations.append(("retry on failure and not resource intensive",
+ ((cls.RETRY_ON_FAILURE_TAG, True), (cls.RESOURCE_INTENSIVE_TAG,
+ False))))
+ combinations.append(("run once and resource intensive",
+ ((cls.RETRY_ON_FAILURE_TAG, False), (cls.RESOURCE_INTENSIVE_TAG,
+ True))))
+ combinations.append(("run once and not resource intensive",
+ ((cls.RETRY_ON_FAILURE_TAG, False), (cls.RESOURCE_INTENSIVE_TAG,
+ False))))
return combinations
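
The tag handling above builds Evergreen-aware tag names from the task, build variant and distro. A standalone sketch of that expansion follows; only the distro-qualified format string is visible in the hunk, so the first three entries are assumed, and the values in the trailing comment are made up.

def make_evergreen_aware_tags(tag_name, task_name, variant_name, distro_id=None):
    """Expand a base tag into Evergreen-aware variants, most specific last."""
    tags_format = [
        "{tag_name}",
        "{tag_name}|{task_name}",
        "{tag_name}|{task_name}|{variant_name}",
    ]
    if distro_id is not None:
        # This distro-qualified form is the one shown in the hunk above.
        tags_format.append("{tag_name}|{task_name}|{variant_name}|{distro_id}")
    return [
        tag.format(tag_name=tag_name, task_name=task_name, variant_name=variant_name,
                   distro_id=distro_id) for tag in tags_format
    ]


# make_evergreen_aware_tags("unreliable", "jsCore", "linux-64", "rhel62") ->
# ['unreliable', 'unreliable|jsCore', 'unreliable|jsCore|linux-64',
#  'unreliable|jsCore|linux-64|rhel62']
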
diff --git a/buildscripts/fetch_test_lifecycle.py b/buildscripts/fetch_test_lifecycle.py
index 945f26dace0..e1e3db1ffd1 100755
--- a/buildscripts/fetch_test_lifecycle.py
+++ b/buildscripts/fetch_test_lifecycle.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-
"""Script to retrieve the etc/test_lifecycle.yml tag file from the metadata repository that
corresponds to the current repository.
@@ -26,7 +25,6 @@ if __name__ == "__main__" and __package__ is None:
from buildscripts import git
-
LOGGER = logging.getLogger(__name__)
@@ -77,8 +75,8 @@ class MetadataRepository(object):
def get_lifecycle_file_content(self, metadata_revision):
"""Return the content of the test lifecycle file as it was at the given revision."""
- return self._repository.git_cat_file(["blob", "%s:%s" % (metadata_revision,
- self._lifecycle_file)])
+ return self._repository.git_cat_file(
+ ["blob", "%s:%s" % (metadata_revision, self._lifecycle_file)])
def _clone_repository(url, branch):
@@ -116,8 +114,8 @@ def fetch_test_lifecycle(metadata_repo_url, references_file, lifecycle_file, pro
project: the Evergreen project name.
revision: the current repository revision.
"""
- metadata_repo = MetadataRepository(_clone_repository(metadata_repo_url, project),
- references_file, lifecycle_file)
+ metadata_repo = MetadataRepository(
+ _clone_repository(metadata_repo_url, project), references_file, lifecycle_file)
mongo_repo = git.Repository(os.getcwd())
metadata_revision = _get_metadata_revision(metadata_repo, mongo_repo, project, revision)
if metadata_revision:
@@ -133,49 +131,39 @@ def main():
Utility to fetch the etc/test_lifecycle.yml file corresponding to a given revision from
the mongo-test-metadata repository.
"""
- parser = optparse.OptionParser(description=textwrap.dedent(main.__doc__),
- usage="Usage: %prog [options] evergreen-project")
+ parser = optparse.OptionParser(
+ description=textwrap.dedent(main.__doc__), usage="Usage: %prog [options] evergreen-project")
- parser.add_option("--revision", dest="revision",
- metavar="<revision>",
- default="HEAD",
+ parser.add_option("--revision", dest="revision", metavar="<revision>", default="HEAD",
help=("The project revision for which to retrieve the test lifecycle tags"
" file."))
- parser.add_option("--metadataRepo", dest="metadata_repo_url",
- metavar="<metadata-repo-url>",
+ parser.add_option("--metadataRepo", dest="metadata_repo_url", metavar="<metadata-repo-url>",
default="git@github.com:mongodb/mongo-test-metadata.git",
help=("The URL to the metadata repository that contains the test lifecycle"
" tags file."))
- parser.add_option("--lifecycleFile", dest="lifecycle_file",
- metavar="<lifecycle-file>",
+ parser.add_option("--lifecycleFile", dest="lifecycle_file", metavar="<lifecycle-file>",
default="etc/test_lifecycle.yml",
help=("The path to the test lifecycle tags file, relative to the root of the"
" metadata repository. Defaults to '%default'."))
- parser.add_option("--referencesFile", dest="references_file",
- metavar="<references-file>",
+ parser.add_option("--referencesFile", dest="references_file", metavar="<references-file>",
default="references.yml",
help=("The path to the metadata references file, relative to the root of the"
" metadata repository. Defaults to '%default'."))
- parser.add_option("--destinationFile", dest="destination_file",
- metavar="<destination-file>",
+ parser.add_option("--destinationFile", dest="destination_file", metavar="<destination-file>",
default="etc/test_lifecycle.yml",
help=("The path where the lifecycle file should be available when this script"
" completes successfully. This path is absolute or relative to the"
" current working directory. Defaults to '%default'."))
- parser.add_option("--logLevel", dest="log_level",
- metavar="<log-level>",
- choices=["DEBUG", "INFO", "WARNING", "ERROR"],
- default="INFO",
+ parser.add_option("--logLevel", dest="log_level", metavar="<log-level>",
+ choices=["DEBUG", "INFO", "WARNING", "ERROR"], default="INFO",
help="The log level: DEBUG, INFO, WARNING or ERROR. Defaults to '%default'.")
- parser.add_option("--logFile", dest="log_file",
- metavar="<log-file>",
- default=None,
+ parser.add_option("--logFile", dest="log_file", metavar="<log-file>", default=None,
help=("The destination file for the logs. If not set the script will log to"
" the standard output"))
@@ -187,14 +175,12 @@ def main():
parser.error("Must specify an Evergreen project")
evergreen_project = args[0]
- logging.basicConfig(format="%(asctime)s %(levelname)s %(message)s",
- level=options.log_level, filename=options.log_file)
+ logging.basicConfig(format="%(asctime)s %(levelname)s %(message)s", level=options.log_level,
+ filename=options.log_file)
lifecycle_file_content = fetch_test_lifecycle(options.metadata_repo_url,
- options.references_file,
- options.lifecycle_file,
- evergreen_project,
- options.revision)
+ options.references_file, options.lifecycle_file,
+ evergreen_project, options.revision)
if not lifecycle_file_content:
LOGGER.error("Failed to fetch the test lifecycle tag file.")
sys.exit(1)
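
The key operation above is reading etc/test_lifecycle.yml as it existed at a particular revision of the metadata repository. Stripped of the buildscripts git wrapper, that boils down to git cat-file; the sketch below shows the equivalent call (the repository path and revision in the example are placeholders).

import subprocess


def cat_file_at_revision(repo_dir, revision, path):
    """Return the content of <path> in the repository at <repo_dir> as of <revision>."""
    return subprocess.check_output(["git", "cat-file", "blob", "%s:%s" % (revision, path)],
                                   cwd=repo_dir)


# cat_file_at_revision("mongo-test-metadata", "HEAD", "etc/test_lifecycle.yml")
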
diff --git a/buildscripts/gdb/mongo.py b/buildscripts/gdb/mongo.py
index c63fbe84761..5888662b6ef 100644
--- a/buildscripts/gdb/mongo.py
+++ b/buildscripts/gdb/mongo.py
@@ -62,6 +62,7 @@ class DumpGlobalServiceContext(gdb.Command):
def invoke(self, arg, _from_tty):
gdb.execute("print *('mongo::(anonymous namespace)::globalServiceContext')")
+
# Register command
DumpGlobalServiceContext()
@@ -92,6 +93,7 @@ class MongoDBDumpLocks(gdb.Command):
except gdb.error as gdberr:
print("Ignoring error '%s' in dump_mongod_locks" % str(gdberr))
+
# Register command
MongoDBDumpLocks()
@@ -113,6 +115,7 @@ class BtIfActive(gdb.Command):
else:
gdb.execute("bt")
+
# Register command
BtIfActive()
@@ -204,6 +207,7 @@ class MongoDBUniqueStack(gdb.Command):
print(stack['output'])
print() # leave extra blank line after each thread stack
+
# Register command
MongoDBUniqueStack()
@@ -263,6 +267,7 @@ class MongoDBHelp(gdb.Command):
for key in mongo_commands:
print("%s - %s" % (key, mongo_commands[key]))
+
# Register command
MongoDBHelp()
diff --git a/buildscripts/gdb/mongo_lock.py b/buildscripts/gdb/mongo_lock.py
index 1d20ca8e0bc..b045f8344ca 100644
--- a/buildscripts/gdb/mongo_lock.py
+++ b/buildscripts/gdb/mongo_lock.py
@@ -131,8 +131,8 @@ class Graph(object):
color = ""
if nodes and node_key in nodes:
color = "color = red"
- sb.append(' "{}" [label="{}" {}]'.format(
- node_key, self.nodes[node_key]['node'], color))
+ sb.append(' "{}" [label="{}" {}]'.format(node_key, self.nodes[node_key]['node'],
+ color))
sb.append("}")
return "\n".join(sb)
@@ -222,8 +222,8 @@ def find_mutex_holder(graph, thread_dict, show):
# Use the thread LWP as a substitute for showing output or generating the graph.
if mutex_holder not in thread_dict:
print("Warning: Mutex at {} held by thread with LWP {}"
- " not found in thread_dict. Using LWP to track thread.".format(mutex_value,
- mutex_holder))
+ " not found in thread_dict. Using LWP to track thread.".format(
+ mutex_value, mutex_holder))
mutex_holder_id = mutex_holder
else:
mutex_holder_id = thread_dict[mutex_holder]
@@ -232,14 +232,11 @@ def find_mutex_holder(graph, thread_dict, show):
mutex_waiter_id = thread_dict[mutex_waiter_lwpid]
if show:
print("Mutex at {} held by thread 0x{:x} (LWP {})"
- " waited on by thread 0x{:x} (LWP {})".format(mutex_value,
- mutex_holder_id,
- mutex_holder,
- mutex_waiter_id,
- mutex_waiter_lwpid))
+ " waited on by thread 0x{:x} (LWP {})".format(
+ mutex_value, mutex_holder_id, mutex_holder, mutex_waiter_id, mutex_waiter_lwpid))
if graph:
- graph.add_edge(Thread(mutex_waiter_id, mutex_waiter_lwpid),
- Lock(long(mutex_value), "Mutex"))
+ graph.add_edge(
+ Thread(mutex_waiter_id, mutex_waiter_lwpid), Lock(long(mutex_value), "Mutex"))
graph.add_edge(Lock(long(mutex_value), "Mutex"), Thread(mutex_holder_id, mutex_holder))
@@ -268,11 +265,11 @@ def find_lock_manager_holders(graph, thread_dict, show):
if show:
print("MongoDB Lock at {} ({}) held by thread id 0x{:x} (LWP {})".format(
lock_head, lock_request["mode"], lock_thread_id, lock_thread_lwpid) +
- " waited on by thread 0x{:x} (LWP {})".format(thread_dict[lwpid], lwpid))
+ " waited on by thread 0x{:x} (LWP {})".format(thread_dict[lwpid], lwpid))
if graph:
graph.add_edge(Thread(thread_dict[lwpid], lwpid), Lock(long(lock_head), "MongoDB lock"))
- graph.add_edge(Lock(long(lock_head), "MongoDB lock"),
- Thread(lock_thread_id, lock_thread_lwpid))
+ graph.add_edge(
+ Lock(long(lock_head), "MongoDB lock"), Thread(lock_thread_id, lock_thread_lwpid))
lock_request_ptr = lock_request["next"]
@@ -311,6 +308,7 @@ def get_threads_info(graph=None):
class MongoDBShowLocks(gdb.Command):
"""Show MongoDB locks & pthread mutexes"""
+
def __init__(self):
register_mongo_command(self, "mongodb-show-locks", gdb.COMMAND_DATA)
@@ -325,11 +323,13 @@ class MongoDBShowLocks(gdb.Command):
except gdb.error as err:
print("Ignoring GDB error '%s' in mongodb_show_locks" % str(err))
+
MongoDBShowLocks()
class MongoDBWaitsForGraph(gdb.Command):
"""Create MongoDB WaitsFor lock graph [graph_file]"""
+
def __init__(self):
register_mongo_command(self, "mongodb-waitsfor-graph", gdb.COMMAND_DATA)
diff --git a/buildscripts/gdb/mongo_printers.py b/buildscripts/gdb/mongo_printers.py
index 1e0ada49fae..6c6d39e5ec7 100644
--- a/buildscripts/gdb/mongo_printers.py
+++ b/buildscripts/gdb/mongo_printers.py
@@ -51,6 +51,7 @@ class StatusPrinter(object):
class StatusWithPrinter:
"""Pretty-printer for mongo::StatusWith<>"""
+
def __init__(self, val):
self.val = val
@@ -190,8 +191,7 @@ class DecorablePrinter:
return 'map'
def to_string(self):
- return "Decorable<%s> with %s elems " % (self.val.type.template_argument(0),
- self.count)
+ return "Decorable<%s> with %s elems " % (self.val.type.template_argument(0), self.count)
def children(self):
decorationData = get_unique_ptr(self.val["_decorations"]["_decorationData"])
@@ -205,7 +205,7 @@ class DecorablePrinter:
# TODO: abstract out navigating a std::function
type_name = str(descriptor["constructor"]["_M_functor"]["_M_unused"]["_M_object"])
type_name = type_name[0:len(type_name) - 1]
- type_name = type_name[0: type_name.rindex(">")]
+ type_name = type_name[0:type_name.rindex(">")]
type_name = type_name[type_name.index("constructAt<"):].replace("constructAt<", "")
# If the type is a pointer type, strip the * at the end.
@@ -287,8 +287,8 @@ class MongoPrettyPrinterCollection(gdb.printing.PrettyPrinter):
if index == -1 or index + 1 == len(lookup_tag):
for printer in self.subprinters:
if printer.enabled and (
- (printer.is_template and lookup_tag.find(printer.prefix) == 0) or
- (not printer.is_template and lookup_tag == printer.prefix)):
+ (printer.is_template and lookup_tag.find(printer.prefix) == 0) or
+ (not printer.is_template and lookup_tag == printer.prefix)):
return printer.printer(val)
return None
@@ -301,9 +301,11 @@ def build_pretty_printer():
pp.add('Status', 'mongo::Status', False, StatusPrinter)
pp.add('StatusWith', 'mongo::StatusWith', True, StatusWithPrinter)
pp.add('StringData', 'mongo::StringData', False, StringDataPrinter)
- pp.add('UnorderedFastKeyTable', 'mongo::UnorderedFastKeyTable', True, UnorderedFastKeyTablePrinter)
+ pp.add('UnorderedFastKeyTable', 'mongo::UnorderedFastKeyTable', True,
+ UnorderedFastKeyTablePrinter)
return pp
+
###################################################################################################
#
# Setup
@@ -311,9 +313,6 @@ def build_pretty_printer():
###################################################################################################
# Register pretty-printers, replace existing mongo printers
-gdb.printing.register_pretty_printer(
- gdb.current_objfile(),
- build_pretty_printer(),
- True)
+gdb.printing.register_pretty_printer(gdb.current_objfile(), build_pretty_printer(), True)
print("MongoDB GDB pretty-printers loaded")
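
The printer registration reformatted above runs inside GDB's embedded Python. For orientation, here is a minimal registration of the same shape using GDB's stock RegexpCollectionPrettyPrinter; the printer class, the regexp and the '_data' member are hypothetical placeholders, not the real mongo printers.

import gdb.printing  # only importable inside a GDB session


class ExampleStringPrinter(object):
    """Hypothetical printer that renders a value by one of its fields."""

    def __init__(self, val):
        self.val = val

    def to_string(self):
        return self.val['_data']  # '_data' is a placeholder member name


def build_example_pretty_printer():
    pp = gdb.printing.RegexpCollectionPrettyPrinter("example")
    pp.add_printer('ExampleString', '^mongo::StringData$', ExampleStringPrinter)
    return pp


# Same call shape as the hunk above: the final True replaces any existing printers.
gdb.printing.register_pretty_printer(gdb.current_objfile(), build_example_pretty_printer(), True)
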
diff --git a/buildscripts/generate_compile_expansions_shared_cache.py b/buildscripts/generate_compile_expansions_shared_cache.py
index 4dc937cce12..57115e30038 100755
--- a/buildscripts/generate_compile_expansions_shared_cache.py
+++ b/buildscripts/generate_compile_expansions_shared_cache.py
@@ -96,10 +96,12 @@ def generate_scons_cache_expansions():
# Patches are read only
if os.getenv("IS_PATCH"):
- expansions["scons_cache_args"] = "--cache={0} --cache-dir='{1}' --cache-readonly".format(
- scons_cache_mode, default_cache_path)
+ expansions[
+ "scons_cache_args"] = "--cache={0} --cache-dir='{1}' --cache-readonly".format(
+ scons_cache_mode, default_cache_path)
else:
- expansions["scons_cache_args"] = "--cache={0} --cache-dir='{1}'".format(scons_cache_mode, default_cache_path)
+ expansions["scons_cache_args"] = "--cache={0} --cache-dir='{1}'".format(
+ scons_cache_mode, default_cache_path)
# Local shared cache - host-based
elif os.getenv("SCONS_CACHE_SCOPE") == "local":
@@ -111,7 +113,8 @@ def generate_scons_cache_expansions():
default_cache_path = os.path.join(default_cache_path_base, system_uuid)
expansions["scons_cache_path"] = default_cache_path
- expansions["scons_cache_args"] = "--cache={0} --cache-dir='{1}'".format(scons_cache_mode, default_cache_path)
+ expansions["scons_cache_args"] = "--cache={0} --cache-dir='{1}'".format(
+ scons_cache_mode, default_cache_path)
# No cache
else:
# Anything else is 'none'
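
The cache-expansion hunks above produce a single scons_cache_args string, and patch builds add --cache-readonly so they cannot write to the shared cache. A reduced sketch of that decision follows; the function name and example values are illustrative.

def scons_cache_args(scons_cache_mode, cache_path, is_patch):
    """Build the SCons cache flags; patch builds get a read-only cache."""
    args = "--cache={0} --cache-dir='{1}'".format(scons_cache_mode, cache_path)
    if is_patch:
        args += " --cache-readonly"
    return args


# scons_cache_args("all", "/data/scons-cache/abc123", True)
# -> "--cache=all --cache-dir='/data/scons-cache/abc123' --cache-readonly"
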
diff --git a/buildscripts/git.py b/buildscripts/git.py
index 228ab58ab7b..1f013c52ae2 100644
--- a/buildscripts/git.py
+++ b/buildscripts/git.py
@@ -21,18 +21,17 @@ if os.name == "posix" and sys.version_info[0] == 2:
import warnings
warnings.warn(("Falling back to using the subprocess module because subprocess32 isn't"
" available. When using the subprocess module, a child process may trigger"
- " an invalid free(). See SERVER-22219 for more details."),
- RuntimeWarning)
+ " an invalid free(). See SERVER-22219 for more details."), RuntimeWarning)
import subprocess
else:
import subprocess
-
LOGGER = logging.getLogger(__name__)
class Repository(object):
"""Represent a local git repository."""
+
def __init__(self, directory):
self.directory = directory
@@ -94,8 +93,7 @@ class Repository(object):
def get_origin_url(self):
"""Return the URL of the origin repository."""
- return self._callgito(
- "config", ["--local", "--get", "remote.origin.url"]).rstrip()
+ return self._callgito("config", ["--local", "--get", "remote.origin.url"]).rstrip()
def get_branch_name(self):
"""
@@ -126,8 +124,7 @@ class Repository(object):
"""Return True if the specified parent hash an ancestor of child hash."""
# If the common point between parent_revision and child_revision is
# parent_revision, then parent_revision is an ancestor of child_revision.
- merge_base = self._callgito("merge-base", [parent_revision,
- child_revision]).rstrip()
+ merge_base = self._callgito("merge-base", [parent_revision, child_revision]).rstrip()
return parent_revision == merge_base
def is_commit(self, revision):
@@ -253,8 +250,9 @@ class GitException(Exception):
element) that were run, if any.
stderr: the error output of the git command.
"""
- def __init__(self, message, returncode=None, cmd=None, process_args=None,
- stdout=None, stderr=None):
+
+ def __init__(self, message, returncode=None, cmd=None, process_args=None, stdout=None,
+ stderr=None):
Exception.__init__(self, message)
self.returncode = returncode
self.cmd = cmd
@@ -284,7 +282,6 @@ class GitCommandResult(object):
def check_returncode(self):
"""Raise GitException if the exit code is non-zero."""
if self.returncode:
- raise GitException(
- "Command '{0}' failed with code '{1}'".format(" ".join(self.process_args),
- self.returncode),
- self.returncode, self.cmd, self.process_args, self.stdout, self.stderr)
+ raise GitException("Command '{0}' failed with code '{1}'".format(
+ " ".join(self.process_args), self.returncode), self.returncode, self.cmd,
+ self.process_args, self.stdout, self.stderr)
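
Repository.is_ancestor, reformatted above, reduces to a single git merge-base call: parent is an ancestor of child exactly when the merge base of the two is parent itself. A standalone equivalent using subprocess (the revisions in the comment are placeholders):

import subprocess


def is_ancestor(repo_dir, parent_revision, child_revision):
    """Return True if parent_revision is an ancestor of child_revision."""
    merge_base = subprocess.check_output(["git", "merge-base", parent_revision, child_revision],
                                         cwd=repo_dir).strip()
    return merge_base.decode("utf-8") == parent_revision


# is_ancestor(".", "<parent-sha>", "HEAD")
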
diff --git a/buildscripts/hang_analyzer.py b/buildscripts/hang_analyzer.py
index d554e67c384..c2c0fa05b2e 100755
--- a/buildscripts/hang_analyzer.py
+++ b/buildscripts/hang_analyzer.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-
"""Hang Analyzer
A prototype hang analyzer for Evergreen integration to help investigate test timeouts
@@ -34,7 +33,6 @@ if _is_windows:
import win32event
import win32api
-
# Get relative imports to work when the package is not installed on the PYTHONPATH.
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
@@ -99,7 +97,6 @@ def get_process_logger(debugger_output, pid, process_name):
class WindowsDumper(object):
-
def __find_debugger(self, logger, debugger):
"""Finds the installed debugger"""
# We are looking for c:\Program Files (x86)\Windows Kits\8.1\Debuggers\x64
@@ -115,7 +112,7 @@ class WindowsDumper(object):
for i in range(0, 2):
pathToTest = os.path.join(rootDir, "Windows Kits", "8." + str(i), "Debuggers", "x64")
logger.info("Checking for debugger in %s" % pathToTest)
- if(os.path.exists(pathToTest)):
+ if (os.path.exists(pathToTest)):
return os.path.join(pathToTest, debugger)
return None
@@ -129,15 +126,12 @@ class WindowsDumper(object):
root_logger.warning("Debugger %s not found, skipping dumping of %d" % (debugger, pid))
return
- root_logger.info("Debugger %s, analyzing %s process with PID %d" % (dbg,
- process_name,
- pid))
+ root_logger.info("Debugger %s, analyzing %s process with PID %d" % (dbg, process_name, pid))
dump_command = ""
if take_dump:
# Dump to file, dump_<process name>.<pid>.mdmp
- dump_file = "dump_%s.%d.%s" % (os.path.splitext(process_name)[0],
- pid,
+ dump_file = "dump_%s.%d.%s" % (os.path.splitext(process_name)[0], pid,
self.get_dump_ext())
dump_command = ".dump /ma %s" % dump_file
root_logger.info("Dumping core to %s" % dump_file)
@@ -146,14 +140,14 @@ class WindowsDumper(object):
".symfix", # Fixup symbol path
".symopt +0x10", # Enable line loading (off by default in CDB, on by default in WinDBG)
".reload", # Reload symbols
- "!peb", # Dump current exe, & environment variables
- "lm", # Dump loaded modules
+ "!peb", # Dump current exe, & environment variables
+ "lm", # Dump loaded modules
dump_command,
"!uniqstack -pn", # Dump All unique Threads with function arguments
- "!cs -l", # Dump all locked critical sections
+ "!cs -l", # Dump all locked critical sections
".detach", # Detach
- "q" # Quit
- ]
+ "q" # Quit
+ ]
call([dbg, '-c', ";".join(cmds), '-p', str(pid)], logger)
@@ -164,7 +158,6 @@ class WindowsDumper(object):
class WindowsProcessList(object):
-
def __find_ps(self):
"""Finds tasklist """
return os.path.join(os.environ["WINDIR"], "system32", "tasklist.exe")
@@ -187,7 +180,6 @@ class WindowsProcessList(object):
# LLDB dumper is for MacOS X
class LLDBDumper(object):
-
def __find_debugger(self, debugger):
"""Finds the installed debugger"""
return find_program(debugger, ['/usr/bin'])
@@ -200,9 +192,7 @@ class LLDBDumper(object):
root_logger.warning("Debugger %s not found, skipping dumping of %d" % (debugger, pid))
return
- root_logger.info("Debugger %s, analyzing %s process with PID %d" % (dbg,
- process_name,
- pid))
+ root_logger.info("Debugger %s, analyzing %s process with PID %d" % (dbg, process_name, pid))
lldb_version = callo([dbg, "--version"], logger)
@@ -236,7 +226,7 @@ class LLDBDumper(object):
dump_command,
"settings set interpreter.prompt-on-quit false",
"quit",
- ]
+ ]
tf = tempfile.NamedTemporaryFile()
@@ -257,7 +247,6 @@ class LLDBDumper(object):
class DarwinProcessList(object):
-
def __find_ps(self):
"""Finds ps"""
return find_program('ps', ['/bin'])
@@ -280,7 +269,6 @@ class DarwinProcessList(object):
# GDB dumper is for Linux & Solaris
class GDBDumper(object):
-
def __find_debugger(self, debugger):
"""Finds the installed debugger"""
return find_program(debugger, ['/opt/mongodbtoolchain/gdb/bin', '/usr/bin'])
@@ -293,9 +281,7 @@ class GDBDumper(object):
logger.warning("Debugger %s not found, skipping dumping of %d" % (debugger, pid))
return
- root_logger.info("Debugger %s, analyzing %s process with PID %d" % (dbg,
- process_name,
- pid))
+ root_logger.info("Debugger %s, analyzing %s process with PID %d" % (dbg, process_name, pid))
dump_command = ""
if take_dump:
@@ -348,26 +334,23 @@ class GDBDumper(object):
'set logging on',
'thread apply all bt',
'set logging off',
- ]
+ ]
cmds = [
"set interactive-mode off",
- "set print thread-events off", # Python calls to gdb.parse_and_eval may cause threads
- # to start and finish. This suppresses those messages
- # from appearing in the return output.
- "file %s" % process_name, # Solaris must load the process to read the symbols.
+ "set print thread-events off", # Suppress GDB messages of threads starting/finishing.
+ "file %s" % process_name, # Solaris must load the process to read the symbols.
"attach %d" % pid,
"info sharedlibrary",
- "info threads", # Dump a simple list of commands to get the thread name
+ "info threads", # Dump a simple list of commands to get the thread name
"set python print-stack full",
- ] + raw_stacks_commands + [
+ ] + raw_stacks_commands + [
source_mongo,
source_mongo_printers,
source_mongo_lock,
mongodb_uniqstack,
- "set scheduler-locking on", # Lock the scheduler, before running any of the
- # following commands, which executes code in the
- # attached process.
+            # Lock the scheduler before running commands that execute code in the attached process.
+ "set scheduler-locking on",
dump_command,
mongodb_dump_locks,
mongodb_show_locks,
@@ -375,11 +358,10 @@ class GDBDumper(object):
mongodb_javascript_stack,
"set confirm off",
"quit",
- ]
+ ]
call([dbg, "--quiet", "--nx"] +
- list(itertools.chain.from_iterable([['-ex', b] for b in cmds])),
- logger)
+ list(itertools.chain.from_iterable([['-ex', b] for b in cmds])), logger)
root_logger.info("Done analyzing %s process with PID %d" % (process_name, pid))
@@ -396,7 +378,6 @@ class GDBDumper(object):
class LinuxProcessList(object):
-
def __find_ps(self):
"""Finds ps"""
return find_program('ps', ['/bin', '/usr/bin'])
@@ -420,7 +401,6 @@ class LinuxProcessList(object):
class SolarisProcessList(object):
-
def __find_ps(self):
"""Finds ps"""
return find_program('ps', ['/bin', '/usr/bin'])
@@ -443,7 +423,6 @@ class SolarisProcessList(object):
# jstack is a JDK utility
class JstackDumper(object):
-
def __find_debugger(self, debugger):
"""Finds the installed jstack debugger"""
return find_program(debugger, ['/usr/bin'])
@@ -457,8 +436,7 @@ class JstackDumper(object):
logger.warning("Debugger %s not found, skipping dumping of %d" % (debugger, pid))
return
- root_logger.info("Debugger %s, analyzing %s process with PID %d" % (jstack,
- process_name,
+ root_logger.info("Debugger %s, analyzing %s process with PID %d" % (jstack, process_name,
pid))
call([jstack, "-l", str(pid)], logger)
@@ -468,7 +446,6 @@ class JstackDumper(object):
# jstack is a JDK utility
class JstackWindowsDumper(object):
-
def dump_info(self, root_logger, logger, pid, process_name):
"""Dump java thread stack traces to the logger"""
@@ -520,9 +497,7 @@ def signal_event_object(logger, pid):
try:
desired_access = win32event.EVENT_MODIFY_STATE
inherit_handle = False
- task_timeout_handle = win32event.OpenEvent(desired_access,
- inherit_handle,
- event_name)
+ task_timeout_handle = win32event.OpenEvent(desired_access, inherit_handle, event_name)
except win32event.error as err:
logger.info("Exception from win32event.OpenEvent with error: %s" % err)
return
@@ -555,8 +530,7 @@ def signal_process(logger, pid, signalnum):
def pname_match(match_type, pname, interesting_processes):
pname = os.path.splitext(pname)[0]
for ip in interesting_processes:
- if (match_type == 'exact' and pname == ip or
- match_type == 'contains' and ip in pname):
+ if (match_type == 'exact' and pname == ip or match_type == 'contains' and ip in pname):
return True
return False
@@ -601,46 +575,32 @@ def main():
process_ids = []
parser = OptionParser(description=__doc__)
- parser.add_option('-m', '--process-match',
- dest='process_match',
- choices=['contains', 'exact'],
+ parser.add_option('-m', '--process-match', dest='process_match', choices=['contains', 'exact'],
default='contains',
- help="Type of match for process names (-p & -g), specify 'contains', or"
- " 'exact'. Note that the process name match performs the following"
- " conversions: change all process names to lowecase, strip off the file"
- " extension, like '.exe' on Windows. Default is 'contains'.")
- parser.add_option('-p', '--process-names',
- dest='process_names',
+ help=("Type of match for process names (-p & -g), specify 'contains', or"
+ " 'exact'. Note that the process name match performs the following"
+                            " conversions: change all process names to lowercase, strip off the file"
+ " extension, like '.exe' on Windows. Default is 'contains'."))
+ parser.add_option('-p', '--process-names', dest='process_names',
help='Comma separated list of process names to analyze')
- parser.add_option('-g', '--go-process-names',
- dest='go_process_names',
+ parser.add_option('-g', '--go-process-names', dest='go_process_names',
help='Comma separated list of go process names to analyze')
- parser.add_option('-d', '--process-ids',
- dest='process_ids',
- default=None,
+ parser.add_option('-d', '--process-ids', dest='process_ids', default=None,
help='Comma separated list of process ids (PID) to analyze, overrides -p &'
- ' -g')
- parser.add_option('-c', '--dump-core',
- dest='dump_core',
- action="store_true",
- default=False,
+ ' -g')
+ parser.add_option('-c', '--dump-core', dest='dump_core', action="store_true", default=False,
help='Dump core file for each analyzed process')
- parser.add_option('-s', '--max-core-dumps-size',
- dest='max_core_dumps_size',
- default=10000,
+ parser.add_option('-s', '--max-core-dumps-size', dest='max_core_dumps_size', default=10000,
help='Maximum total size of core dumps to keep in megabytes')
- parser.add_option('-o', '--debugger-output',
- dest='debugger_output',
- action="append",
- choices=['file', 'stdout'],
- default=None,
- help="If 'stdout', then the debugger's output is written to the Python"
- " process's stdout. If 'file', then the debugger's output is written"
- " to a file named debugger_<process>_<pid>.log for each process it"
- " attaches to. This option can be specified multiple times on the"
- " command line to have the debugger's output written to multiple"
- " locations. By default, the debugger's output is written only to the"
- " Python process's stdout.")
+ parser.add_option('-o', '--debugger-output', dest='debugger_output', action="append",
+ choices=['file', 'stdout'], default=None,
+ help=("If 'stdout', then the debugger's output is written to the Python"
+ " process's stdout. If 'file', then the debugger's output is written"
+ " to a file named debugger_<process>_<pid>.log for each process it"
+ " attaches to. This option can be specified multiple times on the"
+ " command line to have the debugger's output written to multiple"
+ " locations. By default, the debugger's output is written only to the"
+ " Python process's stdout."))
(options, args) = parser.parse_args()
@@ -680,12 +640,12 @@ def main():
running_pids = set([pid for (pid, pname) in all_processes])
missing_pids = set(process_ids) - running_pids
if missing_pids:
- root_logger.warning("The following requested process ids are not running %s" %
- list(missing_pids))
+ root_logger.warning(
+ "The following requested process ids are not running %s" % list(missing_pids))
else:
processes = [(pid, pname) for (pid, pname) in all_processes
- if pname_match(options.process_match, pname, interesting_processes) and
- pid != os.getpid()]
+ if pname_match(options.process_match, pname, interesting_processes)
+ and pid != os.getpid()]
root_logger.info("Found %d interesting processes %s" % (len(processes), processes))
@@ -708,16 +668,12 @@ def main():
trapped_exceptions = []
# Dump all processes, except python & java.
- for (pid, process_name) in [(p, pn) for (p, pn) in processes
- if not re.match("^(java|python)", pn)]:
+ for (pid,
+ process_name) in [(p, pn) for (p, pn) in processes if not re.match("^(java|python)", pn)]:
process_logger = get_process_logger(options.debugger_output, pid, process_name)
try:
- dbg.dump_info(
- root_logger,
- process_logger,
- pid,
- process_name,
- options.dump_core and check_dump_quota(max_dump_size_bytes, dbg.get_dump_ext()))
+ dbg.dump_info(root_logger, process_logger, pid, process_name, options.dump_core
+ and check_dump_quota(max_dump_size_bytes, dbg.get_dump_ext()))
except Exception as err:
root_logger.info("Error encountered when invoking debugger %s" % err)
trapped_exceptions.append(traceback.format_exc())
@@ -736,8 +692,8 @@ def main():
# TerminateProcess.
# Note: The stacktrace output may be captured elsewhere (i.e. resmoke).
for (pid, process_name) in [(p, pn) for (p, pn) in processes if pn in go_processes]:
- root_logger.info("Sending signal SIGABRT to go process %s with PID %d" %
- (process_name, pid))
+ root_logger.info("Sending signal SIGABRT to go process %s with PID %d" % (process_name,
+ pid))
signal_process(root_logger, pid, signal.SIGABRT)
root_logger.info("Done analyzing all processes for hangs")
@@ -747,5 +703,6 @@ def main():
if trapped_exceptions:
sys.exit(1)
+
if __name__ == "__main__":
main()
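
pname_match, shown in the hunk above, decides which running processes the hang analyzer attaches to. The copy below is self-contained and adds a couple of worked examples; the process names are made up.

import os


def pname_match(match_type, pname, interesting_processes):
    """Match a process name exactly or by substring after stripping the extension."""
    pname = os.path.splitext(pname)[0]
    for ip in interesting_processes:
        if (match_type == 'exact' and pname == ip) or (match_type == 'contains' and ip in pname):
            return True
    return False


# pname_match('contains', 'mongod.exe', ['mongo'])  -> True  ('mongo' is in 'mongod')
# pname_match('exact', 'mongod.exe', ['mongo'])     -> False ('mongod' != 'mongo')
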
diff --git a/buildscripts/idl/idl/ast.py b/buildscripts/idl/idl/ast.py
index ef5a0aba44d..e5eda3b8426 100644
--- a/buildscripts/idl/idl/ast.py
+++ b/buildscripts/idl/idl/ast.py
@@ -35,8 +35,8 @@ class IDLBoundSpec(object):
def __init__(self, spec, error_collection):
# type: (IDLAST, errors.ParserErrorCollection) -> None
"""Must specify either an IDL document or errors, not both."""
- assert (spec is None and error_collection is not None) or (spec is not None and
- error_collection is None)
+ assert (spec is None and error_collection is not None) or (spec is not None
+ and error_collection is None)
self.spec = spec
self.errors = error_collection
diff --git a/buildscripts/idl/idl/bson.py b/buildscripts/idl/idl/bson.py
index 214b67a7bfe..21fb8961f5a 100644
--- a/buildscripts/idl/idl/bson.py
+++ b/buildscripts/idl/idl/bson.py
@@ -26,77 +26,29 @@ from typing import Dict, List
# scalar: True if the type is not an array or object
# bson_type_enum: The BSONType enum value for the given type
_BSON_TYPE_INFORMATION = {
- "double": {
- 'scalar': True,
- 'bson_type_enum': 'NumberDouble'
- },
- "string": {
- 'scalar': True,
- 'bson_type_enum': 'String'
- },
- "object": {
- 'scalar': False,
- 'bson_type_enum': 'Object'
- },
+ "double": {'scalar': True, 'bson_type_enum': 'NumberDouble'},
+ "string": {'scalar': True, 'bson_type_enum': 'String'},
+ "object": {'scalar': False, 'bson_type_enum': 'Object'},
# TODO: add support: "array" : { 'scalar' : False, 'bson_type_enum' : 'Array'},
- "bindata": {
- 'scalar': True,
- 'bson_type_enum': 'BinData'
- },
- "undefined": {
- 'scalar': True,
- 'bson_type_enum': 'Undefined'
- },
- "objectid": {
- 'scalar': True,
- 'bson_type_enum': 'jstOID'
- },
- "bool": {
- 'scalar': True,
- 'bson_type_enum': 'Bool'
- },
- "date": {
- 'scalar': True,
- 'bson_type_enum': 'Date'
- },
- "null": {
- 'scalar': True,
- 'bson_type_enum': 'jstNULL'
- },
- "regex": {
- 'scalar': True,
- 'bson_type_enum': 'RegEx'
- },
- "int": {
- 'scalar': True,
- 'bson_type_enum': 'NumberInt'
- },
- "timestamp": {
- 'scalar': True,
- 'bson_type_enum': 'bsonTimestamp'
- },
- "long": {
- 'scalar': True,
- 'bson_type_enum': 'NumberLong'
- },
- "decimal": {
- 'scalar': True,
- 'bson_type_enum': 'NumberDecimal'
- },
+ "bindata": {'scalar': True, 'bson_type_enum': 'BinData'},
+ "undefined": {'scalar': True, 'bson_type_enum': 'Undefined'},
+ "objectid": {'scalar': True, 'bson_type_enum': 'jstOID'},
+ "bool": {'scalar': True, 'bson_type_enum': 'Bool'},
+ "date": {'scalar': True, 'bson_type_enum': 'Date'},
+ "null": {'scalar': True, 'bson_type_enum': 'jstNULL'},
+ "regex": {'scalar': True, 'bson_type_enum': 'RegEx'},
+ "int": {'scalar': True, 'bson_type_enum': 'NumberInt'},
+ "timestamp": {'scalar': True, 'bson_type_enum': 'bsonTimestamp'},
+ "long": {'scalar': True, 'bson_type_enum': 'NumberLong'},
+ "decimal": {'scalar': True, 'bson_type_enum': 'NumberDecimal'},
}
# Dictionary of BinData subtype type Information
# scalar: True if the type is not an array or object
# bindata_enum: The BinDataType enum value for the given type
_BINDATA_SUBTYPE = {
- "generic": {
- 'scalar': True,
- 'bindata_enum': 'BinDataGeneral'
- },
- "function": {
- 'scalar': True,
- 'bindata_enum': 'Function'
- },
+ "generic": {'scalar': True, 'bindata_enum': 'BinDataGeneral'},
+ "function": {'scalar': True, 'bindata_enum': 'Function'},
# Also simply known as type 2, deprecated, and requires special handling
#"binary": {
# 'scalar': False,
@@ -107,14 +59,8 @@ _BINDATA_SUBTYPE = {
# 'scalar': False,
# 'bindata_enum': 'bdtUUID'
# },
- "uuid": {
- 'scalar': True,
- 'bindata_enum': 'newUUID'
- },
- "md5": {
- 'scalar': True,
- 'bindata_enum': 'MD5Type'
- },
+ "uuid": {'scalar': True, 'bindata_enum': 'newUUID'},
+ "md5": {'scalar': True, 'bindata_enum': 'MD5Type'},
}
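
The compacted _BSON_TYPE_INFORMATION table above maps IDL bson_serialization_type names to BSONType enum values. Below is a tiny lookup helper over an excerpt of that table; the helper itself is illustrative rather than part of bson.py.

# Two entries copied from the table above; the full table covers all scalar BSON types.
_BSON_TYPE_INFORMATION = {
    "double": {'scalar': True, 'bson_type_enum': 'NumberDouble'},
    "string": {'scalar': True, 'bson_type_enum': 'String'},
}


def cpp_bson_type_name(type_name):
    """Return the C++ BSONType enum value for an IDL type name, or None if unknown."""
    info = _BSON_TYPE_INFORMATION.get(type_name)
    return info['bson_type_enum'] if info else None


# cpp_bson_type_name("string") -> 'String'
# cpp_bson_type_name("array")  -> None (array is still a TODO in the table above)
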
diff --git a/buildscripts/idl/idl/cpp_types.py b/buildscripts/idl/idl/cpp_types.py
index bb85f4b67e8..bf7bbeeb629 100644
--- a/buildscripts/idl/idl/cpp_types.py
+++ b/buildscripts/idl/idl/cpp_types.py
@@ -266,10 +266,8 @@ class _CppTypeView(CppTypeBase):
def get_setter_body(self, member_name):
# type: (unicode) -> unicode
- return common.template_args(
- '${member_name} = ${value};',
- member_name=member_name,
- value=self.get_transform_to_storage_type("value"))
+ return common.template_args('${member_name} = ${value};', member_name=member_name,
+ value=self.get_transform_to_storage_type("value"))
def get_transform_to_getter_type(self, expression):
# type: (unicode) -> Optional[unicode]
@@ -279,7 +277,8 @@ class _CppTypeView(CppTypeBase):
# type: (unicode) -> Optional[unicode]
return common.template_args(
'${expression}.toString()',
- expression=expression, )
+ expression=expression,
+ )
class _CppTypeVector(CppTypeBase):
@@ -325,10 +324,8 @@ class _CppTypeVector(CppTypeBase):
def get_setter_body(self, member_name):
# type: (unicode) -> unicode
- return common.template_args(
- '${member_name} = ${value};',
- member_name=member_name,
- value=self.get_transform_to_storage_type("value"))
+ return common.template_args('${member_name} = ${value};', member_name=member_name,
+ value=self.get_transform_to_storage_type("value"))
def get_transform_to_getter_type(self, expression):
# type: (unicode) -> Optional[unicode]
@@ -432,8 +429,8 @@ class _CppTypeArray(_CppTypeDelegating):
# type: (unicode) -> unicode
convert = self.get_transform_to_storage_type("value")
if convert:
- return common.template_args(
- '${member_name} = ${convert};', member_name=member_name, convert=convert)
+ return common.template_args('${member_name} = ${convert};', member_name=member_name,
+ convert=convert)
else:
return self._base.get_setter_body(member_name)
@@ -442,7 +439,8 @@ class _CppTypeArray(_CppTypeDelegating):
if self._base.get_storage_type() != self._base.get_getter_setter_type():
return common.template_args(
'transformVector(${expression})',
- expression=expression, )
+ expression=expression,
+ )
else:
return None
@@ -451,7 +449,8 @@ class _CppTypeArray(_CppTypeDelegating):
if self._base.get_storage_type() != self._base.get_getter_setter_type():
return common.template_args(
'transformVector(${expression})',
- expression=expression, )
+ expression=expression,
+ )
else:
return None
@@ -497,15 +496,12 @@ class _CppTypeOptional(_CppTypeDelegating):
} else {
return boost::none;
}
- """),
- member_name=member_name,
- convert=convert)
+ """), member_name=member_name, convert=convert)
elif self.is_view_type():
# For optionals around view types, do an explicit construction
- return common.template_args(
- 'return ${param_type}{${member_name}};',
- param_type=self.get_getter_setter_type(),
- member_name=member_name)
+ return common.template_args('return ${param_type}{${member_name}};',
+ param_type=self.get_getter_setter_type(),
+ member_name=member_name)
else:
return common.template_args('return ${member_name};', member_name=member_name)
@@ -520,9 +516,7 @@ class _CppTypeOptional(_CppTypeDelegating):
} else {
${member_name} = boost::none;
}
- """),
- member_name=member_name,
- convert=convert)
+ """), member_name=member_name, convert=convert)
else:
return self._base.get_setter_body(member_name)
@@ -590,11 +584,11 @@ def _call_method_or_global_function(expression, method_name):
"""
short_method_name = writer.get_method_name(method_name)
if writer.is_function(method_name):
- return common.template_args(
- '${method_name}(${expression})', expression=expression, method_name=short_method_name)
+ return common.template_args('${method_name}(${expression})', expression=expression,
+ method_name=short_method_name)
- return common.template_args(
- '${expression}.${method_name}()', expression=expression, method_name=short_method_name)
+ return common.template_args('${expression}.${method_name}()', expression=expression,
+ method_name=short_method_name)
class _CommonBsonCppTypeBase(BsonCppTypeBase):
@@ -607,10 +601,9 @@ class _CommonBsonCppTypeBase(BsonCppTypeBase):
def gen_deserializer_expression(self, indented_writer, object_instance):
# type: (writer.IndentedTextWriter, unicode) -> unicode
- return common.template_args(
- '${object_instance}.${method_name}()',
- object_instance=object_instance,
- method_name=self._deserialize_method_name)
+ return common.template_args('${object_instance}.${method_name}()',
+ object_instance=object_instance,
+ method_name=self._deserialize_method_name)
def has_serializer(self):
# type: () -> bool
@@ -633,9 +626,8 @@ class _ObjectBsonCppTypeBase(BsonCppTypeBase):
if self._field.deserializer:
# Call a method like: Class::method(const BSONObj& value)
indented_writer.write_line(
- common.template_args(
- 'const BSONObj localObject = ${object_instance}.Obj();',
- object_instance=object_instance))
+ common.template_args('const BSONObj localObject = ${object_instance}.Obj();',
+ object_instance=object_instance))
return "localObject"
else:
@@ -650,10 +642,8 @@ class _ObjectBsonCppTypeBase(BsonCppTypeBase):
# type: (writer.IndentedTextWriter, unicode) -> unicode
method_name = writer.get_method_name(self._field.serializer)
indented_writer.write_line(
- common.template_args(
- 'const BSONObj localObject = ${expression}.${method_name}();',
- expression=expression,
- method_name=method_name))
+ common.template_args('const BSONObj localObject = ${expression}.${method_name}();',
+ expression=expression, method_name=method_name))
return "localObject"
@@ -667,11 +657,11 @@ class _BinDataBsonCppTypeBase(BsonCppTypeBase):
def gen_deserializer_expression(self, indented_writer, object_instance):
# type: (writer.IndentedTextWriter, unicode) -> unicode
if self._field.bindata_subtype == 'uuid':
- return common.template_args(
- '${object_instance}.uuid()', object_instance=object_instance)
+ return common.template_args('${object_instance}.uuid()',
+ object_instance=object_instance)
else:
- return common.template_args(
- '${object_instance}._binDataVector()', object_instance=object_instance)
+ return common.template_args('${object_instance}._binDataVector()',
+ object_instance=object_instance)
def has_serializer(self):
# type: () -> bool
@@ -682,14 +672,12 @@ class _BinDataBsonCppTypeBase(BsonCppTypeBase):
if self._field.serializer:
method_name = writer.get_method_name(self._field.serializer)
indented_writer.write_line(
- common.template_args(
- 'ConstDataRange tempCDR = ${expression}.${method_name}();',
- expression=expression,
- method_name=method_name))
+ common.template_args('ConstDataRange tempCDR = ${expression}.${method_name}();',
+ expression=expression, method_name=method_name))
else:
indented_writer.write_line(
- common.template_args(
- 'ConstDataRange tempCDR = makeCDR(${expression});', expression=expression))
+ common.template_args('ConstDataRange tempCDR = makeCDR(${expression});',
+ expression=expression))
return common.template_args(
'BSONBinData(tempCDR.data(), tempCDR.length(), ${bindata_subtype})',
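
The cpp_types.py hunks above lean heavily on common.template_args for '${placeholder}' substitution. The helper below is a hypothetical stand-in with the same calling convention, useful for reading the generated-code snippets in the diff; it is not the real common module.

from string import Template


def template_args(template_str, **kwargs):
    """Substitute ${name} placeholders, mirroring how common.template_args is called above."""
    return Template(template_str).substitute(kwargs)


# template_args('${member_name} = ${value};', member_name='_field', value='value.toString()')
# -> '_field = value.toString();'
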
diff --git a/buildscripts/idl/idl/enum_types.py b/buildscripts/idl/idl/enum_types.py
index e9e7214d2ad..d66fb4010d0 100644
--- a/buildscripts/idl/idl/enum_types.py
+++ b/buildscripts/idl/idl/enum_types.py
@@ -60,8 +60,8 @@ class EnumTypeInfoBase(object):
def _get_enum_deserializer_name(self):
# type: () -> unicode
"""Return the name of deserializer function without prefix."""
- return common.template_args(
- "${enum_name}_parse", enum_name=common.title_case(self._enum.name))
+ return common.template_args("${enum_name}_parse", enum_name=common.title_case(
+ self._enum.name))
def get_enum_deserializer_name(self):
# type: () -> unicode
@@ -72,8 +72,8 @@ class EnumTypeInfoBase(object):
def _get_enum_serializer_name(self):
# type: () -> unicode
"""Return the name of serializer function without prefix."""
- return common.template_args(
- "${enum_name}_serializer", enum_name=common.title_case(self._enum.name))
+ return common.template_args("${enum_name}_serializer", enum_name=common.title_case(
+ self._enum.name))
def get_enum_serializer_name(self):
# type: () -> unicode
@@ -137,8 +137,7 @@ class _EnumTypeInt(EnumTypeInfoBase):
# type: () -> unicode
return common.template_args(
"${enum_name} ${function_name}(const IDLParserErrorContext& ctxt, std::int32_t value)",
- enum_name=self.get_cpp_type_name(),
- function_name=self._get_enum_deserializer_name())
+ enum_name=self.get_cpp_type_name(), function_name=self._get_enum_deserializer_name())
def gen_deserializer_definition(self, indented_writer):
# type: (writer.IndentedTextWriter) -> None
@@ -168,10 +167,9 @@ class _EnumTypeInt(EnumTypeInfoBase):
def get_serializer_declaration(self):
# type: () -> unicode
"""Get the serializer function declaration minus trailing semicolon."""
- return common.template_args(
- "std::int32_t ${function_name}(${enum_name} value)",
- enum_name=self.get_cpp_type_name(),
- function_name=self._get_enum_serializer_name())
+ return common.template_args("std::int32_t ${function_name}(${enum_name} value)",
+ enum_name=self.get_cpp_type_name(),
+ function_name=self._get_enum_serializer_name())
def gen_serializer_definition(self, indented_writer):
# type: (writer.IndentedTextWriter) -> None
@@ -189,8 +187,8 @@ class _EnumTypeInt(EnumTypeInfoBase):
def _get_constant_enum_name(idl_enum, enum_value):
# type: (Union[syntax.Enum,ast.Enum], Union[syntax.EnumValue,ast.EnumValue]) -> unicode
"""Return the C++ name for a string constant of string enum value."""
- return common.template_args(
- 'k${enum_name}_${name}', enum_name=common.title_case(idl_enum.name), name=enum_value.name)
+ return common.template_args('k${enum_name}_${name}', enum_name=common.title_case(idl_enum.name),
+ name=enum_value.name)
class _EnumTypeString(EnumTypeInfoBase):
@@ -204,8 +202,8 @@ class _EnumTypeString(EnumTypeInfoBase):
def get_cpp_type_name(self):
# type: () -> unicode
- return common.template_args(
- "${enum_name}Enum", enum_name=common.title_case(self._enum.name))
+ return common.template_args("${enum_name}Enum", enum_name=common.title_case(
+ self._enum.name))
def get_bson_types(self):
# type: () -> List[unicode]
@@ -219,8 +217,7 @@ class _EnumTypeString(EnumTypeInfoBase):
# type: () -> unicode
return common.template_args(
"${enum_name} ${function_name}(const IDLParserErrorContext& ctxt, StringData value)",
- enum_name=self.get_cpp_type_name(),
- function_name=self._get_enum_deserializer_name())
+ enum_name=self.get_cpp_type_name(), function_name=self._get_enum_deserializer_name())
def gen_deserializer_definition(self, indented_writer):
# type: (writer.IndentedTextWriter) -> None
@@ -234,17 +231,16 @@ class _EnumTypeString(EnumTypeInfoBase):
with writer.NamespaceScopeBlock(indented_writer, ['']):
for enum_value in self._enum.values:
indented_writer.write_line(
- common.template_args(
- 'constexpr StringData ${constant_name} = "${value}"_sd;',
- constant_name=_get_constant_enum_name(self._enum, enum_value),
- value=enum_value.value))
+ common.template_args('constexpr StringData ${constant_name} = "${value}"_sd;',
+ constant_name=_get_constant_enum_name(
+ self._enum, enum_value), value=enum_value.value))
indented_writer.write_empty_line()
with writer.TemplateContext(indented_writer, template_params):
with writer.IndentedScopedBlock(indented_writer, "${function_name} {", "}"):
for enum_value in self._enum.values:
- predicate = 'if (value == %s) {' % (_get_constant_enum_name(self._enum,
- enum_value))
+ predicate = 'if (value == %s) {' % (
+ _get_constant_enum_name(self._enum, enum_value))
with writer.IndentedScopedBlock(indented_writer, predicate, "}"):
indented_writer.write_template('return ${enum_name}::%s;' %
(enum_value.name))
@@ -254,10 +250,9 @@ class _EnumTypeString(EnumTypeInfoBase):
def get_serializer_declaration(self):
# type: () -> unicode
"""Get the serializer function declaration minus trailing semicolon."""
- return common.template_args(
- "StringData ${function_name}(${enum_name} value)",
- enum_name=self.get_cpp_type_name(),
- function_name=self._get_enum_serializer_name())
+ return common.template_args("StringData ${function_name}(${enum_name} value)",
+ enum_name=self.get_cpp_type_name(),
+ function_name=self._get_enum_serializer_name())
def gen_serializer_definition(self, indented_writer):
# type: (writer.IndentedTextWriter) -> None
@@ -273,8 +268,8 @@ class _EnumTypeString(EnumTypeInfoBase):
with writer.IndentedScopedBlock(indented_writer,
'if (value == ${enum_name}::%s) {' %
(enum_value.name), "}"):
- indented_writer.write_line('return %s;' % (_get_constant_enum_name(
- self._enum, enum_value)))
+ indented_writer.write_line(
+ 'return %s;' % (_get_constant_enum_name(self._enum, enum_value)))
indented_writer.write_line('MONGO_UNREACHABLE;')
indented_writer.write_line('return StringData();')
diff --git a/buildscripts/idl/idl/errors.py b/buildscripts/idl/idl/errors.py
index ed2963f1251..106644e0ded 100644
--- a/buildscripts/idl/idl/errors.py
+++ b/buildscripts/idl/idl/errors.py
@@ -210,9 +210,10 @@ class ParserContext(object):
def add_unknown_root_node_error(self, node):
# type: (yaml.nodes.Node) -> None
"""Add an error about an unknown YAML root node."""
- self._add_node_error(node, ERROR_ID_UNKNOWN_ROOT, (
- "Unrecognized IDL specification root level node '%s', only " +
- " (global, import, types, commands, and structs) are accepted") % (node.value))
+ self._add_node_error(node, ERROR_ID_UNKNOWN_ROOT,
+ ("Unrecognized IDL specification root level node '%s', only " +
+ " (global, import, types, commands, and structs) are accepted") %
+ (node.value))
def add_unknown_node_error(self, node, name):
# type: (yaml.nodes.Node, unicode) -> None
@@ -287,9 +288,9 @@ class ParserContext(object):
return False
if not (node.value == "true" or node.value == "false"):
- self._add_node_error(node, ERROR_ID_IS_NODE_VALID_BOOL,
- "Illegal bool value for '%s', expected either 'true' or 'false'." %
- node_name)
+ self._add_node_error(
+ node, ERROR_ID_IS_NODE_VALID_BOOL,
+ "Illegal bool value for '%s', expected either 'true' or 'false'." % node_name)
return False
return True
@@ -331,16 +332,16 @@ class ParserContext(object):
"""Add an error about a YAML node missing a required child."""
# pylint: disable=invalid-name
self._add_node_error(node, ERROR_ID_MISSING_REQUIRED_FIELD,
- "IDL node '%s' is missing required scalar '%s'" %
- (node_parent, node_name))
+ "IDL node '%s' is missing required scalar '%s'" % (node_parent,
+ node_name))
def add_missing_ast_required_field_error(self, location, ast_type, ast_parent, ast_name):
# type: (common.SourceLocation, unicode, unicode, unicode) -> None
"""Add an error about a AST node missing a required child."""
# pylint: disable=invalid-name
self._add_error(location, ERROR_ID_MISSING_AST_REQUIRED_FIELD,
- "%s '%s' is missing required scalar '%s'" %
- (ast_type, ast_parent, ast_name))
+ "%s '%s' is missing required scalar '%s'" % (ast_type, ast_parent,
+ ast_name))
def add_array_not_valid_error(self, location, ast_type, name):
# type: (common.SourceLocation, unicode, unicode) -> None
@@ -352,8 +353,8 @@ class ParserContext(object):
# type: (common.SourceLocation, unicode, unicode, unicode) -> None
"""Add an error about a bad bson type."""
self._add_error(location, ERROR_ID_BAD_BSON_TYPE,
- "BSON Type '%s' is not recognized for %s '%s'." %
- (bson_type_name, ast_type, ast_parent))
+ "BSON Type '%s' is not recognized for %s '%s'." % (bson_type_name, ast_type,
+ ast_parent))
def add_bad_bson_scalar_type_error(self, location, ast_type, ast_parent, bson_type_name):
# type: (common.SourceLocation, unicode, unicode, unicode) -> None
@@ -390,9 +391,10 @@ class ParserContext(object):
# type: (common.SourceLocation, unicode, unicode) -> None
"""Add an error about field must be empty for ignored fields."""
# pylint: disable=invalid-name
- self._add_error(location, ERROR_ID_FIELD_MUST_BE_EMPTY_FOR_IGNORED, (
- "Field '%s' cannot contain a value for property '%s' when a field is marked as ignored")
- % (name, field_name))
+ self._add_error(
+ location, ERROR_ID_FIELD_MUST_BE_EMPTY_FOR_IGNORED,
+ ("Field '%s' cannot contain a value for property '%s' when a field is marked as ignored"
+ ) % (name, field_name))
def add_struct_field_must_be_empty_error(self, location, name, field_name):
# type: (common.SourceLocation, unicode, unicode) -> None
@@ -407,27 +409,31 @@ class ParserContext(object):
# type: (common.SourceLocation, unicode, unicode, unicode) -> None
# pylint: disable=invalid-name
"""Add an error about field must be empty for fields of type struct."""
- self._add_error(location, ERROR_ID_CUSTOM_SCALAR_SERIALIZATION_NOT_SUPPORTED, (
- "Custom serialization for a scalar is only supported for 'string'. The %s '%s' cannot" +
- " use bson type '%s', use a bson_serialization_type of 'any' instead.") %
- (ast_type, ast_parent, bson_type_name))
+ self._add_error(
+ location, ERROR_ID_CUSTOM_SCALAR_SERIALIZATION_NOT_SUPPORTED,
+ ("Custom serialization for a scalar is only supported for 'string'. The %s '%s' cannot"
+ + " use bson type '%s', use a bson_serialization_type of 'any' instead.") %
+ (ast_type, ast_parent, bson_type_name))
def add_bad_any_type_use_error(self, location, bson_type, ast_type, ast_parent):
# type: (common.SourceLocation, unicode, unicode, unicode) -> None
# pylint: disable=invalid-name
"""Add an error about any being used in a list of bson types."""
- self._add_error(location, ERROR_ID_BAD_ANY_TYPE_USE, (
- "The BSON Type '%s' is not allowed in a list of bson serialization types for" +
- "%s '%s'. It must be only a single bson type.") % (bson_type, ast_type, ast_parent))
+ self._add_error(
+ location, ERROR_ID_BAD_ANY_TYPE_USE,
+ ("The BSON Type '%s' is not allowed in a list of bson serialization types for" +
+ "%s '%s'. It must be only a single bson type.") % (bson_type, ast_type, ast_parent))
def add_bad_cpp_numeric_type_use_error(self, location, ast_type, ast_parent, cpp_type):
# type: (common.SourceLocation, unicode, unicode, unicode) -> None
# pylint: disable=invalid-name
"""Add an error about any being used in a list of bson types."""
- self._add_error(location, ERROR_ID_BAD_NUMERIC_CPP_TYPE, (
- "The C++ numeric type '%s' is not allowed for %s '%s'. Only 'std::int32_t'," +
- " 'std::uint32_t', 'std::uint64_t', and 'std::int64_t' are supported.") %
- (cpp_type, ast_type, ast_parent))
+ self._add_error(
+ location, ERROR_ID_BAD_NUMERIC_CPP_TYPE,
+ ("The C++ numeric type '%s' is not allowed for %s '%s'. Only 'std::int32_t'," +
+ " 'std::uint32_t', 'std::uint64_t', and 'std::int64_t' are supported.") % (cpp_type,
+ ast_type,
+ ast_parent))
def add_bad_array_type_name_error(self, location, field_name, type_name):
# type: (common.SourceLocation, unicode, unicode) -> None
@@ -555,9 +561,10 @@ class ParserContext(object):
# type: (common.SourceLocation, unicode, unicode) -> None
"""Add an error about field must be empty for fields of type enum."""
# pylint: disable=invalid-name
- self._add_error(location, ERROR_ID_FIELD_MUST_BE_EMPTY_FOR_ENUM, (
- "Field '%s' cannot contain a value for property '%s' when a field's type is a enum") %
- (name, field_name))
+ self._add_error(
+ location, ERROR_ID_FIELD_MUST_BE_EMPTY_FOR_ENUM,
+ ("Field '%s' cannot contain a value for property '%s' when a field's type is a enum") %
+ (name, field_name))
def add_bad_command_namespace_error(self, location, command_name, command_namespace,
valid_commands):
@@ -571,9 +578,10 @@ class ParserContext(object):
def add_bad_command_as_field_error(self, location, command_name):
# type: (common.SourceLocation, unicode) -> None
"""Add an error about using a command for a field."""
- self._add_error(location, ERROR_ID_FIELD_NO_COMMAND,
- ("Command '%s' cannot be used as a field type'. Commands must be top-level"
- + " types due to their serialization rules.") % (command_name))
+ self._add_error(
+ location, ERROR_ID_FIELD_NO_COMMAND,
+ ("Command '%s' cannot be used as a field type'. Commands must be top-level" +
+ " types due to their serialization rules.") % (command_name))
def add_bad_array_of_chain(self, location, field_name):
# type: (common.SourceLocation, unicode) -> None
@@ -585,9 +593,10 @@ class ParserContext(object):
# type: (common.SourceLocation, unicode) -> None
"""Add an error about a field being optional and having a default value."""
# pylint: disable=invalid-name
- self._add_error(location, ERROR_ID_ILLEGAL_FIELD_DEFAULT_AND_OPTIONAL, (
- "Field '%s' can only be marked as optional or have a default value," + " not both.") %
- (field_name))
+ self._add_error(
+ location, ERROR_ID_ILLEGAL_FIELD_DEFAULT_AND_OPTIONAL,
+ ("Field '%s' can only be marked as optional or have a default value," + " not both.") %
+ (field_name))
def add_bad_struct_field_as_doc_sequence_error(self, location, struct_name, field_name):
# type: (common.SourceLocation, unicode, unicode) -> None
@@ -637,8 +646,8 @@ class ParserContext(object):
except ValueError as value_error:
self._add_node_error(node, ERROR_ID_IS_NODE_VALID_INT,
- "Illegal integer value for '%s', message '%s'." %
- (node_name, value_error))
+ "Illegal integer value for '%s', message '%s'." % (node_name,
+ value_error))
return False
return True
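
Note: every errors.py hunk above is a cosmetic re-wrap of the same call shape: a problem is recorded against the parser context as a (location, error id, formatted message) triple rather than raised, so one parse pass can report every error. A minimal sketch of that accumulation pattern, using illustrative names rather than the real ParserContext API:

    class ErrorCollectorSketch(object):
        """Toy stand-in for the error-accumulating context used above."""

        def __init__(self):
            self.errors = []

        def add_error(self, location, error_id, message):
            # Record the problem instead of raising, so parsing can continue.
            self.errors.append("%s: (%s) %s" % (location, error_id, message))

    ctxt = ErrorCollectorSketch()
    ctxt.add_error("sample.idl:12", "ERROR_ID_IS_NODE_VALID_BOOL",
                   "Illegal bool value for 'strict', expected either 'true' or 'false'.")
    for err in ctxt.errors:
        print(err)
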
diff --git a/buildscripts/idl/idl/generator.py b/buildscripts/idl/idl/generator.py
index 3433ecde702..4e4f174f35b 100644
--- a/buildscripts/idl/idl/generator.py
+++ b/buildscripts/idl/idl/generator.py
@@ -72,8 +72,8 @@ def _is_required_serializer_field(field):
def _get_field_constant_name(field):
# type: (ast.Field) -> unicode
"""Get the C++ string constant name for a field."""
- return common.template_args(
- 'k${constant_name}FieldName', constant_name=common.title_case(field.cpp_name))
+ return common.template_args('k${constant_name}FieldName', constant_name=common.title_case(
+ field.cpp_name))
def _access_member(field):
@@ -188,8 +188,8 @@ class _SlowFieldUsageChecker(_FieldUsageCheckerBase):
(_get_field_constant_name(field))
with writer.IndentedScopedBlock(self._writer, pred, '}'):
if field.default:
- self._writer.write_line('%s = %s;' %
- (_get_field_member_name(field), field.default))
+ self._writer.write_line('%s = %s;' % (_get_field_member_name(field),
+ field.default))
else:
self._writer.write_line('ctxt.throwMissingField(%s);' %
(_get_field_constant_name(field)))
@@ -221,8 +221,8 @@ class _FastFieldUsageChecker(_FieldUsageCheckerBase):
if field.chained:
continue
- self._writer.write_line('const size_t %s = %d;' %
- (_gen_field_usage_constant(field), bit_id))
+ self._writer.write_line('const size_t %s = %d;' % (_gen_field_usage_constant(field),
+ bit_id))
bit_id += 1
def add_store(self, field_name):
@@ -255,12 +255,13 @@ class _FastFieldUsageChecker(_FieldUsageCheckerBase):
(_gen_field_usage_constant(field)), '}'):
if field.default:
if field.chained_struct_field:
- self._writer.write_line('%s.%s(%s);' % (
- _get_field_member_name(field.chained_struct_field),
- _get_field_member_setter_name(field), field.default))
- else:
self._writer.write_line(
- '%s = %s;' % (_get_field_member_name(field), field.default))
+ '%s.%s(%s);' %
+ (_get_field_member_name(field.chained_struct_field),
+ _get_field_member_setter_name(field), field.default))
+ else:
+ self._writer.write_line('%s = %s;' % (_get_field_member_name(field),
+ field.default))
else:
self._writer.write_line('ctxt.throwMissingField(%s);' %
(_get_field_constant_name(field)))
@@ -452,9 +453,9 @@ class _CppHeaderFileWriter(_CppFileWriterBase):
if field.chained_struct_field:
self._writer.write_template(
- '${const_type} ${param_type} ${method_name}() const { return %s.%s(); }' % (
- (_get_field_member_name(field.chained_struct_field),
- _get_field_member_getter_name(field))))
+ '${const_type} ${param_type} ${method_name}() const { return %s.%s(); }' %
+ ((_get_field_member_name(field.chained_struct_field),
+ _get_field_member_getter_name(field))))
elif cpp_type_info.disable_xvalue():
self._writer.write_template(
@@ -492,8 +493,8 @@ class _CppHeaderFileWriter(_CppFileWriterBase):
}
with self._with_template(template_params):
- self._writer.write_template('void ${method_name}(${param_type} value) & ' +
- '{ ${body} ${post_body} }')
+ self._writer.write_template(
+ 'void ${method_name}(${param_type} value) & ' + '{ ${body} ${post_body} }')
self._writer.write_empty_line()
@@ -524,16 +525,14 @@ class _CppHeaderFileWriter(_CppFileWriterBase):
for field in _get_all_fields(struct):
self._writer.write_line(
- common.template_args(
- 'static constexpr auto ${constant_name} = "${field_name}"_sd;',
- constant_name=_get_field_constant_name(field),
- field_name=field.name))
+ common.template_args('static constexpr auto ${constant_name} = "${field_name}"_sd;',
+ constant_name=_get_field_constant_name(field),
+ field_name=field.name))
if isinstance(struct, ast.Command):
self._writer.write_line(
- common.template_args(
- 'static constexpr auto kCommandName = "${struct_name}"_sd;',
- struct_name=struct.name))
+ common.template_args('static constexpr auto kCommandName = "${struct_name}"_sd;',
+ struct_name=struct.name))
def gen_enum_functions(self, idl_enum):
# type: (ast.Enum) -> None
@@ -553,10 +552,8 @@ class _CppHeaderFileWriter(_CppFileWriterBase):
'};'):
for enum_value in idl_enum.values:
self._writer.write_line(
- common.template_args(
- '${name} ${value},',
- name=enum_value.name,
- value=enum_type_info.get_cpp_value_assignment(enum_value)))
+ common.template_args('${name} ${value},', name=enum_value.name,
+ value=enum_type_info.get_cpp_value_assignment(enum_value)))
def gen_op_msg_request_methods(self, command):
# type: (ast.Command) -> None
@@ -608,24 +605,21 @@ class _CppHeaderFileWriter(_CppFileWriterBase):
"""Generate comparison operators definitions for the type."""
# pylint: disable=invalid-name
- sorted_fields = sorted(
- [
- field for field in struct.fields
- if (not field.ignore) and field.comparison_order != -1
- ],
- key=lambda f: f.comparison_order)
+ sorted_fields = sorted([
+ field for field in struct.fields if (not field.ignore) and field.comparison_order != -1
+ ], key=lambda f: f.comparison_order)
fields = [_get_field_member_name(field) for field in sorted_fields]
for rel_op in ['==', '!=', '<']:
decl = common.template_args(
"inline bool operator${rel_op}(const ${class_name}& left, const ${class_name}& right) {",
- rel_op=rel_op,
- class_name=common.title_case(struct.name))
+ rel_op=rel_op, class_name=common.title_case(struct.name))
with self._block(decl, "}"):
- self._writer.write_line('return std::tie(%s) %s std::tie(%s);' % (','.join(
- ["left.%s" % (field) for field in fields]), rel_op, ','.join(
- ["right.%s" % (field) for field in fields])))
+ self._writer.write_line('return std::tie(%s) %s std::tie(%s);' %
+ (','.join(["left.%s" % (field) for field in fields]),
+ rel_op,
+ ','.join(["right.%s" % (field) for field in fields])))
self.write_empty_line()
@@ -794,15 +788,11 @@ class _CppSourceFileWriter(_CppFileWriterBase):
if field.enum_type:
self._writer.write_line('IDLParserErrorContext tempContext(%s, &ctxt);' %
(_get_field_constant_name(field)))
- return common.template_args(
- "${method_name}(tempContext, ${expression})",
- method_name=method_name,
- expression=expression)
+ return common.template_args("${method_name}(tempContext, ${expression})",
+ method_name=method_name, expression=expression)
else:
- return common.template_args(
- "${method_name}(${expression})",
- method_name=method_name,
- expression=expression)
+ return common.template_args("${method_name}(${expression})",
+ method_name=method_name, expression=expression)
else:
# BSONObjects are allowed to be pass through without deserialization
assert field.bson_serialization_type == ['object']
@@ -901,8 +891,8 @@ class _CppSourceFileWriter(_CppFileWriterBase):
(_get_field_member_name(field.chained_struct_field),
_get_field_member_setter_name(field), object_value))
else:
- self._writer.write_line('%s = %s;' %
- (_get_field_member_name(field), object_value))
+ self._writer.write_line('%s = %s;' % (_get_field_member_name(field),
+ object_value))
def gen_doc_sequence_deserializer(self, field):
# type: (ast.Field) -> None
@@ -980,8 +970,8 @@ class _CppSourceFileWriter(_CppFileWriterBase):
# Serialize has fields third
# Add _has{FIELD} bool members to ensure fields are set before serialization.
for field in struct.fields:
- if _is_required_serializer_field(field) and not (field.name == "$db" and
- initializes_db_name):
+ if _is_required_serializer_field(field) and not (field.name == "$db"
+ and initializes_db_name):
initializers.append('%s(false)' % _get_has_field_member_name(field))
if initializes_db_name:
@@ -1152,8 +1142,7 @@ class _CppSourceFileWriter(_CppFileWriterBase):
struct_type_info = struct_types.get_struct_info(struct)
self.get_bson_deserializer_static_common(
- struct,
- struct_type_info.get_op_msg_request_deserializer_static_method(),
+ struct, struct_type_info.get_op_msg_request_deserializer_static_method(),
struct_type_info.get_op_msg_request_deserializer_method())
func_def = struct_type_info.get_op_msg_request_deserializer_method().get_definition()
@@ -1227,8 +1216,8 @@ class _CppSourceFileWriter(_CppFileWriterBase):
template_params['expression'] = expression
self._writer.write_template('arrayBuilder.append(${expression});')
else:
- expression = bson_cpp_type.gen_serializer_expression(self._writer,
- _access_member(field))
+ expression = bson_cpp_type.gen_serializer_expression(
+ self._writer, _access_member(field))
template_params['expression'] = expression
self._writer.write_template('builder->append(${field_name}, ${expression});')
@@ -1304,8 +1293,8 @@ class _CppSourceFileWriter(_CppFileWriterBase):
# Is this a scalar bson C++ type?
bson_cpp_type = cpp_types.get_bson_cpp_type(field)
- needs_custom_serializer = field.serializer or (bson_cpp_type and
- bson_cpp_type.has_serializer())
+ needs_custom_serializer = field.serializer or (bson_cpp_type
+ and bson_cpp_type.has_serializer())
optional_block_start = None
if field.optional:
@@ -1323,8 +1312,8 @@ class _CppSourceFileWriter(_CppFileWriterBase):
# Generate default serialization using BSONObjBuilder::append
# Note: BSONObjBuilder::append has overrides for std::vector also
self._writer.write_line(
- 'builder->append(%s, %s);' %
- (_get_field_constant_name(field), _access_member(field)))
+ 'builder->append(%s, %s);' % (_get_field_constant_name(field),
+ _access_member(field)))
else:
self._gen_serializer_method_struct(field)
@@ -1474,16 +1463,14 @@ class _CppSourceFileWriter(_CppFileWriterBase):
for field in _get_all_fields(struct):
self._writer.write_line(
- common.template_args(
- 'constexpr StringData ${class_name}::${constant_name};',
- class_name=common.title_case(struct.cpp_name),
- constant_name=_get_field_constant_name(field)))
+ common.template_args('constexpr StringData ${class_name}::${constant_name};',
+ class_name=common.title_case(struct.cpp_name),
+ constant_name=_get_field_constant_name(field)))
if isinstance(struct, ast.Command):
self._writer.write_line(
- common.template_args(
- 'constexpr StringData ${class_name}::kCommandName;',
- class_name=common.title_case(struct.cpp_name)))
+ common.template_args('constexpr StringData ${class_name}::kCommandName;',
+ class_name=common.title_case(struct.cpp_name)))
def gen_enum_definition(self, idl_enum):
# type: (ast.Enum) -> None
@@ -1511,13 +1498,12 @@ class _CppSourceFileWriter(_CppFileWriterBase):
for field in sorted_fields:
self._writer.write_line(
common.template_args(
- '${class_name}::${constant_name},',
- class_name=common.title_case(struct.cpp_name),
- constant_name=_get_field_constant_name(field)))
+ '${class_name}::${constant_name},', class_name=common.title_case(
+ struct.cpp_name), constant_name=_get_field_constant_name(field)))
self._writer.write_line(
- common.template_args(
- '${class_name}::kCommandName,', class_name=common.title_case(struct.cpp_name)))
+ common.template_args('${class_name}::kCommandName,', class_name=common.title_case(
+ struct.cpp_name)))
def generate(self, spec, header_file_name):
# type: (ast.IDLAST, unicode) -> None
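
Note: the generator.py hunks repeatedly re-wrap calls to common.template_args, which, as used above, fills ${placeholder} slots in a template string from keyword arguments. A rough stand-in built on the standard library, assuming that behaviour (the real helper is presumably defined in the idl package's common module, which this diff does not touch):

    import string

    def template_args_sketch(template, **kwargs):
        # Approximation only: substitute ${name}-style placeholders from kwargs.
        return string.Template(template).substitute(**kwargs)

    print(template_args_sketch('constexpr StringData ${constant_name} = "${value}"_sd;',
                               constant_name='kFooFieldName', value='foo'))
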
diff --git a/buildscripts/idl/idl/parser.py b/buildscripts/idl/idl/parser.py
index 2fdd1034b5b..90ec25b6bb6 100644
--- a/buildscripts/idl/idl/parser.py
+++ b/buildscripts/idl/idl/parser.py
@@ -97,8 +97,8 @@ def _generic_parser(
syntax_node.__dict__[first_name] = ctxt.get_list(second_node)
elif rule_desc.node_type == "mapping":
if ctxt.is_mapping_node(second_node, first_name):
- syntax_node.__dict__[first_name] = rule_desc.mapping_parser_func(ctxt,
- second_node)
+ syntax_node.__dict__[first_name] = rule_desc.mapping_parser_func(
+ ctxt, second_node)
else:
raise errors.IDLError("Unknown node_type '%s' for parser rule" %
(rule_desc.node_type))
@@ -177,15 +177,16 @@ def _parse_type(ctxt, spec, name, node):
idltype = syntax.Type(ctxt.file_name, node.start_mark.line, node.start_mark.column)
idltype.name = name
- _generic_parser(ctxt, node, "type", idltype, {
- "description": _RuleDesc('scalar', _RuleDesc.REQUIRED),
- "cpp_type": _RuleDesc('scalar', _RuleDesc.REQUIRED),
- "bson_serialization_type": _RuleDesc('scalar_or_sequence', _RuleDesc.REQUIRED),
- "bindata_subtype": _RuleDesc('scalar'),
- "serializer": _RuleDesc('scalar'),
- "deserializer": _RuleDesc('scalar'),
- "default": _RuleDesc('scalar'),
- })
+ _generic_parser(
+ ctxt, node, "type", idltype, {
+ "description": _RuleDesc('scalar', _RuleDesc.REQUIRED),
+ "cpp_type": _RuleDesc('scalar', _RuleDesc.REQUIRED),
+ "bson_serialization_type": _RuleDesc('scalar_or_sequence', _RuleDesc.REQUIRED),
+ "bindata_subtype": _RuleDesc('scalar'),
+ "serializer": _RuleDesc('scalar'),
+ "deserializer": _RuleDesc('scalar'),
+ "default": _RuleDesc('scalar'),
+ })
spec.symbols.add_type(ctxt, idltype)
@@ -196,16 +197,17 @@ def _parse_field(ctxt, name, node):
field = syntax.Field(ctxt.file_name, node.start_mark.line, node.start_mark.column)
field.name = name
- _generic_parser(ctxt, node, "field", field, {
- "description": _RuleDesc('scalar'),
- "cpp_name": _RuleDesc('scalar'),
- "type": _RuleDesc('scalar', _RuleDesc.REQUIRED),
- "ignore": _RuleDesc("bool_scalar"),
- "optional": _RuleDesc("bool_scalar"),
- "default": _RuleDesc('scalar'),
- "supports_doc_sequence": _RuleDesc("bool_scalar"),
- "comparison_order": _RuleDesc("int_scalar"),
- })
+ _generic_parser(
+ ctxt, node, "field", field, {
+ "description": _RuleDesc('scalar'),
+ "cpp_name": _RuleDesc('scalar'),
+ "type": _RuleDesc('scalar', _RuleDesc.REQUIRED),
+ "ignore": _RuleDesc("bool_scalar"),
+ "optional": _RuleDesc("bool_scalar"),
+ "default": _RuleDesc('scalar'),
+ "supports_doc_sequence": _RuleDesc("bool_scalar"),
+ "comparison_order": _RuleDesc("int_scalar"),
+ })
return field
@@ -336,16 +338,17 @@ def _parse_struct(ctxt, spec, name, node):
struct = syntax.Struct(ctxt.file_name, node.start_mark.line, node.start_mark.column)
struct.name = name
- _generic_parser(ctxt, node, "struct", struct, {
- "description": _RuleDesc('scalar', _RuleDesc.REQUIRED),
- "fields": _RuleDesc('mapping', mapping_parser_func=_parse_fields),
- "chained_types": _RuleDesc('mapping', mapping_parser_func=_parse_chained_types),
- "chained_structs": _RuleDesc('mapping', mapping_parser_func=_parse_chained_structs),
- "strict": _RuleDesc("bool_scalar"),
- "inline_chained_structs": _RuleDesc("bool_scalar"),
- "immutable": _RuleDesc('bool_scalar'),
- "generate_comparison_operators": _RuleDesc("bool_scalar"),
- })
+ _generic_parser(
+ ctxt, node, "struct", struct, {
+ "description": _RuleDesc('scalar', _RuleDesc.REQUIRED),
+ "fields": _RuleDesc('mapping', mapping_parser_func=_parse_fields),
+ "chained_types": _RuleDesc('mapping', mapping_parser_func=_parse_chained_types),
+ "chained_structs": _RuleDesc('mapping', mapping_parser_func=_parse_chained_structs),
+ "strict": _RuleDesc("bool_scalar"),
+ "inline_chained_structs": _RuleDesc("bool_scalar"),
+ "immutable": _RuleDesc('bool_scalar'),
+ "generate_comparison_operators": _RuleDesc("bool_scalar"),
+ })
# TODO: SHOULD WE ALLOW STRUCTS ONLY WITH CHAINED STUFF and no fields???
if struct.fields is None and struct.chained_types is None and struct.chained_structs is None:
@@ -392,11 +395,12 @@ def _parse_enum(ctxt, spec, name, node):
idl_enum = syntax.Enum(ctxt.file_name, node.start_mark.line, node.start_mark.column)
idl_enum.name = name
- _generic_parser(ctxt, node, "enum", idl_enum, {
- "description": _RuleDesc('scalar', _RuleDesc.REQUIRED),
- "type": _RuleDesc('scalar', _RuleDesc.REQUIRED),
- "values": _RuleDesc('mapping', mapping_parser_func=_parse_enum_values),
- })
+ _generic_parser(
+ ctxt, node, "enum", idl_enum, {
+ "description": _RuleDesc('scalar', _RuleDesc.REQUIRED),
+ "type": _RuleDesc('scalar', _RuleDesc.REQUIRED),
+ "values": _RuleDesc('mapping', mapping_parser_func=_parse_enum_values),
+ })
if idl_enum.values is None:
ctxt.add_empty_enum_error(node, idl_enum.name)
@@ -413,19 +417,20 @@ def _parse_command(ctxt, spec, name, node):
command = syntax.Command(ctxt.file_name, node.start_mark.line, node.start_mark.column)
command.name = name
- _generic_parser(ctxt, node, "command", command, {
- "description": _RuleDesc('scalar', _RuleDesc.REQUIRED),
- "chained_types": _RuleDesc('mapping', mapping_parser_func=_parse_chained_types),
- "chained_structs": _RuleDesc('mapping', mapping_parser_func=_parse_chained_structs),
- "fields": _RuleDesc('mapping', mapping_parser_func=_parse_fields),
- "namespace": _RuleDesc('scalar', _RuleDesc.REQUIRED),
- "cpp_name": _RuleDesc('scalar'),
- "type": _RuleDesc('scalar'),
- "strict": _RuleDesc("bool_scalar"),
- "inline_chained_structs": _RuleDesc("bool_scalar"),
- "immutable": _RuleDesc('bool_scalar'),
- "generate_comparison_operators": _RuleDesc("bool_scalar"),
- })
+ _generic_parser(
+ ctxt, node, "command", command, {
+ "description": _RuleDesc('scalar', _RuleDesc.REQUIRED),
+ "chained_types": _RuleDesc('mapping', mapping_parser_func=_parse_chained_types),
+ "chained_structs": _RuleDesc('mapping', mapping_parser_func=_parse_chained_structs),
+ "fields": _RuleDesc('mapping', mapping_parser_func=_parse_fields),
+ "namespace": _RuleDesc('scalar', _RuleDesc.REQUIRED),
+ "cpp_name": _RuleDesc('scalar'),
+ "type": _RuleDesc('scalar'),
+ "strict": _RuleDesc("bool_scalar"),
+ "inline_chained_structs": _RuleDesc("bool_scalar"),
+ "immutable": _RuleDesc('bool_scalar'),
+ "generate_comparison_operators": _RuleDesc("bool_scalar"),
+ })
# TODO: support the first argument as UUID depending on outcome of Catalog Versioning changes.
valid_commands = [
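
Note: the parser.py changes only re-indent an existing declarative pattern: each IDL node kind passes _generic_parser a mapping from field name to a _RuleDesc giving the expected node type and whether the field is required. A self-contained sketch of that table-driven style, with simplified hypothetical names rather than the real buildscripts code:

    class RuleDescSketch(object):
        """Simplified stand-in for _RuleDesc: node type plus required flag."""
        REQUIRED = 1
        OPTIONAL = 2

        def __init__(self, node_type, required=OPTIONAL):
            self.node_type = node_type  # e.g. 'scalar', 'bool_scalar', 'mapping'
            self.required = required

    def generic_parse_sketch(mapping, rules):
        """Apply a rule table to a parsed YAML-like mapping, collecting errors."""
        parsed, errors = {}, []
        for name, rule in rules.items():
            if name not in mapping:
                if rule.required == RuleDescSketch.REQUIRED:
                    errors.append("missing required field '%s'" % name)
                continue
            value = mapping[name]
            if rule.node_type == 'bool_scalar' and value not in ('true', 'false'):
                errors.append("Illegal bool value for '%s'" % name)
            else:
                parsed[name] = value
        return parsed, errors

    field_rules = {
        'description': RuleDescSketch('scalar'),
        'type': RuleDescSketch('scalar', RuleDescSketch.REQUIRED),
        'optional': RuleDescSketch('bool_scalar'),
    }
    print(generic_parse_sketch({'type': 'string', 'optional': 'true'}, field_rules))
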
diff --git a/buildscripts/idl/idl/struct_types.py b/buildscripts/idl/idl/struct_types.py
index 81a845daf2c..4602bfa01a0 100644
--- a/buildscripts/idl/idl/struct_types.py
+++ b/buildscripts/idl/idl/struct_types.py
@@ -44,13 +44,7 @@ class ArgumentInfo(object):
class MethodInfo(object):
"""Class that encapslates information about a method and how to declare, define, and call it."""
- def __init__(self,
- class_name,
- method_name,
- args,
- return_type=None,
- static=False,
- const=False,
+ def __init__(self, class_name, method_name, args, return_type=None, static=False, const=False,
explicit=False):
# type: (unicode, unicode, List[unicode], unicode, bool, bool, bool) -> None
# pylint: disable=too-many-arguments
@@ -84,11 +78,8 @@ class MethodInfo(object):
return common.template_args(
"${pre_modifiers}${return_type}${method_name}(${args})${post_modifiers};",
- pre_modifiers=pre_modifiers,
- return_type=return_type_str,
- method_name=self.method_name,
- args=', '.join([str(arg) for arg in self.args]),
- post_modifiers=post_modifiers)
+ pre_modifiers=pre_modifiers, return_type=return_type_str, method_name=self.method_name,
+ args=', '.join([str(arg) for arg in self.args]), post_modifiers=post_modifiers)
def get_definition(self):
# type: () -> unicode
@@ -105,12 +96,9 @@ class MethodInfo(object):
return common.template_args(
"${pre_modifiers}${return_type}${class_name}::${method_name}(${args})${post_modifiers}",
- pre_modifiers=pre_modifiers,
- return_type=return_type_str,
- class_name=self.class_name,
- method_name=self.method_name,
- args=', '.join([str(arg) for arg in self.args]),
- post_modifiers=post_modifiers)
+ pre_modifiers=pre_modifiers, return_type=return_type_str, class_name=self.class_name,
+ method_name=self.method_name, args=', '.join(
+ [str(arg) for arg in self.args]), post_modifiers=post_modifiers)
def get_call(self, obj):
# type: (Optional[unicode]) -> unicode
@@ -119,11 +107,11 @@ class MethodInfo(object):
args = ', '.join([arg.name for arg in self.args])
if obj:
- return common.template_args(
- "${obj}.${method_name}(${args});", obj=obj, method_name=self.method_name, args=args)
+ return common.template_args("${obj}.${method_name}(${args});", obj=obj,
+ method_name=self.method_name, args=args)
- return common.template_args(
- "${method_name}(${args});", method_name=self.method_name, args=args)
+ return common.template_args("${method_name}(${args});", method_name=self.method_name,
+ args=args)
class StructTypeInfoBase(object):
@@ -223,11 +211,9 @@ class _StructTypeInfo(StructTypeInfoBase):
def get_deserializer_static_method(self):
# type: () -> MethodInfo
class_name = common.title_case(self._struct.cpp_name)
- return MethodInfo(
- class_name,
- 'parse', ['const IDLParserErrorContext& ctxt', 'const BSONObj& bsonObject'],
- class_name,
- static=True)
+ return MethodInfo(class_name, 'parse',
+ ['const IDLParserErrorContext& ctxt', 'const BSONObj& bsonObject'],
+ class_name, static=True)
def get_deserializer_method(self):
# type: () -> MethodInfo
@@ -238,10 +224,8 @@ class _StructTypeInfo(StructTypeInfoBase):
def get_serializer_method(self):
# type: () -> MethodInfo
return MethodInfo(
- common.title_case(self._struct.cpp_name),
- 'serialize', ['BSONObjBuilder* builder'],
- 'void',
- const=True)
+ common.title_case(self._struct.cpp_name), 'serialize', ['BSONObjBuilder* builder'],
+ 'void', const=True)
def get_to_bson_method(self):
# type: () -> MethodInfo
@@ -290,19 +274,15 @@ class _CommandBaseTypeInfo(_StructTypeInfo):
def get_op_msg_request_serializer_method(self):
# type: () -> Optional[MethodInfo]
return MethodInfo(
- common.title_case(self._struct.cpp_name),
- 'serialize', ['const BSONObj& commandPassthroughFields'],
- 'OpMsgRequest',
- const=True)
+ common.title_case(self._struct.cpp_name), 'serialize',
+ ['const BSONObj& commandPassthroughFields'], 'OpMsgRequest', const=True)
def get_op_msg_request_deserializer_static_method(self):
# type: () -> Optional[MethodInfo]
class_name = common.title_case(self._struct.cpp_name)
- return MethodInfo(
- class_name,
- 'parse', ['const IDLParserErrorContext& ctxt', 'const OpMsgRequest& request'],
- class_name,
- static=True)
+ return MethodInfo(class_name, 'parse',
+ ['const IDLParserErrorContext& ctxt', 'const OpMsgRequest& request'],
+ class_name, static=True)
def get_op_msg_request_deserializer_method(self):
# type: () -> Optional[MethodInfo]
@@ -324,19 +304,16 @@ class _IgnoredCommandTypeInfo(_CommandBaseTypeInfo):
def get_serializer_method(self):
# type: () -> MethodInfo
return MethodInfo(
- common.title_case(self._struct.cpp_name),
- 'serialize', ['const BSONObj& commandPassthroughFields', 'BSONObjBuilder* builder'],
- 'void',
+ common.title_case(self._struct.cpp_name), 'serialize',
+ ['const BSONObj& commandPassthroughFields', 'BSONObjBuilder* builder'], 'void',
const=True)
def get_to_bson_method(self):
# type: () -> MethodInfo
# Commands that require namespaces require it as a parameter to serialize()
return MethodInfo(
- common.title_case(self._struct.cpp_name),
- 'toBSON', ['const BSONObj& commandPassthroughFields'],
- 'BSONObj',
- const=True)
+ common.title_case(self._struct.cpp_name), 'toBSON',
+ ['const BSONObj& commandPassthroughFields'], 'BSONObj', const=True)
def get_deserializer_static_method(self):
# type: () -> MethodInfo
@@ -388,18 +365,15 @@ class _CommandFromType(_CommandBaseTypeInfo):
def get_serializer_method(self):
# type: () -> MethodInfo
return MethodInfo(
- common.title_case(self._struct.cpp_name),
- 'serialize', ['const BSONObj& commandPassthroughFields', 'BSONObjBuilder* builder'],
- 'void',
+ common.title_case(self._struct.cpp_name), 'serialize',
+ ['const BSONObj& commandPassthroughFields', 'BSONObjBuilder* builder'], 'void',
const=True)
def get_to_bson_method(self):
# type: () -> MethodInfo
return MethodInfo(
- common.title_case(self._struct.cpp_name),
- 'toBSON', ['const BSONObj& commandPassthroughFields'],
- 'BSONObj',
- const=True)
+ common.title_case(self._struct.cpp_name), 'toBSON',
+ ['const BSONObj& commandPassthroughFields'], 'BSONObj', const=True)
def get_deserializer_method(self):
# type: () -> MethodInfo
@@ -443,18 +417,15 @@ class _CommandWithNamespaceTypeInfo(_CommandBaseTypeInfo):
def get_serializer_method(self):
# type: () -> MethodInfo
return MethodInfo(
- common.title_case(self._struct.cpp_name),
- 'serialize', ['const BSONObj& commandPassthroughFields', 'BSONObjBuilder* builder'],
- 'void',
+ common.title_case(self._struct.cpp_name), 'serialize',
+ ['const BSONObj& commandPassthroughFields', 'BSONObjBuilder* builder'], 'void',
const=True)
def get_to_bson_method(self):
# type: () -> MethodInfo
return MethodInfo(
- common.title_case(self._struct.cpp_name),
- 'toBSON', ['const BSONObj& commandPassthroughFields'],
- 'BSONObj',
- const=True)
+ common.title_case(self._struct.cpp_name), 'toBSON',
+ ['const BSONObj& commandPassthroughFields'], 'BSONObj', const=True)
def get_deserializer_method(self):
# type: () -> MethodInfo
@@ -480,8 +451,8 @@ class _CommandWithNamespaceTypeInfo(_CommandBaseTypeInfo):
# type: (writer.IndentedTextWriter, unicode, unicode) -> None
# TODO: should the name of the first element be validated??
indented_writer.write_line('invariant(_nss.isEmpty());')
- indented_writer.write_line('_nss = ctxt.parseNSCollectionRequired(%s, %s);' %
- (db_name, element))
+ indented_writer.write_line('_nss = ctxt.parseNSCollectionRequired(%s, %s);' % (db_name,
+ element))
def get_struct_info(struct):
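
Note: the struct_types.py hunks re-wrap constructor calls for MethodInfo, which bundles a method's class name, name, argument list, and modifiers so one description can be rendered as a declaration, a definition, or a call. A stripped-down illustration of that idea (hypothetical names, far simpler than the real class):

    class MethodSketch(object):
        """Toy version of the MethodInfo idea: one description, several renderings."""

        def __init__(self, class_name, method_name, args, return_type='void', const=False):
            self.class_name = class_name
            self.method_name = method_name
            self.args = args
            self.return_type = return_type
            self.const = const

        def get_declaration(self):
            return '%s %s(%s)%s;' % (self.return_type, self.method_name, ', '.join(self.args),
                                     ' const' if self.const else '')

        def get_definition(self):
            return '%s %s::%s(%s)%s' % (self.return_type, self.class_name, self.method_name,
                                        ', '.join(self.args), ' const' if self.const else '')

    method = MethodSketch('Foo', 'serialize', ['BSONObjBuilder* builder'], const=True)
    print(method.get_declaration())  # void serialize(BSONObjBuilder* builder) const;
    print(method.get_definition())   # void Foo::serialize(BSONObjBuilder* builder) const
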
diff --git a/buildscripts/idl/idl/syntax.py b/buildscripts/idl/idl/syntax.py
index 4aca9e333fe..049114b5d9f 100644
--- a/buildscripts/idl/idl/syntax.py
+++ b/buildscripts/idl/idl/syntax.py
@@ -35,8 +35,8 @@ class IDLParsedSpec(object):
def __init__(self, spec, error_collection):
# type: (IDLSpec, errors.ParserErrorCollection) -> None
"""Must specify either an IDL document or errors, not both."""
- assert (spec is None and error_collection is not None) or (spec is not None and
- error_collection is None)
+ assert (spec is None and error_collection is not None) or (spec is not None
+ and error_collection is None)
self.spec = spec
self.errors = error_collection
@@ -81,8 +81,8 @@ def _zip_scalar(items, obj):
def _item_and_type(dic):
# type: (Dict[Any, List[Any]]) -> Iterator[Tuple[Any, Any]]
"""Return an Iterator of (key, value) pairs from a dictionary."""
- return itertools.chain.from_iterable((_zip_scalar(value, key)
- for (key, value) in dic.viewitems()))
+ return itertools.chain.from_iterable(
+ (_zip_scalar(value, key) for (key, value) in dic.viewitems()))
class SymbolTable(object):
diff --git a/buildscripts/idl/idlc.py b/buildscripts/idl/idlc.py
index c28c57166cf..04ecb9ecfe7 100644
--- a/buildscripts/idl/idlc.py
+++ b/buildscripts/idl/idlc.py
@@ -35,26 +35,18 @@ def main():
parser.add_argument('--header', type=str, help="IDL output header file")
- parser.add_argument(
- '-i',
- '--include',
- type=str,
- action="append",
- help="Directory to search for IDL import files")
+ parser.add_argument('-i', '--include', type=str, action="append",
+ help="Directory to search for IDL import files")
parser.add_argument('-v', '--verbose', action='count', help="Enable verbose tracing")
parser.add_argument('--base_dir', type=str, help="IDL output relative base directory")
- parser.add_argument(
- '--write-dependencies',
- action='store_true',
- help='only print out a list of dependent imports')
+ parser.add_argument('--write-dependencies', action='store_true',
+ help='only print out a list of dependent imports')
- parser.add_argument(
- '--target_arch',
- type=str,
- help="IDL target archiecture (amd64, s390x). defaults to current machine")
+ parser.add_argument('--target_arch', type=str,
+ help="IDL target archiecture (amd64, s390x). defaults to current machine")
args = parser.parse_args()
diff --git a/buildscripts/idl/tests/test_generator.py b/buildscripts/idl/tests/test_generator.py
index 3a1321de77d..bf57704bfd4 100644
--- a/buildscripts/idl/tests/test_generator.py
+++ b/buildscripts/idl/tests/test_generator.py
@@ -48,7 +48,8 @@ class TestGenerator(testcase.IDLTestcase):
os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
src_dir = os.path.join(
base_dir,
- 'src', )
+ 'src',
+ )
idl_dir = os.path.join(src_dir, 'mongo', 'idl')
args = idl.compiler.CompilerArgs()
diff --git a/buildscripts/idl/tests/test_import.py b/buildscripts/idl/tests/test_import.py
index 4afea4e792b..7472aa09d7a 100644
--- a/buildscripts/idl/tests/test_import.py
+++ b/buildscripts/idl/tests/test_import.py
@@ -236,8 +236,7 @@ class TestImport(testcase.IDLTestcase):
strict: false
fields:
foo: string
- """),
- resolver=resolver)
+ """), resolver=resolver)
# Test nested import
self.assert_bind(
@@ -256,8 +255,7 @@ class TestImport(testcase.IDLTestcase):
foo: string
foo1: int
foo2: double
- """),
- resolver=resolver)
+ """), resolver=resolver)
# Test diamond import
self.assert_bind(
@@ -278,8 +276,7 @@ class TestImport(testcase.IDLTestcase):
foo1: int
foo2: double
foo3: bool
- """),
- resolver=resolver)
+ """), resolver=resolver)
# Test cycle import
self.assert_bind(
@@ -297,8 +294,7 @@ class TestImport(testcase.IDLTestcase):
fields:
foo: string
foo1: bool
- """),
- resolver=resolver)
+ """), resolver=resolver)
# Test self cycle import
self.assert_bind(
@@ -315,8 +311,7 @@ class TestImport(testcase.IDLTestcase):
strict: false
fields:
foo: string
- """),
- resolver=resolver)
+ """), resolver=resolver)
def test_import_negative(self):
# type: () -> None
@@ -373,9 +368,7 @@ class TestImport(testcase.IDLTestcase):
textwrap.dedent("""
imports:
- "notfound.idl"
- """),
- idl.errors.ERROR_ID_BAD_IMPORT,
- resolver=resolver)
+ """), idl.errors.ERROR_ID_BAD_IMPORT, resolver=resolver)
# Duplicate types
self.assert_parse_fail(
@@ -388,9 +381,7 @@ class TestImport(testcase.IDLTestcase):
description: foo
cpp_type: foo
bson_serialization_type: string
- """),
- idl.errors.ERROR_ID_DUPLICATE_SYMBOL,
- resolver=resolver)
+ """), idl.errors.ERROR_ID_DUPLICATE_SYMBOL, resolver=resolver)
# Duplicate structs
self.assert_parse_fail(
@@ -403,9 +394,7 @@ class TestImport(testcase.IDLTestcase):
description: foo
fields:
foo1: string
- """),
- idl.errors.ERROR_ID_DUPLICATE_SYMBOL,
- resolver=resolver)
+ """), idl.errors.ERROR_ID_DUPLICATE_SYMBOL, resolver=resolver)
# Duplicate struct and type
self.assert_parse_fail(
@@ -418,9 +407,7 @@ class TestImport(testcase.IDLTestcase):
description: foo
fields:
foo1: string
- """),
- idl.errors.ERROR_ID_DUPLICATE_SYMBOL,
- resolver=resolver)
+ """), idl.errors.ERROR_ID_DUPLICATE_SYMBOL, resolver=resolver)
# Duplicate type and struct
self.assert_parse_fail(
@@ -433,9 +420,7 @@ class TestImport(testcase.IDLTestcase):
description: foo
cpp_type: foo
bson_serialization_type: string
- """),
- idl.errors.ERROR_ID_DUPLICATE_SYMBOL,
- resolver=resolver)
+ """), idl.errors.ERROR_ID_DUPLICATE_SYMBOL, resolver=resolver)
# Duplicate enums
self.assert_parse_fail(
@@ -450,9 +435,7 @@ class TestImport(testcase.IDLTestcase):
values:
a0: 0
b1: 1
- """),
- idl.errors.ERROR_ID_DUPLICATE_SYMBOL,
- resolver=resolver)
+ """), idl.errors.ERROR_ID_DUPLICATE_SYMBOL, resolver=resolver)
# Import a file with errors
self.assert_parse_fail(
@@ -466,9 +449,7 @@ class TestImport(testcase.IDLTestcase):
description: foo
cpp_type: foo
bson_serialization_type: string
- """),
- idl.errors.ERROR_ID_MISSING_REQUIRED_FIELD,
- resolver=resolver)
+ """), idl.errors.ERROR_ID_MISSING_REQUIRED_FIELD, resolver=resolver)
if __name__ == '__main__':
diff --git a/buildscripts/idl/tests/test_parser.py b/buildscripts/idl/tests/test_parser.py
index 5a94514e88a..a8686166df9 100644
--- a/buildscripts/idl/tests/test_parser.py
+++ b/buildscripts/idl/tests/test_parser.py
@@ -223,9 +223,7 @@ class TestParser(testcase.IDLTestcase):
textwrap.dedent("""
types:
- foo:
- """),
- idl.errors.ERROR_ID_IS_NODE_TYPE,
- multiple=True)
+ """), idl.errors.ERROR_ID_IS_NODE_TYPE, multiple=True)
# test list instead of scalar
self.assert_parse_fail(
@@ -233,9 +231,7 @@ class TestParser(testcase.IDLTestcase):
types:
foo:
- bar
- """),
- idl.errors.ERROR_ID_IS_NODE_TYPE,
- multiple=True)
+ """), idl.errors.ERROR_ID_IS_NODE_TYPE, multiple=True)
# test map instead of scalar
self.assert_parse_fail(
@@ -244,9 +240,7 @@ class TestParser(testcase.IDLTestcase):
foo:
description:
foo: bar
- """),
- idl.errors.ERROR_ID_IS_NODE_TYPE,
- multiple=True)
+ """), idl.errors.ERROR_ID_IS_NODE_TYPE, multiple=True)
# test missing bson_serialization_type field
self.assert_parse_fail(
@@ -723,9 +717,7 @@ class TestParser(testcase.IDLTestcase):
textwrap.dedent("""
enums:
- foo:
- """),
- idl.errors.ERROR_ID_IS_NODE_TYPE,
- multiple=True)
+ """), idl.errors.ERROR_ID_IS_NODE_TYPE, multiple=True)
# test list instead of scalar
self.assert_parse_fail(
@@ -733,9 +725,7 @@ class TestParser(testcase.IDLTestcase):
enums:
foo:
- bar
- """),
- idl.errors.ERROR_ID_IS_NODE_TYPE,
- multiple=True)
+ """), idl.errors.ERROR_ID_IS_NODE_TYPE, multiple=True)
# test missing type field
self.assert_parse_fail(
diff --git a/buildscripts/jiraclient.py b/buildscripts/jiraclient.py
index a6896a6186d..7d798a9a4b5 100644
--- a/buildscripts/jiraclient.py
+++ b/buildscripts/jiraclient.py
@@ -12,24 +12,16 @@ class JiraClient(object):
FIXED_RESOLUTION_NAME = "Fixed"
WONT_FIX_RESOLUTION_NAME = "Won't Fix"
- def __init__(self,
- server,
- username=None,
- password=None,
- access_token=None,
- access_token_secret=None,
- consumer_key=None,
- key_cert=None):
+ def __init__(self, server, username=None, password=None, access_token=None,
+ access_token_secret=None, consumer_key=None, key_cert=None):
"""Initialize the JiraClient with the server URL and user credentials."""
opts = {"server": server, "verify": True}
basic_auth = None
oauth_dict = None
if access_token and access_token_secret and consumer_key and key_cert:
oauth_dict = {
- "access_token": access_token,
- "access_token_secret": access_token_secret,
- "consumer_key": consumer_key,
- "key_cert": key_cert
+ "access_token": access_token, "access_token_secret": access_token_secret,
+ "consumer_key": consumer_key, "key_cert": key_cert
}
elif username and password:
basic_auth = (username, password)
@@ -37,18 +29,17 @@ class JiraClient(object):
raise TypeError("Must specify Basic Auth (using arguments username & password)"
" or OAuth (using arguments access_token, access_token_secret,"
" consumer_key & key_cert_file) credentials")
- self._jira = jira.JIRA(
- options=opts, basic_auth=basic_auth, oauth=oauth_dict, validate=True)
+ self._jira = jira.JIRA(options=opts, basic_auth=basic_auth, oauth=oauth_dict, validate=True)
self._transitions = {}
self._resolutions = {}
def create_issue(self, project, summary, description, labels=None):
"""Create an issue."""
- fields = {"project": project,
- "issuetype": {"name": "Task"},
- "summary": summary,
- "description": description}
+ fields = {
+ "project": project, "issuetype": {"name": "Task"}, "summary": summary,
+ "description": description
+ }
new_issue = self._jira.create_issue(fields=fields)
if labels:
new_issue.update(fields={"labels": labels})
diff --git a/buildscripts/lint.py b/buildscripts/lint.py
index d4061a9b045..68d0252b6ef 100644
--- a/buildscripts/lint.py
+++ b/buildscripts/lint.py
@@ -1,10 +1,10 @@
-
import sys
import codecs
import cpplint
import utils
+
class CheckForConfigH:
def __init__(self):
self.found_configh = False
@@ -20,112 +20,107 @@ class CheckForConfigH:
error(filename, line_num, 'build/config_h_include', 5,
'MONGO_CONFIG define used without prior inclusion of config.h.')
-def run_lint( paths, nudgeOn=False ):
+
+def run_lint(paths, nudgeOn=False):
# errors are as of 10/14
# idea is not to let in any new type of error
# as we knock one out, we should remove the line
# note: not all of these are things we want, so please check first
- nudge = [] # things we'd like to turn on sson, so don't make worse
- later = [] # things that are unlikely anytime soon, so meh
- never = [] # things we totally disagree with
-
- nudge.append( '-build/c++11' ) # errors found: 6
- never.append( '-build/header_guard' ) # errors found: 345
- nudge.append( '-build/include' ) # errors found: 924
- nudge.append( '-build/include_order' ) # errors found: 511
- nudge.append( '-build/include_what_you_use' ) # errors found: 986
- nudge.append( '-build/namespaces' ) # errors found: 131
- never.append( '-readability/braces' ) # errors found: 880
- later.append( '-readability/casting' ) # errors found: 748
- nudge.append( '-readability/check' ) # errors found: 7
- nudge.append( '-readability/fn_size' ) # errors found: 1
- nudge.append( '-readability/function' ) # errors found: 49
- nudge.append( '-readability/inheritance' ) # errors found: 7
- nudge.append( '-readability/multiline_comment' ) # errors found: 1
- later.append( '-readability/namespace' ) # errors found: 876
- later.append( '-readability/streams' ) # errors found: 72
- later.append( '-readability/todo' ) # errors found: 309
- nudge.append( '-runtime/arrays' ) # errors found: 5
- later.append( '-runtime/explicit' ) # errors found: 322
- never.append( '-runtime/indentation_namespace') # errors found: 4601
- later.append( '-runtime/int' ) # errors found: 1420
- later.append( '-runtime/printf' ) # errors found: 29
- nudge.append( '-runtime/references' ) # errors found: 1338
- nudge.append( '-runtime/string' ) # errors found: 6
- nudge.append( '-runtime/threadsafe_fn' ) # errors found: 46
- never.append( '-whitespace/blank_line' ) # errors found: 2080
- never.append( '-whitespace/braces' ) # errors found: 962
- later.append( '-whitespace/comma' ) # errors found: 621
- later.append( '-whitespace/comments' ) # errors found: 2189
- nudge.append( '-whitespace/empty_loop_body' ) # errors found: 19
- later.append( '-whitespace/end_of_line' ) # errors found: 4340
- later.append( '-whitespace/line_length' ) # errors found: 14500
- never.append( '-whitespace/indent' ) # errors found: 4108
- later.append( '-whitespace/newline' ) # errors found: 1520
- nudge.append( '-whitespace/operators' ) # errors found: 2297
- never.append( '-whitespace/parens' ) # errors found: 49058
- nudge.append( '-whitespace/semicolon' ) # errors found: 121
- nudge.append( '-whitespace/tab' ) # errors found: 233
+ nudge = [] # things we'd like to turn on sson, so don't make worse
+ later = [] # things that are unlikely anytime soon, so meh
+ never = [] # things we totally disagree with
+
+ nudge.append('-build/c++11') # errors found: 6
+ never.append('-build/header_guard') # errors found: 345
+ nudge.append('-build/include') # errors found: 924
+ nudge.append('-build/include_order') # errors found: 511
+ nudge.append('-build/include_what_you_use') # errors found: 986
+ nudge.append('-build/namespaces') # errors found: 131
+ never.append('-readability/braces') # errors found: 880
+ later.append('-readability/casting') # errors found: 748
+ nudge.append('-readability/check') # errors found: 7
+ nudge.append('-readability/fn_size') # errors found: 1
+ nudge.append('-readability/function') # errors found: 49
+ nudge.append('-readability/inheritance') # errors found: 7
+ nudge.append('-readability/multiline_comment') # errors found: 1
+ later.append('-readability/namespace') # errors found: 876
+ later.append('-readability/streams') # errors found: 72
+ later.append('-readability/todo') # errors found: 309
+ nudge.append('-runtime/arrays') # errors found: 5
+ later.append('-runtime/explicit') # errors found: 322
+ never.append('-runtime/indentation_namespace') # errors found: 4601
+ later.append('-runtime/int') # errors found: 1420
+ later.append('-runtime/printf') # errors found: 29
+ nudge.append('-runtime/references') # errors found: 1338
+ nudge.append('-runtime/string') # errors found: 6
+ nudge.append('-runtime/threadsafe_fn') # errors found: 46
+ never.append('-whitespace/blank_line') # errors found: 2080
+ never.append('-whitespace/braces') # errors found: 962
+ later.append('-whitespace/comma') # errors found: 621
+ later.append('-whitespace/comments') # errors found: 2189
+ nudge.append('-whitespace/empty_loop_body') # errors found: 19
+ later.append('-whitespace/end_of_line') # errors found: 4340
+ later.append('-whitespace/line_length') # errors found: 14500
+ never.append('-whitespace/indent') # errors found: 4108
+ later.append('-whitespace/newline') # errors found: 1520
+ nudge.append('-whitespace/operators') # errors found: 2297
+ never.append('-whitespace/parens') # errors found: 49058
+ nudge.append('-whitespace/semicolon') # errors found: 121
+ nudge.append('-whitespace/tab') # errors found: 233
filters = later + never
if not nudgeOn:
filters = filters + nudge
-
sourceFiles = []
for x in paths:
- utils.getAllSourceFiles( sourceFiles, x )
-
+ utils.getAllSourceFiles(sourceFiles, x)
- args = ["--linelength=100",
- "--filter=" + ",".join( filters ),
- "--counting=detailed" ] + sourceFiles
- filenames = cpplint.ParseArguments( args )
+ args = ["--linelength=100", "--filter=" + ",".join(filters), "--counting=detailed"
+ ] + sourceFiles
+ filenames = cpplint.ParseArguments(args)
def _ourIsTestFilename(fn):
- if fn.find( "dbtests" ) >= 0:
+ if fn.find("dbtests") >= 0:
return True
- if fn.endswith( "_test.cpp" ):
+ if fn.endswith("_test.cpp"):
return True
return False
-
+
cpplint._IsTestFilename = _ourIsTestFilename
# Change stderr to write with replacement characters so we don't die
# if we try to print something containing non-ASCII characters.
- sys.stderr = codecs.StreamReaderWriter(sys.stderr,
- codecs.getreader('utf8'),
- codecs.getwriter('utf8'),
- 'replace')
+ sys.stderr = codecs.StreamReaderWriter(sys.stderr, codecs.getreader('utf8'),
+ codecs.getwriter('utf8'), 'replace')
cpplint._cpplint_state.ResetErrorCounts()
for filename in filenames:
config_h_check_obj = CheckForConfigH()
- cpplint.ProcessFile(filename,
- cpplint._cpplint_state.verbose_level,
- extra_check_functions=[config_h_check_obj])
+ cpplint.ProcessFile(filename, cpplint._cpplint_state.verbose_level,
+ extra_check_functions=[config_h_check_obj])
cpplint._cpplint_state.PrintErrorCounts()
-
+
return cpplint._cpplint_state.error_count == 0
if __name__ == "__main__":
paths = []
nudge = False
-
+
for arg in sys.argv[1:]:
- if arg.startswith( "--" ):
+ if arg.startswith("--"):
arg = arg[2:]
if arg == "nudge":
nudge = True
continue
else:
- print( "unknown arg [%s]" % arg )
+ print("unknown arg [%s]" % arg)
sys.exit(-1)
- paths.append( arg )
+ paths.append(arg)
if len(paths) == 0:
- paths.append( "src/mongo/" )
+ paths.append("src/mongo/")
- if not run_lint( paths, nudge ):
+ if not run_lint(paths, nudge):
sys.exit(-1)
diff --git a/buildscripts/linter/yapf.py b/buildscripts/linter/yapf.py
index 86955981092..1ea3da7bae3 100644
--- a/buildscripts/linter/yapf.py
+++ b/buildscripts/linter/yapf.py
@@ -13,7 +13,7 @@ class YapfLinter(base.LinterBase):
def __init__(self):
# type: () -> None
"""Create a yapf linter."""
- super(YapfLinter, self).__init__("yapf", "yapf 0.16.0")
+ super(YapfLinter, self).__init__("yapf", "yapf 0.21.0")
def get_lint_version_cmd_args(self):
# type: () -> List[str]
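
Note: the only functional change in linter/yapf.py is the expected yapf version moving from 0.16.0 to 0.21.0; the surrounding hunks are the result of running that formatter over buildscripts. A hedged sketch of reproducing such a reformat through yapf's Python API; the project's actual settings live in its .style.yapf, which is not shown in this diff, so the 'pep8' style below is only a placeholder:

    # Sketch only: format a snippet with yapf's library API.
    # In yapf 0.21.x, FormatCode returns a (formatted_source, changed) tuple.
    from yapf.yapflib.yapf_api import FormatCode

    source = (
        "x = template_args(\n"
        "    '${name} ${value},',\n"
        "    name=value.name,\n"
        "    value=value.cpp_value)\n"
    )
    formatted, changed = FormatCode(source, style_config='pep8')
    print(changed)
    print(formatted)
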
diff --git a/buildscripts/make_archive.py b/buildscripts/make_archive.py
index 2671fa530dd..f3537cebc25 100755
--- a/buildscripts/make_archive.py
+++ b/buildscripts/make_archive.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-
'''Helper script for constructing an archive (zip or tar) from a list of files.
The output format (tar, tgz, zip) is determined from the file name, unless the user specifies
@@ -35,6 +34,7 @@ import zipfile
import tempfile
from subprocess import (Popen, PIPE, STDOUT)
+
def main(argv):
args = []
for arg in argv[1:]:
@@ -54,6 +54,7 @@ def main(argv):
else:
raise ValueError('Unsupported archive format "%s"' % opts.archive_format)
+
def delete_directory(dir):
'''Recursively deletes a directory and its contents.
'''
@@ -62,6 +63,7 @@ def delete_directory(dir):
except Exception:
pass
+
def make_tar_archive(opts):
'''Given the parsed options, generates the 'opt.output_filename'
tarball containing all the files in 'opt.input_filename' renamed
@@ -81,10 +83,7 @@ def make_tar_archive(opts):
tar_options += "z"
# clean and create a temp directory to copy files to
- enclosing_archive_directory = tempfile.mkdtemp(
- prefix='archive_',
- dir=os.path.abspath('build')
- )
+ enclosing_archive_directory = tempfile.mkdtemp(prefix='archive_', dir=os.path.abspath('build'))
output_tarfile = os.path.join(os.getcwd(), opts.output_filename)
tar_command = ["tar", tar_options, output_tarfile]
@@ -111,6 +110,7 @@ def make_tar_archive(opts):
# delete temp directory
delete_directory(enclosing_archive_directory)
+
def make_zip_archive(opts):
'''Given the parsed options, generates the 'opt.output_filename'
zipfile containing all the files in 'opt.input_filename' renamed
@@ -122,8 +122,8 @@ def make_zip_archive(opts):
archive = open_zip_archive_for_write(opts.output_filename)
try:
for input_filename in opts.input_filenames:
- archive.add(input_filename, arcname=get_preferred_filename(input_filename,
- opts.transformations))
+ archive.add(input_filename, arcname=get_preferred_filename(
+ input_filename, opts.transformations))
finally:
archive.close()
@@ -132,10 +132,10 @@ def parse_options(args):
parser = optparse.OptionParser()
parser.add_option('-o', dest='output_filename', default=None,
help='Name of the archive to output.', metavar='FILE')
- parser.add_option('--format', dest='archive_format', default=None,
- choices=('zip', 'tar', 'tgz'),
- help='Format of archive to create. '
- 'If omitted, use the suffix of the output filename to decide.')
+ parser.add_option('--format', dest='archive_format', default=None, choices=('zip', 'tar',
+ 'tgz'),
+ help=('Format of archive to create. '
+ 'If omitted, use the suffix of the output filename to decide.'))
parser.add_option('--transform', action='append', dest='transformations', default=[])
(opts, input_filenames) = parser.parse_args(args)
@@ -158,28 +158,33 @@ def parse_options(args):
elif opts.output_filename.endswith('.tar'):
opts.archive_format = 'tar'
else:
- parser.error('Could not deduce archive format from output filename "%s"' %
- opts.output_filename)
+ parser.error(
+ 'Could not deduce archive format from output filename "%s"' % opts.output_filename)
try:
opts.transformations = [
xform.replace(os.path.altsep or os.path.sep, os.path.sep).split('=', 1)
- for xform in opts.transformations]
+ for xform in opts.transformations
+ ]
except Exception, e:
parser.error(e)
return opts
+
def open_zip_archive_for_write(filename):
'''Open a zip archive for writing and return it.
'''
+
# Infuriatingly, Zipfile calls the "add" method "write", but they're otherwise identical,
# for our purposes. WrappedZipFile is a minimal adapter class.
class WrappedZipFile(zipfile.ZipFile):
def add(self, filename, arcname):
return self.write(filename, arcname)
+
return WrappedZipFile(filename, 'w', zipfile.ZIP_DEFLATED)
+
def get_preferred_filename(input_filename, transformations):
'''Does a prefix substitution on 'input_filename' for the
first matching transformation in 'transformations' and
@@ -192,6 +197,7 @@ def get_preferred_filename(input_filename, transformations):
return replace + input_filename[len(match):]
return input_filename
+
if __name__ == '__main__':
main(sys.argv)
sys.exit(0)
diff --git a/buildscripts/make_vcxproj.py b/buildscripts/make_vcxproj.py
index e4c2d7a52f9..c0d18fffd2f 100644
--- a/buildscripts/make_vcxproj.py
+++ b/buildscripts/make_vcxproj.py
@@ -33,6 +33,7 @@ VCXPROJ_FOOTER = r"""
</Project>
"""
+
def get_defines(args):
"""Parse a compiler argument list looking for defines"""
ret = set()
@@ -41,6 +42,7 @@ def get_defines(args):
ret.add(arg[2:])
return ret
+
def get_includes(args):
"""Parse a compiler argument list looking for includes"""
ret = set()
@@ -49,8 +51,10 @@ def get_includes(args):
ret.add(arg[2:])
return ret
+
class ProjFileGenerator(object):
"""Generate a .vcxproj and .vcxprof.filters file"""
+
def __init__(self, target):
# we handle DEBUG in the vcxproj header:
self.common_defines = set()
@@ -75,8 +79,8 @@ class ProjFileGenerator(object):
with open('buildscripts/vcxproj.header', 'r') as header_file:
header_str = header_file.read()
header_str = header_str.replace("%_TARGET_%", self.target)
- header_str = header_str.replace("%AdditionalIncludeDirectories%",
- ';'.join(sorted(self.includes)))
+ header_str = header_str.replace("%AdditionalIncludeDirectories%", ';'.join(
+ sorted(self.includes)))
self.vcxproj.write(header_str)
common_defines = self.all_defines
@@ -84,19 +88,18 @@ class ProjFileGenerator(object):
common_defines = common_defines.intersection(c['defines'])
self.vcxproj.write("<!-- common_defines -->\n")
- self.vcxproj.write("<ItemDefinitionGroup><ClCompile><PreprocessorDefinitions>"
- + ';'.join(common_defines) + ";%(PreprocessorDefinitions)\n")
+ self.vcxproj.write("<ItemDefinitionGroup><ClCompile><PreprocessorDefinitions>" +
+ ';'.join(common_defines) + ";%(PreprocessorDefinitions)\n")
self.vcxproj.write("</PreprocessorDefinitions></ClCompile></ItemDefinitionGroup>\n")
self.vcxproj.write(" <ItemGroup>\n")
for command in self.compiles:
defines = command["defines"].difference(common_defines)
if len(defines) > 0:
- self.vcxproj.write(" <ClCompile Include=\"" + command["file"] +
- "\"><PreprocessorDefinitions>" +
- ';'.join(defines) +
- ";%(PreprocessorDefinitions)" +
- "</PreprocessorDefinitions></ClCompile>\n")
+ self.vcxproj.write(
+ " <ClCompile Include=\"" + command["file"] + "\"><PreprocessorDefinitions>" +
+ ';'.join(defines) + ";%(PreprocessorDefinitions)" +
+ "</PreprocessorDefinitions></ClCompile>\n")
else:
self.vcxproj.write(" <ClCompile Include=\"" + command["file"] + "\" />\n")
self.vcxproj.write(" </ItemGroup>\n")
@@ -141,7 +144,7 @@ class ProjFileGenerator(object):
for arg in get_includes(args):
self.includes.add(arg)
- self.compiles.append({"file" : file_name, "defines" : file_defines})
+ self.compiles.append({"file": file_name, "defines": file_defines})
def __is_header(self, name):
"""Is this a header file?"""
@@ -167,7 +170,7 @@ class ProjFileGenerator(object):
for directory in dirs:
if not os.path.exists(directory):
print(("Warning: skipping include file scan for directory '%s'" +
- " because it does not exist.") % str(directory))
+ " because it does not exist.") % str(directory))
continue
# Get all the header files
@@ -239,6 +242,7 @@ class ProjFileGenerator(object):
self.vcxproj.write(" <None Include='%s' />\n" % file_name)
self.vcxproj.write(" </ItemGroup>\n")
+
def main():
if len(sys.argv) != 2:
print r"Usage: python buildscripts\make_vcxproj.py FILE_NAME"
@@ -253,4 +257,5 @@ def main():
command_str = command["command"]
projfile.parse_line(command_str)
+
main()
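
The make_vcxproj.py hunks rewrap long string concatenations and tighten dict literals such as {"file": file_name, "defines": file_defines}; the helpers being touched scan a compiler argument list for defines and includes. A sketch of that scan for an MSVC-style command line, illustrative only: the "/D" prefix check is an assumption, since the hunk shows just the arg[2:] slice.

    def get_defines(args):
        # Collect macro names from /DNAME arguments (prefix test assumed).
        defines = set()
        for arg in args:
            if arg.startswith("/D"):
                defines.add(arg[2:])
        return defines

    print(get_defines(["/DNDEBUG", "/Iinclude", "/DWIN32"]))  # NDEBUG and WIN32
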
diff --git a/buildscripts/moduleconfig.py b/buildscripts/moduleconfig.py
index eece68bcdc3..e6f39b65d5e 100644
--- a/buildscripts/moduleconfig.py
+++ b/buildscripts/moduleconfig.py
@@ -26,12 +26,13 @@ MongoDB SConscript files do.
from __future__ import print_function
__all__ = ('discover_modules', 'discover_module_directories', 'configure_modules',
- 'register_module_test')
+ 'register_module_test')
import imp
import inspect
import os
+
def discover_modules(module_root, allowed_modules):
"""Scans module_root for subdirectories that look like MongoDB modules.
@@ -71,6 +72,7 @@ def discover_modules(module_root, allowed_modules):
return found_modules
+
def discover_module_directories(module_root, allowed_modules):
"""Scans module_root for subdirectories that look like MongoDB modules.
@@ -101,6 +103,7 @@ def discover_module_directories(module_root, allowed_modules):
return found_modules
+
def configure_modules(modules, conf):
""" Run the configure() function in the build.py python modules for each module in "modules"
(as created by discover_modules).
@@ -114,6 +117,7 @@ def configure_modules(modules, conf):
root = os.path.dirname(module.__file__)
module.configure(conf, conf.env)
+
def get_module_sconscripts(modules):
sconscripts = []
for m in modules:
@@ -121,6 +125,7 @@ def get_module_sconscripts(modules):
sconscripts.append(os.path.join(module_dir_path, 'SConscript'))
return sconscripts
+
def __get_src_relative_path(path):
"""Return a path relative to ./src.
@@ -135,6 +140,7 @@ def __get_src_relative_path(path):
result = path[len(src_dir) + 1:]
return result
+
def __get_module_path(module_frame_depth):
"""Return the path to the MongoDB module whose build.py is executing "module_frame_depth" frames
above this function, relative to the "src" directory.
@@ -142,6 +148,7 @@ def __get_module_path(module_frame_depth):
module_filename = inspect.stack()[module_frame_depth + 1][1]
return os.path.dirname(__get_src_relative_path(module_filename))
+
def __get_module_src_path(module_frame_depth):
"""Return the path relative to the SConstruct file of the MongoDB module's source tree.
@@ -150,6 +157,7 @@ def __get_module_src_path(module_frame_depth):
"""
return os.path.join('src', __get_module_path(module_frame_depth + 1))
+
def __get_module_build_path(module_frame_depth):
"""Return the path relative to the SConstruct file of the MongoDB module's build tree.
@@ -158,6 +166,7 @@ def __get_module_build_path(module_frame_depth):
"""
return os.path.join('$BUILD_DIR', __get_module_path(module_frame_depth + 1))
+
def get_current_module_src_path():
"""Return the path relative to the SConstruct file of the current MongoDB module's source tree.
@@ -165,6 +174,7 @@ def get_current_module_src_path():
"""
return __get_module_src_path(1)
+
def get_current_module_build_path():
"""Return the path relative to the SConstruct file of the current MongoDB module's build tree.
@@ -173,6 +183,7 @@ def get_current_module_build_path():
return __get_module_build_path(1)
+
def get_current_module_libdep_name(libdep_rel_path):
"""Return a $BUILD_DIR relative path to a "libdep_rel_path", where "libdep_rel_path"
is specified relative to the MongoDB module's build.py file.
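
configure_modules() above imports each discovered module's build.py and calls module.configure(conf, conf.env); the shape of the hook a module provides is sketched below. The file location and the SCons call inside the hook are assumptions for illustration, not taken from this diff.

    # Hypothetical build.py for a MongoDB module, matching the
    # configure(conf, env) call made by configure_modules() above.
    def configure(conf, env):
        # For example, register a module-specific preprocessor define
        # on the SCons environment (illustrative only).
        env.Append(CPPDEFINES=["MONGO_MY_MODULE"])
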
diff --git a/buildscripts/mongosymb.py b/buildscripts/mongosymb.py
index 4da05350558..a38c395978d 100755
--- a/buildscripts/mongosymb.py
+++ b/buildscripts/mongosymb.py
@@ -23,6 +23,7 @@ import os
import subprocess
import sys
+
def symbolize_frames(trace_doc, dbg_path_resolver, symbolizer_path=None, dsym_hint=None):
"""Given a trace_doc in MongoDB stack dump format, returns a list of symbolized stack frames.
"""
@@ -36,7 +37,7 @@ def symbolize_frames(trace_doc, dbg_path_resolver, symbolizer_path=None, dsym_hi
"""Makes a map from binary load address to description of library from the somap, which is
a list of dictionaries describing individual loaded libraries.
"""
- return { so_entry["b"] : so_entry for so_entry in somap_list if so_entry.has_key("b") }
+ return {so_entry["b"]: so_entry for so_entry in somap_list if so_entry.has_key("b")}
base_addr_map = make_base_addr_map(trace_doc["processInfo"]["somap"])
@@ -57,21 +58,17 @@ def symbolize_frames(trace_doc, dbg_path_resolver, symbolizer_path=None, dsym_hi
# address of instructions that cause signals (such as segfaults and divide-by-zero) which
# are already correct, but there doesn't seem to be a reliable way to detect that case.
addr -= 1
- frames.append(dict(path=dbg_path_resolver.get_dbg_file(soinfo),
- buildId=soinfo.get("buildId", None),
- offset=frame["o"],
- addr=addr,
- symbol=frame.get("s", None)))
+ frames.append(
+ dict(
+ path=dbg_path_resolver.get_dbg_file(soinfo), buildId=soinfo.get("buildId", None),
+ offset=frame["o"], addr=addr, symbol=frame.get("s", None)))
symbolizer_args = [symbolizer_path]
for dh in dsym_hint:
- symbolizer_args.append("-dsym-hint=%s" %dh)
- symbolizer_process = subprocess.Popen(
- args=symbolizer_args,
- close_fds=True,
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=open("/dev/null"))
+ symbolizer_args.append("-dsym-hint=%s" % dh)
+ symbolizer_process = subprocess.Popen(args=symbolizer_args, close_fds=True,
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+ stderr=open("/dev/null"))
def extract_symbols(stdin):
"""Extracts symbol information from the output of llvm-symbolizer.
@@ -93,7 +90,7 @@ def symbolize_frames(trace_doc, dbg_path_resolver, symbolizer_path=None, dsym_hi
if line == "\n":
break
if step == 0:
- result.append({"fn" : line.strip()})
+ result.append({"fn": line.strip()})
step = 1
else:
file_name, line, column = line.strip().rsplit(':', 3)
@@ -111,6 +108,7 @@ def symbolize_frames(trace_doc, dbg_path_resolver, symbolizer_path=None, dsym_hi
symbolizer_process.wait()
return frames
+
class path_dbg_file_resolver(object):
def __init__(self, bin_path_guess):
self._bin_path_guess = bin_path_guess
@@ -118,6 +116,7 @@ class path_dbg_file_resolver(object):
def get_dbg_file(self, soinfo):
return soinfo.get("path", self._bin_path_guess)
+
class s3_buildid_dbg_file_resolver(object):
def __init__(self, cache_dir, s3_bucket):
self._cache_dir = cache_dir
@@ -134,7 +133,7 @@ class s3_buildid_dbg_file_resolver(object):
self._get_from_s3(buildId)
except:
ex = sys.exc_info()[0]
- sys.stderr.write("Failed to find debug symbols for %s in s3: %s\n" %(buildId, ex))
+ sys.stderr.write("Failed to find debug symbols for %s in s3: %s\n" % (buildId, ex))
return None
if not os.path.exists(buildIdPath):
return None
@@ -142,10 +141,11 @@ class s3_buildid_dbg_file_resolver(object):
def _get_from_s3(self, buildId):
subprocess.check_call(
- ['wget', 'https://s3.amazonaws.com/%s/%s.debug.gz' % (self._s3_bucket, buildId)],
- cwd=self._cache_dir)
+ ['wget', 'https://s3.amazonaws.com/%s/%s.debug.gz' %
+ (self._s3_bucket, buildId)], cwd=self._cache_dir)
subprocess.check_call(['gunzip', buildId + ".debug.gz"], cwd=self._cache_dir)
+
def classic_output(frames, outfile, **kwargs):
for frame in frames:
symbinfo = frame["symbinfo"]
@@ -155,6 +155,7 @@ def classic_output(frames, outfile, **kwargs):
else:
outfile.write(" %(path)s!!!\n" % symbinfo)
+
def main(argv):
parser = optparse.OptionParser()
parser.add_option("--dsym-hint", action="append", dest="dsym_hint")
@@ -173,7 +174,6 @@ def main(argv):
sys.stderr.write("Invalid output-format argument: %s\n" % options.output_format)
sys.exit(1)
-
# Skip over everything before the first '{' since it is likely to be log line prefixes.
# Additionally, using raw_decode() to ignore extra data after the closing '}' to allow maximal
# sloppiness in copy-pasting input.
@@ -182,12 +182,11 @@ def main(argv):
trace_doc = json.JSONDecoder().raw_decode(trace_doc)[0]
resolver = resolver_constructor(*args[1:])
- frames = symbolize_frames(trace_doc,
- resolver,
- symbolizer_path=options.symbolizer_path,
+ frames = symbolize_frames(trace_doc, resolver, symbolizer_path=options.symbolizer_path,
dsym_hint=options.dsym_hint)
output_fn(frames, sys.stdout, indent=2)
+
if __name__ == '__main__':
main(sys.argv)
sys.exit(0)
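
symbolize_frames() keys each somap entry by its load address ("b") so a frame address can be attributed to the library that contains it. A small self-contained sketch of that lookup follows; the addresses are made up, and the "largest base not above the address" rule is an assumption, since that step falls outside the hunks shown.

    somap = [{"b": 0x400000, "path": "/usr/bin/mongod"},
             {"b": 0x7f0000000000, "path": "/lib/x86_64-linux-gnu/libssl.so"}]
    base_addr_map = {so_entry["b"]: so_entry for so_entry in somap if "b" in so_entry}

    addr = 0x400123
    bases = [b for b in base_addr_map if b <= addr]
    if bases:
        print(base_addr_map[max(bases)]["path"])  # /usr/bin/mongod
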
diff --git a/buildscripts/msitrim.py b/buildscripts/msitrim.py
index 45ca8d482ac..52736d8e869 100644
--- a/buildscripts/msitrim.py
+++ b/buildscripts/msitrim.py
@@ -1,43 +1,52 @@
-"""Script to fix up our MSI files """
-
-import argparse;
-import msilib
-import shutil;
-
-parser = argparse.ArgumentParser(description='Trim MSI.')
-parser.add_argument('file', type=argparse.FileType('r'), help='file to trim')
-parser.add_argument('out', type=argparse.FileType('w'), help='file to output to')
-
-args = parser.parse_args()
-
-def exec_delete(query):
- view = db.OpenView(query)
- view.Execute(None)
-
- cur_record = view.Fetch()
- view.Modify(msilib.MSIMODIFY_DELETE, cur_record)
- view.Close()
-
-
-def exec_update(query, column, value):
- view = db.OpenView(query)
- view.Execute(None)
-
- cur_record = view.Fetch()
- cur_record.SetString(column, value)
- view.Modify(msilib.MSIMODIFY_REPLACE, cur_record)
- view.Close()
-
-
-print "Trimming MSI"
-
-db = msilib.OpenDatabase(args.file.name, msilib.MSIDBOPEN_DIRECT)
-
-exec_delete("select * from ControlEvent WHERE Dialog_ = 'LicenseAgreementDlg' AND Control_ = 'Next' AND Event = 'NewDialog' AND Argument = 'CustomizeDlg'")
-exec_delete("select * from ControlEvent WHERE Dialog_ = 'CustomizeDlg' AND Control_ = 'Back' AND Event = 'NewDialog' AND Argument = 'LicenseAgreementDlg'")
-exec_delete("select * from ControlEvent WHERE Dialog_ = 'CustomizeDlg' AND Control_ = 'Next' AND Event = 'NewDialog' AND Argument = 'VerifyReadyDlg'")
-exec_delete("select * from ControlEvent WHERE Dialog_ = 'VerifyReadyDlg' AND Control_ = 'Back' AND Event = 'NewDialog' AND Argument = 'CustomizeDlg'")
-
-db.Commit()
-
-shutil.copyfile(args.file.name, args.out.name);
+"""Script to fix up our MSI files """
+
+import argparse
+import msilib
+import shutil
+
+parser = argparse.ArgumentParser(description='Trim MSI.')
+parser.add_argument('file', type=argparse.FileType('r'), help='file to trim')
+parser.add_argument('out', type=argparse.FileType('w'), help='file to output to')
+
+args = parser.parse_args()
+
+
+def exec_delete(query):
+ view = db.OpenView(query)
+ view.Execute(None)
+
+ cur_record = view.Fetch()
+ view.Modify(msilib.MSIMODIFY_DELETE, cur_record)
+ view.Close()
+
+
+def exec_update(query, column, value):
+ view = db.OpenView(query)
+ view.Execute(None)
+
+ cur_record = view.Fetch()
+ cur_record.SetString(column, value)
+ view.Modify(msilib.MSIMODIFY_REPLACE, cur_record)
+ view.Close()
+
+
+print "Trimming MSI"
+
+db = msilib.OpenDatabase(args.file.name, msilib.MSIDBOPEN_DIRECT)
+
+exec_delete(
+ "select * from ControlEvent WHERE Dialog_ = 'LicenseAgreementDlg' AND Control_ = 'Next' AND Event = 'NewDialog' AND Argument = 'CustomizeDlg'"
+)
+exec_delete(
+ "select * from ControlEvent WHERE Dialog_ = 'CustomizeDlg' AND Control_ = 'Back' AND Event = 'NewDialog' AND Argument = 'LicenseAgreementDlg'"
+)
+exec_delete(
+ "select * from ControlEvent WHERE Dialog_ = 'CustomizeDlg' AND Control_ = 'Next' AND Event = 'NewDialog' AND Argument = 'VerifyReadyDlg'"
+)
+exec_delete(
+ "select * from ControlEvent WHERE Dialog_ = 'VerifyReadyDlg' AND Control_ = 'Back' AND Event = 'NewDialog' AND Argument = 'CustomizeDlg'"
+)
+
+db.Commit()
+
+shutil.copyfile(args.file.name, args.out.name)
diff --git a/buildscripts/packager-enterprise.py b/buildscripts/packager-enterprise.py
index 8629be1963f..c26b515e954 100755
--- a/buildscripts/packager-enterprise.py
+++ b/buildscripts/packager-enterprise.py
@@ -42,15 +42,15 @@ import time
import urlparse
# The MongoDB names for the architectures we support.
-ARCH_CHOICES=["x86_64", "ppc64le", "s390x", "arm64"]
+ARCH_CHOICES = ["x86_64", "ppc64le", "s390x", "arm64"]
# Made up names for the flavors of distribution we package for.
-DISTROS=["suse", "debian","redhat","ubuntu","amazon"]
+DISTROS = ["suse", "debian", "redhat", "ubuntu", "amazon"]
class EnterpriseSpec(packager.Spec):
def suffix(self):
- return "-enterprise" if int(self.ver.split(".")[1])%2==0 else "-enterprise-unstable"
+ return "-enterprise" if int(self.ver.split(".")[1]) % 2 == 0 else "-enterprise-unstable"
class EnterpriseDistro(packager.Distro):
@@ -92,16 +92,20 @@ class EnterpriseDistro(packager.Distro):
repo_directory = ""
if spec.is_pre_release():
- repo_directory = "testing"
+ repo_directory = "testing"
else:
- repo_directory = spec.branch()
+ repo_directory = spec.branch()
if re.search("^(debian|ubuntu)", self.n):
- return "repo/apt/%s/dists/%s/mongodb-enterprise/%s/%s/binary-%s/" % (self.n, self.repo_os_version(build_os), repo_directory, self.repo_component(), self.archname(arch))
+ return "repo/apt/%s/dists/%s/mongodb-enterprise/%s/%s/binary-%s/" % (
+ self.n, self.repo_os_version(build_os), repo_directory, self.repo_component(),
+ self.archname(arch))
elif re.search("(redhat|fedora|centos|amazon)", self.n):
- return "repo/yum/%s/%s/mongodb-enterprise/%s/%s/RPMS/" % (self.n, self.repo_os_version(build_os), repo_directory, self.archname(arch))
+ return "repo/yum/%s/%s/mongodb-enterprise/%s/%s/RPMS/" % (
+ self.n, self.repo_os_version(build_os), repo_directory, self.archname(arch))
elif re.search("(suse)", self.n):
- return "repo/zypper/%s/%s/mongodb-enterprise/%s/%s/RPMS/" % (self.n, self.repo_os_version(build_os), repo_directory, self.archname(arch))
+ return "repo/zypper/%s/%s/mongodb-enterprise/%s/%s/RPMS/" % (
+ self.n, self.repo_os_version(build_os), repo_directory, self.archname(arch))
else:
raise Exception("BUG: unsupported platform?")
@@ -111,80 +115,83 @@ class EnterpriseDistro(packager.Distro):
"""
if arch == "ppc64le":
if self.n == 'ubuntu':
- return [ "ubuntu1604" ]
+ return ["ubuntu1604"]
if self.n == 'redhat':
- return [ "rhel71" ]
+ return ["rhel71"]
else:
return []
if arch == "s390x":
if self.n == 'redhat':
- return [ "rhel67", "rhel72" ]
+ return ["rhel67", "rhel72"]
if self.n == 'suse':
- return [ "suse11", "suse12" ]
+ return ["suse11", "suse12"]
if self.n == 'ubuntu':
- return [ "ubuntu1604" ]
+ return ["ubuntu1604"]
else:
return []
if arch == "arm64":
if self.n == 'ubuntu':
- return [ "ubuntu1604" ]
+ return ["ubuntu1604"]
else:
return []
if re.search("(redhat|fedora|centos)", self.n):
- return [ "rhel70", "rhel62", "rhel57" ]
+ return ["rhel70", "rhel62", "rhel57"]
else:
return super(EnterpriseDistro, self).build_os(arch)
+
def main(argv):
- distros=[EnterpriseDistro(distro) for distro in DISTROS]
+ distros = [EnterpriseDistro(distro) for distro in DISTROS]
args = packager.get_args(distros, ARCH_CHOICES)
spec = EnterpriseSpec(args.server_version, args.metadata_gitspec, args.release_number)
- oldcwd=os.getcwd()
- srcdir=oldcwd+"/../"
+ oldcwd = os.getcwd()
+ srcdir = oldcwd + "/../"
# Where to do all of our work. Use a randomly-created directory if one
# is not passed in.
prefix = args.prefix
if prefix is None:
- prefix=tempfile.mkdtemp()
+ prefix = tempfile.mkdtemp()
print "Working in directory %s" % prefix
os.chdir(prefix)
try:
- made_pkg = False
- # Build a package for each distro/spec/arch tuple, and
- # accumulate the repository-layout directories.
- for (distro, arch) in packager.crossproduct(distros, args.arches):
+ made_pkg = False
+ # Build a package for each distro/spec/arch tuple, and
+ # accumulate the repository-layout directories.
+ for (distro, arch) in packager.crossproduct(distros, args.arches):
- for build_os in distro.build_os(arch):
- if build_os in args.distros or not args.distros:
+ for build_os in distro.build_os(arch):
+ if build_os in args.distros or not args.distros:
- filename = tarfile(build_os, arch, spec)
- packager.ensure_dir(filename)
- shutil.copyfile(args.tarball, filename)
+ filename = tarfile(build_os, arch, spec)
+ packager.ensure_dir(filename)
+ shutil.copyfile(args.tarball, filename)
- repo = make_package(distro, build_os, arch, spec, srcdir)
- make_repo(repo, distro, build_os, spec)
+ repo = make_package(distro, build_os, arch, spec, srcdir)
+ make_repo(repo, distro, build_os, spec)
- made_pkg = True
+ made_pkg = True
- if not made_pkg:
- raise Exception("No valid combination of distro and arch selected")
+ if not made_pkg:
+ raise Exception("No valid combination of distro and arch selected")
finally:
os.chdir(oldcwd)
+
def tarfile(build_os, arch, spec):
"""Return the location where we store the downloaded tarball for
this package"""
return "dl/mongodb-linux-%s-enterprise-%s-%s.tar.gz" % (spec.version(), build_os, arch)
+
def setupdir(distro, build_os, arch, spec):
# The setupdir will be a directory containing all inputs to the
# distro's packaging tools (e.g., package metadata files, init
@@ -192,11 +199,13 @@ def setupdir(distro, build_os, arch, spec):
# the following format string is unclear, an example setupdir
# would be dst/x86_64/debian-sysvinit/wheezy/mongodb-org-unstable/
# or dst/x86_64/redhat/rhel57/mongodb-org-unstable/
- return "dst/%s/%s/%s/%s%s-%s/" % (arch, distro.name(), build_os, distro.pkgbase(), spec.suffix(), spec.pversion(distro))
+ return "dst/%s/%s/%s/%s%s-%s/" % (arch, distro.name(), build_os, distro.pkgbase(),
+ spec.suffix(), spec.pversion(distro))
+
def unpack_binaries_into(build_os, arch, spec, where):
"""Unpack the tarfile for (build_os, arch, spec) into directory where."""
- rootdir=os.getcwd()
+ rootdir = os.getcwd()
packager.ensure_dir(where)
# Note: POSIX tar doesn't require support for gtar's "-C" option,
# and Python's tarfile module prior to Python 2.7 doesn't have the
@@ -204,23 +213,24 @@ def unpack_binaries_into(build_os, arch, spec, where):
# thing and chdir into where and run tar there.
os.chdir(where)
try:
- packager.sysassert(["tar", "xvzf", rootdir+"/"+tarfile(build_os, arch, spec)])
- release_dir = glob('mongodb-linux-*')[0]
+ packager.sysassert(["tar", "xvzf", rootdir + "/" + tarfile(build_os, arch, spec)])
+ release_dir = glob('mongodb-linux-*')[0]
for releasefile in "bin", "snmp", "LICENSE.txt", "README", "THIRD-PARTY-NOTICES", "MPL-2":
os.rename("%s/%s" % (release_dir, releasefile), releasefile)
os.rmdir(release_dir)
except Exception:
- exc=sys.exc_value
+ exc = sys.exc_value
os.chdir(rootdir)
raise exc
os.chdir(rootdir)
+
def make_package(distro, build_os, arch, spec, srcdir):
"""Construct the package for (arch, distro, spec), getting
packaging files from srcdir and any user-specified suffix from
suffixes"""
- sdir=setupdir(distro, build_os, arch, spec)
+ sdir = setupdir(distro, build_os, arch, spec)
packager.ensure_dir(sdir)
# Note that the RPM packages get their man pages from the debian
# directory, so the debian directory is needed in all cases (and
@@ -228,7 +238,11 @@ def make_package(distro, build_os, arch, spec, srcdir):
for pkgdir in ["debian", "rpm"]:
print "Copying packaging files from %s to %s" % ("%s/%s" % (srcdir, pkgdir), sdir)
# FIXME: sh-dash-cee is bad. See if tarfile can do this.
- packager.sysassert(["sh", "-c", "(cd \"%s\" && git archive %s %s/ ) | (cd \"%s\" && tar xvf -)" % (srcdir, spec.metadata_gitspec(), pkgdir, sdir)])
+ packager.sysassert([
+ "sh", "-c",
+ "(cd \"%s\" && git archive %s %s/ ) | (cd \"%s\" && tar xvf -)" %
+ (srcdir, spec.metadata_gitspec(), pkgdir, sdir)
+ ])
# Splat the binaries and snmp files under sdir. The "build" stages of the
# packaging infrastructure will move the files to wherever they
# need to go.
@@ -236,9 +250,10 @@ def make_package(distro, build_os, arch, spec, srcdir):
# Remove the mongoreplay binary due to libpcap dynamic
# linkage.
if os.path.exists(sdir + "bin/mongoreplay"):
- os.unlink(sdir + "bin/mongoreplay")
+ os.unlink(sdir + "bin/mongoreplay")
return distro.make_pkg(build_os, arch, spec, srcdir)
+
def make_repo(repodir, distro, build_os, spec):
if re.search("(debian|ubuntu)", repodir):
make_deb_repo(repodir, distro, build_os, spec)
@@ -247,26 +262,30 @@ def make_repo(repodir, distro, build_os, spec):
else:
raise Exception("BUG: unsupported platform?")
+
def make_deb_repo(repo, distro, build_os, spec):
# Note: the Debian repository Packages files must be generated
# very carefully in order to be usable.
- oldpwd=os.getcwd()
- os.chdir(repo+"../../../../../../")
+ oldpwd = os.getcwd()
+ os.chdir(repo + "../../../../../../")
try:
- dirs=set([os.path.dirname(deb)[2:] for deb in packager.backtick(["find", ".", "-name", "*.deb"]).split()])
+ dirs = set([
+ os.path.dirname(deb)[2:]
+ for deb in packager.backtick(["find", ".", "-name", "*.deb"]).split()
+ ])
for d in dirs:
- s=packager.backtick(["dpkg-scanpackages", d, "/dev/null"])
- with open(d+"/Packages", "w") as f:
+ s = packager.backtick(["dpkg-scanpackages", d, "/dev/null"])
+ with open(d + "/Packages", "w") as f:
f.write(s)
- b=packager.backtick(["gzip", "-9c", d+"/Packages"])
- with open(d+"/Packages.gz", "wb") as f:
+ b = packager.backtick(["gzip", "-9c", d + "/Packages"])
+ with open(d + "/Packages.gz", "wb") as f:
f.write(b)
finally:
os.chdir(oldpwd)
# Notes: the Release{,.gpg} files must live in a special place,
# and must be created after all the Packages.gz files have been
# done.
- s="""Origin: mongodb
+ s = """Origin: mongodb
Label: mongodb
Suite: %s
Codename: %s/mongodb-enterprise
@@ -274,13 +293,13 @@ Architectures: amd64 ppc64el s390x arm64
Components: %s
Description: MongoDB packages
""" % (distro.repo_os_version(build_os), distro.repo_os_version(build_os), distro.repo_component())
- if os.path.exists(repo+"../../Release"):
- os.unlink(repo+"../../Release")
- if os.path.exists(repo+"../../Release.gpg"):
- os.unlink(repo+"../../Release.gpg")
- oldpwd=os.getcwd()
- os.chdir(repo+"../../")
- s2=packager.backtick(["apt-ftparchive", "release", "."])
+ if os.path.exists(repo + "../../Release"):
+ os.unlink(repo + "../../Release")
+ if os.path.exists(repo + "../../Release.gpg"):
+ os.unlink(repo + "../../Release.gpg")
+ oldpwd = os.getcwd()
+ os.chdir(repo + "../../")
+ s2 = packager.backtick(["apt-ftparchive", "release", "."])
try:
with open("Release", 'w') as f:
f.write(s)
@@ -296,20 +315,20 @@ def move_repos_into_place(src, dst):
# one. This feels like a lot of hooey for something so trivial.
# First, make a crispy fresh new directory to put the stuff in.
- i=0
+ i = 0
while True:
- date_suffix=time.strftime("%Y-%m-%d")
- dname=dst+".%s.%d" % (date_suffix, i)
+ date_suffix = time.strftime("%Y-%m-%d")
+ dname = dst + ".%s.%d" % (date_suffix, i)
try:
os.mkdir(dname)
break
except OSError:
- exc=sys.exc_value
+ exc = sys.exc_value
if exc.errno == errno.EEXIST:
pass
else:
raise exc
- i=i+1
+ i = i + 1
# Put the stuff in our new directory.
for r in os.listdir(src):
@@ -317,40 +336,41 @@ def move_repos_into_place(src, dst):
# Make a symlink to the new directory; the symlink will be renamed
# to dst shortly.
- i=0
+ i = 0
while True:
- tmpnam=dst+".TMP.%d" % i
+ tmpnam = dst + ".TMP.%d" % i
try:
os.symlink(dname, tmpnam)
break
- except OSError: # as exc: # Python >2.5
- exc=sys.exc_value
+ except OSError: # as exc: # Python >2.5
+ exc = sys.exc_value
if exc.errno == errno.EEXIST:
pass
else:
raise exc
- i=i+1
+ i = i + 1
# Make a symlink to the old directory; this symlink will be
# renamed shortly, too.
- oldnam=None
+ oldnam = None
if os.path.exists(dst):
- i=0
- while True:
- oldnam=dst+".old.%d" % i
- try:
- os.symlink(os.readlink(dst), oldnam)
- break
- except OSError: # as exc: # Python >2.5
- exc=sys.exc_value
- if exc.errno == errno.EEXIST:
- pass
- else:
- raise exc
+ i = 0
+ while True:
+ oldnam = dst + ".old.%d" % i
+ try:
+ os.symlink(os.readlink(dst), oldnam)
+ break
+ except OSError: # as exc: # Python >2.5
+ exc = sys.exc_value
+ if exc.errno == errno.EEXIST:
+ pass
+ else:
+ raise exc
os.rename(tmpnam, dst)
if oldnam:
- os.rename(oldnam, dst+".old")
+ os.rename(oldnam, dst + ".old")
+
if __name__ == "__main__":
main(sys.argv)
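
move_repos_into_place() above publishes a new repository tree by staging it in a dated directory and then swapping a symlink over the live name. A stripped-down sketch of that swap, with illustrative names and without the retry loops; on POSIX, os.rename() replaces an existing symlink atomically.

    import os
    import time

    staged = "repo.%s.0" % time.strftime("%Y-%m-%d")
    os.mkdir(staged)                 # dated directory holding the new content
    tmp = "repo.TMP.0"
    os.symlink(staged, tmp)          # temporary symlink pointing at it
    os.rename(tmp, "repo")           # atomically repoint the live name
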
diff --git a/buildscripts/packager.py b/buildscripts/packager.py
index 3cab40dc140..eda7608d6c8 100755
--- a/buildscripts/packager.py
+++ b/buildscripts/packager.py
@@ -38,14 +38,14 @@ import tempfile
import time
# The MongoDB names for the architectures we support.
-ARCH_CHOICES=["x86_64", "arm64"]
+ARCH_CHOICES = ["x86_64", "arm64"]
# Made up names for the flavors of distribution we package for.
-DISTROS=["suse", "debian","redhat","ubuntu", "amazon"]
+DISTROS = ["suse", "debian", "redhat", "ubuntu", "amazon"]
class Spec(object):
- def __init__(self, ver, gitspec = None, rel = None):
+ def __init__(self, ver, gitspec=None, rel=None):
self.ver = ver
self.gitspec = gitspec
self.rel = rel
@@ -54,7 +54,8 @@ class Spec(object):
# Patch builds version numbers are in the form: 3.5.5-64-g03945fa-patch-58debcdb3ff1223c9d00005b
#
def is_nightly(self):
- return bool(re.search("-$", self.version())) or bool(re.search("\d-\d+-g[0-9a-f]+$", self.version()))
+ return bool(re.search("-$", self.version())) or bool(
+ re.search("\d-\d+-g[0-9a-f]+$", self.version()))
def is_patch(self):
return bool(re.search("\d-\d+-g[0-9a-f]+-patch-[0-9a-f]+$", self.version()))
@@ -77,10 +78,10 @@ class Spec(object):
def metadata_gitspec(self):
"""Git revision to use for spec+control+init+manpage files.
The default is the release tag for the version being packaged."""
- if(self.gitspec):
- return self.gitspec
+ if (self.gitspec):
+ return self.gitspec
else:
- return 'r' + self.version()
+ return 'r' + self.version()
def version_better_than(self, version_string):
# FIXME: this is wrong, but I'm in a hurry.
@@ -88,33 +89,33 @@ class Spec(object):
return self.ver > version_string
def suffix(self):
- return "-org" if int(self.ver.split(".")[1])%2==0 else "-org-unstable"
+ return "-org" if int(self.ver.split(".")[1]) % 2 == 0 else "-org-unstable"
def prelease(self):
- # NOTE: This is only called for RPM packages, and only after
- # pversion() below has been called. If you want to change this format
- # and want DEB packages to match, make sure to update pversion()
- # below
- #
- # "N" is either passed in on the command line, or "1"
- if self.rel:
- corenum = self.rel
- else:
- corenum = 1
-
- # Version suffix for RPM packages:
- # 1) RC's - "0.N.rcX"
- # 2) Nightly (snapshot) - "0.N.latest"
- # 3) Patch builds - "0.N.patch.<patch_id>"
- # 4) Standard release - "N"
- if self.is_rc():
- return "0.%s.%s" % (corenum, re.sub('.*-','',self.version()))
- elif self.is_nightly():
- return "0.%s.latest" % (corenum)
- elif self.is_patch():
- return "0.%s.patch.%s" % (corenum, self.patch_id())
- else:
- return str(corenum)
+ # NOTE: This is only called for RPM packages, and only after
+ # pversion() below has been called. If you want to change this format
+ # and want DEB packages to match, make sure to update pversion()
+ # below
+ #
+ # "N" is either passed in on the command line, or "1"
+ if self.rel:
+ corenum = self.rel
+ else:
+ corenum = 1
+
+ # Version suffix for RPM packages:
+ # 1) RC's - "0.N.rcX"
+ # 2) Nightly (snapshot) - "0.N.latest"
+ # 3) Patch builds - "0.N.patch.<patch_id>"
+ # 4) Standard release - "N"
+ if self.is_rc():
+ return "0.%s.%s" % (corenum, re.sub('.*-', '', self.version()))
+ elif self.is_nightly():
+ return "0.%s.latest" % (corenum)
+ elif self.is_patch():
+ return "0.%s.patch.%s" % (corenum, self.patch_id())
+ else:
+ return str(corenum)
def pversion(self, distro):
# Note: Debian packages have funny rules about dashes in
@@ -149,9 +150,10 @@ class Spec(object):
"""
return ".".join(self.ver.split(".")[0:2])
+
class Distro(object):
def __init__(self, string):
- self.n=string
+ self.n = string
def name(self):
return self.n
@@ -172,18 +174,18 @@ class Distro(object):
elif arch == "arm64":
return "arm64"
elif arch.endswith("86"):
- return "i386"
+ return "i386"
else:
- return "amd64"
+ return "amd64"
elif re.search("^(suse|centos|redhat|fedora|amazon)", self.n):
if arch == "ppc64le":
return "ppc64le"
elif arch == "s390x":
return "s390x"
elif arch.endswith("86"):
- return "i686"
+ return "i686"
else:
- return "x86_64"
+ return "x86_64"
else:
raise Exception("BUG: unsupported platform?")
@@ -215,16 +217,23 @@ class Distro(object):
repo_directory = ""
if spec.is_pre_release():
- repo_directory = "testing"
+ repo_directory = "testing"
else:
- repo_directory = spec.branch()
+ repo_directory = spec.branch()
if re.search("^(debian|ubuntu)", self.n):
- return "repo/apt/%s/dists/%s/mongodb-org/%s/%s/binary-%s/" % (self.n, self.repo_os_version(build_os), repo_directory, self.repo_component(), self.archname(arch))
+ return "repo/apt/%s/dists/%s/mongodb-org/%s/%s/binary-%s/" % (
+ self.n, self.repo_os_version(build_os), repo_directory, self.repo_component(),
+ self.archname(arch))
elif re.search("(redhat|fedora|centos|amazon)", self.n):
- return "repo/yum/%s/%s/mongodb-org/%s/%s/RPMS/" % (self.n, self.repo_os_version(build_os), repo_directory, self.archname(arch))
+ return "repo/yum/%s/%s/mongodb-org/%s/%s/RPMS/" % (self.n,
+ self.repo_os_version(build_os),
+ repo_directory, self.archname(arch))
elif re.search("(suse)", self.n):
- return "repo/zypper/%s/%s/mongodb-org/%s/%s/RPMS/" % (self.n, self.repo_os_version(build_os), repo_directory, self.archname(arch))
+ return "repo/zypper/%s/%s/mongodb-org/%s/%s/RPMS/" % (self.n,
+ self.repo_os_version(build_os),
+ repo_directory,
+ self.archname(arch))
else:
raise Exception("BUG: unsupported platform?")
@@ -232,9 +241,9 @@ class Distro(object):
"""Return the name of the section/component/pool we are publishing into -
e.g. "multiverse" for Ubuntu, "main" for debian."""
if self.n == 'ubuntu':
- return "multiverse"
+ return "multiverse"
elif self.n == 'debian':
- return "main"
+ return "main"
else:
raise Exception("unsupported distro: %s" % self.n)
@@ -285,15 +294,19 @@ class Distro(object):
raise Exception("BUG: unsupported architecture (%s)" % arch)
if re.search("(suse)", self.n):
- return [ "suse11", "suse12" ]
+ return ["suse11", "suse12"]
elif re.search("(redhat|fedora|centos)", self.n):
- return [ "rhel70", "rhel71", "rhel72", "rhel62", "rhel55" ]
+ return ["rhel70", "rhel71", "rhel72", "rhel62", "rhel55"]
elif self.n == 'amazon':
- return [ "amazon" ]
+ return ["amazon"]
elif self.n == 'ubuntu':
- return [ "ubuntu1204", "ubuntu1404", "ubuntu1604", ]
+ return [
+ "ubuntu1204",
+ "ubuntu1404",
+ "ubuntu1604",
+ ]
elif self.n == 'debian':
- return [ "debian71", "debian81", "debian92" ]
+ return ["debian71", "debian81", "debian92"]
else:
raise Exception("BUG: unsupported platform?")
@@ -302,70 +315,79 @@ class Distro(object):
"el6" for rhel 6.x, return anything else unchanged"""
if self.n == 'amazon':
- return 'amzn1'
+ return 'amzn1'
else:
- return re.sub(r'^rh(el\d).*$', r'\1', build_os)
+ return re.sub(r'^rh(el\d).*$', r'\1', build_os)
+
def get_args(distros, arch_choices):
- distro_choices=[]
+ distro_choices = []
for distro in distros:
for arch in arch_choices:
- distro_choices.extend(distro.build_os(arch))
+ distro_choices.extend(distro.build_os(arch))
parser = argparse.ArgumentParser(description='Build MongoDB Packages')
- parser.add_argument("-s", "--server-version", help="Server version to build (e.g. 2.7.8-rc0)", required=True)
- parser.add_argument("-m", "--metadata-gitspec", help="Gitspec to use for package metadata files", required=False)
- parser.add_argument("-r", "--release-number", help="RPM release number base", type=int, required=False)
- parser.add_argument("-d", "--distros", help="Distros to build for", choices=distro_choices, required=False, default=[], action='append')
+ parser.add_argument("-s", "--server-version", help="Server version to build (e.g. 2.7.8-rc0)",
+ required=True)
+ parser.add_argument("-m", "--metadata-gitspec",
+ help="Gitspec to use for package metadata files", required=False)
+ parser.add_argument("-r", "--release-number", help="RPM release number base", type=int,
+ required=False)
+ parser.add_argument("-d", "--distros", help="Distros to build for", choices=distro_choices,
+ required=False, default=[], action='append')
parser.add_argument("-p", "--prefix", help="Directory to build into", required=False)
- parser.add_argument("-a", "--arches", help="Architecture to build", choices=arch_choices, default=[], required=False, action='append')
- parser.add_argument("-t", "--tarball", help="Local tarball to package", required=True, type=lambda x: is_valid_file(parser, x))
+ parser.add_argument("-a", "--arches", help="Architecture to build", choices=arch_choices,
+ default=[], required=False, action='append')
+ parser.add_argument("-t", "--tarball", help="Local tarball to package", required=True,
+ type=lambda x: is_valid_file(parser, x))
args = parser.parse_args()
if len(args.distros) * len(args.arches) > 1 and args.tarball:
- parser.error("Can only specify local tarball with one distro/arch combination")
+ parser.error("Can only specify local tarball with one distro/arch combination")
return args
+
def main(argv):
- distros=[Distro(distro) for distro in DISTROS]
+ distros = [Distro(distro) for distro in DISTROS]
args = get_args(distros, ARCH_CHOICES)
spec = Spec(args.server_version, args.metadata_gitspec, args.release_number)
- oldcwd=os.getcwd()
- srcdir=oldcwd+"/../"
+ oldcwd = os.getcwd()
+ srcdir = oldcwd + "/../"
# Where to do all of our work. Use a randomly-created directory if one
# is not passed in.
prefix = args.prefix
if prefix is None:
- prefix = tempfile.mkdtemp()
+ prefix = tempfile.mkdtemp()
print "Working in directory %s" % prefix
os.chdir(prefix)
try:
- # Build a package for each distro/spec/arch tuple, and
- # accumulate the repository-layout directories.
- for (distro, arch) in crossproduct(distros, args.arches):
+ # Build a package for each distro/spec/arch tuple, and
+ # accumulate the repository-layout directories.
+ for (distro, arch) in crossproduct(distros, args.arches):
- for build_os in distro.build_os(arch):
- if build_os in args.distros or not args.distros:
+ for build_os in distro.build_os(arch):
+ if build_os in args.distros or not args.distros:
- filename = tarfile(build_os, arch, spec)
- ensure_dir(filename)
- shutil.copyfile(args.tarball, filename)
+ filename = tarfile(build_os, arch, spec)
+ ensure_dir(filename)
+ shutil.copyfile(args.tarball, filename)
- repo = make_package(distro, build_os, arch, spec, srcdir)
- make_repo(repo, distro, build_os, spec)
+ repo = make_package(distro, build_os, arch, spec, srcdir)
+ make_repo(repo, distro, build_os, spec)
finally:
os.chdir(oldcwd)
+
def crossproduct(*seqs):
"""A generator for iterating all the tuples consisting of elements
of seqs."""
@@ -378,16 +400,18 @@ def crossproduct(*seqs):
else:
for lst in crossproduct(*seqs[:-1]):
for i in seqs[-1]:
- lst2=list(lst)
+ lst2 = list(lst)
lst2.append(i)
yield lst2
+
def sysassert(argv):
"""Run argv and assert that it exited with status 0."""
print "In %s, running %s" % (os.getcwd(), " ".join(argv))
sys.stdout.flush()
sys.stderr.flush()
- assert(subprocess.Popen(argv).wait()==0)
+ assert (subprocess.Popen(argv).wait() == 0)
+
def backtick(argv):
"""Run argv and return its output string."""
@@ -396,11 +420,13 @@ def backtick(argv):
sys.stderr.flush()
return subprocess.Popen(argv, stdout=subprocess.PIPE).communicate()[0]
+
def tarfile(build_os, arch, spec):
"""Return the location where we store the downloaded tarball for
this package"""
return "dl/mongodb-linux-%s-%s-%s.tar.gz" % (spec.version(), build_os, arch)
+
def setupdir(distro, build_os, arch, spec):
# The setupdir will be a directory containing all inputs to the
# distro's packaging tools (e.g., package metadata files, init
@@ -408,11 +434,13 @@ def setupdir(distro, build_os, arch, spec):
# the following format string is unclear, an example setupdir
# would be dst/x86_64/debian-sysvinit/wheezy/mongodb-org-unstable/
# or dst/x86_64/redhat/rhel55/mongodb-org-unstable/
- return "dst/%s/%s/%s/%s%s-%s/" % (arch, distro.name(), build_os, distro.pkgbase(), spec.suffix(), spec.pversion(distro))
+ return "dst/%s/%s/%s/%s%s-%s/" % (arch, distro.name(), build_os, distro.pkgbase(),
+ spec.suffix(), spec.pversion(distro))
+
def unpack_binaries_into(build_os, arch, spec, where):
"""Unpack the tarfile for (build_os, arch, spec) into directory where."""
- rootdir=os.getcwd()
+ rootdir = os.getcwd()
ensure_dir(where)
# Note: POSIX tar doesn't require support for gtar's "-C" option,
# and Python's tarfile module prior to Python 2.7 doesn't have the
@@ -420,24 +448,25 @@ def unpack_binaries_into(build_os, arch, spec, where):
# thing and chdir into where and run tar there.
os.chdir(where)
try:
- sysassert(["tar", "xvzf", rootdir+"/"+tarfile(build_os, arch, spec)])
+ sysassert(["tar", "xvzf", rootdir + "/" + tarfile(build_os, arch, spec)])
release_dir = glob('mongodb-linux-*')[0]
for releasefile in "bin", "GNU-AGPL-3.0", "README", "THIRD-PARTY-NOTICES", "MPL-2":
print "moving file: %s/%s" % (release_dir, releasefile)
os.rename("%s/%s" % (release_dir, releasefile), releasefile)
os.rmdir(release_dir)
except Exception:
- exc=sys.exc_value
+ exc = sys.exc_value
os.chdir(rootdir)
raise exc
os.chdir(rootdir)
+
def make_package(distro, build_os, arch, spec, srcdir):
"""Construct the package for (arch, distro, spec), getting
packaging files from srcdir and any user-specified suffix from
suffixes"""
- sdir=setupdir(distro, build_os, arch, spec)
+ sdir = setupdir(distro, build_os, arch, spec)
ensure_dir(sdir)
# Note that the RPM packages get their man pages from the debian
# directory, so the debian directory is needed in all cases (and
@@ -445,7 +474,11 @@ def make_package(distro, build_os, arch, spec, srcdir):
for pkgdir in ["debian", "rpm"]:
print "Copying packaging files from %s to %s" % ("%s/%s" % (srcdir, pkgdir), sdir)
# FIXME: sh-dash-cee is bad. See if tarfile can do this.
- sysassert(["sh", "-c", "(cd \"%s\" && git archive %s %s/ ) | (cd \"%s\" && tar xvf -)" % (srcdir, spec.metadata_gitspec(), pkgdir, sdir)])
+ sysassert([
+ "sh", "-c",
+ "(cd \"%s\" && git archive %s %s/ ) | (cd \"%s\" && tar xvf -)" %
+ (srcdir, spec.metadata_gitspec(), pkgdir, sdir)
+ ])
# Splat the binaries under sdir. The "build" stages of the
# packaging infrastructure will move the files to wherever they
# need to go.
@@ -453,9 +486,10 @@ def make_package(distro, build_os, arch, spec, srcdir):
# Remove the mongoreplay binary due to libpcap dynamic
# linkage.
if os.path.exists(sdir + "bin/mongoreplay"):
- os.unlink(sdir + "bin/mongoreplay")
+ os.unlink(sdir + "bin/mongoreplay")
return distro.make_pkg(build_os, arch, spec, srcdir)
+
def make_repo(repodir, distro, build_os, spec):
if re.search("(debian|ubuntu)", repodir):
make_deb_repo(repodir, distro, build_os, spec)
@@ -464,81 +498,92 @@ def make_repo(repodir, distro, build_os, spec):
else:
raise Exception("BUG: unsupported platform?")
+
def make_deb(distro, build_os, arch, spec, srcdir):
# I can't remember the details anymore, but the initscript/upstart
# job files' names must match the package name in some way; and
# see also the --name flag to dh_installinit in the generated
# debian/rules file.
- suffix=spec.suffix()
- sdir=setupdir(distro, build_os, arch, spec)
+ suffix = spec.suffix()
+ sdir = setupdir(distro, build_os, arch, spec)
if re.search("debian", distro.name()):
- os.unlink(sdir+"debian/mongod.upstart")
+ os.unlink(sdir + "debian/mongod.upstart")
if build_os == "debian71":
- os.link(sdir+"debian/init.d", sdir+"debian/%s%s-server.mongod.init" % (distro.pkgbase(), suffix))
- os.unlink(sdir+"debian/mongod.service")
+ os.link(sdir + "debian/init.d",
+ sdir + "debian/%s%s-server.mongod.init" % (distro.pkgbase(), suffix))
+ os.unlink(sdir + "debian/mongod.service")
else:
- os.link(sdir+"debian/mongod.service", sdir+"debian/%s%s-server.mongod.service" % (distro.pkgbase(), suffix))
- os.unlink(sdir+"debian/init.d")
+ os.link(sdir + "debian/mongod.service",
+ sdir + "debian/%s%s-server.mongod.service" % (distro.pkgbase(), suffix))
+ os.unlink(sdir + "debian/init.d")
elif re.search("ubuntu", distro.name()):
- os.unlink(sdir+"debian/init.d")
+ os.unlink(sdir + "debian/init.d")
if build_os in ("ubuntu1204", "ubuntu1404", "ubuntu1410"):
- os.link(sdir+"debian/mongod.upstart", sdir+"debian/%s%s-server.mongod.upstart" % (distro.pkgbase(), suffix))
- os.unlink(sdir+"debian/mongod.service")
+ os.link(sdir + "debian/mongod.upstart",
+ sdir + "debian/%s%s-server.mongod.upstart" % (distro.pkgbase(), suffix))
+ os.unlink(sdir + "debian/mongod.service")
else:
- os.link(sdir+"debian/mongod.service", sdir+"debian/%s%s-server.mongod.service" % (distro.pkgbase(), suffix))
- os.unlink(sdir+"debian/mongod.upstart")
+ os.link(sdir + "debian/mongod.service",
+ sdir + "debian/%s%s-server.mongod.service" % (distro.pkgbase(), suffix))
+ os.unlink(sdir + "debian/mongod.upstart")
else:
raise Exception("unknown debianoid flavor: not debian or ubuntu?")
# Rewrite the control and rules files
- write_debian_changelog(sdir+"debian/changelog", spec, srcdir)
- distro_arch=distro.archname(arch)
- sysassert(["cp", "-v", srcdir+"debian/%s%s.control" % (distro.pkgbase(), suffix), sdir+"debian/control"])
- sysassert(["cp", "-v", srcdir+"debian/%s%s.rules" % (distro.pkgbase(), suffix), sdir+"debian/rules"])
-
+ write_debian_changelog(sdir + "debian/changelog", spec, srcdir)
+ distro_arch = distro.archname(arch)
+ sysassert([
+ "cp", "-v", srcdir + "debian/%s%s.control" % (distro.pkgbase(), suffix),
+ sdir + "debian/control"
+ ])
+ sysassert([
+ "cp", "-v", srcdir + "debian/%s%s.rules" % (distro.pkgbase(), suffix), sdir + "debian/rules"
+ ])
# old non-server-package postinst will be hanging around for old versions
#
- if os.path.exists(sdir+"debian/postinst"):
- os.unlink(sdir+"debian/postinst")
+ if os.path.exists(sdir + "debian/postinst"):
+ os.unlink(sdir + "debian/postinst")
# copy our postinst files
#
- sysassert(["sh", "-c", "cp -v \"%sdebian/\"*.postinst \"%sdebian/\""%(srcdir, sdir)])
+ sysassert(["sh", "-c", "cp -v \"%sdebian/\"*.postinst \"%sdebian/\"" % (srcdir, sdir)])
# Do the packaging.
- oldcwd=os.getcwd()
+ oldcwd = os.getcwd()
try:
os.chdir(sdir)
sysassert(["dpkg-buildpackage", "-uc", "-us", "-a" + distro_arch])
finally:
os.chdir(oldcwd)
- r=distro.repodir(arch, build_os, spec)
+ r = distro.repodir(arch, build_os, spec)
ensure_dir(r)
# FIXME: see if shutil.copyfile or something can do this without
# much pain.
- sysassert(["sh", "-c", "cp -v \"%s/../\"*.deb \"%s\""%(sdir, r)])
+ sysassert(["sh", "-c", "cp -v \"%s/../\"*.deb \"%s\"" % (sdir, r)])
return r
+
def make_deb_repo(repo, distro, build_os, spec):
# Note: the Debian repository Packages files must be generated
# very carefully in order to be usable.
- oldpwd=os.getcwd()
- os.chdir(repo+"../../../../../../")
+ oldpwd = os.getcwd()
+ os.chdir(repo + "../../../../../../")
try:
- dirs=set([os.path.dirname(deb)[2:] for deb in backtick(["find", ".", "-name", "*.deb"]).split()])
+ dirs = set(
+ [os.path.dirname(deb)[2:] for deb in backtick(["find", ".", "-name", "*.deb"]).split()])
for d in dirs:
- s=backtick(["dpkg-scanpackages", d, "/dev/null"])
- with open(d+"/Packages", "w") as f:
+ s = backtick(["dpkg-scanpackages", d, "/dev/null"])
+ with open(d + "/Packages", "w") as f:
f.write(s)
- b=backtick(["gzip", "-9c", d+"/Packages"])
- with open(d+"/Packages.gz", "wb") as f:
+ b = backtick(["gzip", "-9c", d + "/Packages"])
+ with open(d + "/Packages.gz", "wb") as f:
f.write(b)
finally:
os.chdir(oldpwd)
# Notes: the Release{,.gpg} files must live in a special place,
# and must be created after all the Packages.gz files have been
# done.
- s="""Origin: mongodb
+ s = """Origin: mongodb
Label: mongodb
Suite: %s
Codename: %s/mongodb-org
@@ -546,13 +591,13 @@ Architectures: amd64 arm64
Components: %s
Description: MongoDB packages
""" % (distro.repo_os_version(build_os), distro.repo_os_version(build_os), distro.repo_component())
- if os.path.exists(repo+"../../Release"):
- os.unlink(repo+"../../Release")
- if os.path.exists(repo+"../../Release.gpg"):
- os.unlink(repo+"../../Release.gpg")
- oldpwd=os.getcwd()
- os.chdir(repo+"../../")
- s2=backtick(["apt-ftparchive", "release", "."])
+ if os.path.exists(repo + "../../Release"):
+ os.unlink(repo + "../../Release")
+ if os.path.exists(repo + "../../Release.gpg"):
+ os.unlink(repo + "../../Release.gpg")
+ oldpwd = os.getcwd()
+ os.chdir(repo + "../../")
+ s2 = backtick(["apt-ftparchive", "release", "."])
try:
with open("Release", 'w') as f:
f.write(s)
@@ -568,20 +613,20 @@ def move_repos_into_place(src, dst):
# one. This feels like a lot of hooey for something so trivial.
# First, make a crispy fresh new directory to put the stuff in.
- i=0
+ i = 0
while True:
- date_suffix=time.strftime("%Y-%m-%d")
- dname=dst+".%s.%d" % (date_suffix, i)
+ date_suffix = time.strftime("%Y-%m-%d")
+ dname = dst + ".%s.%d" % (date_suffix, i)
try:
os.mkdir(dname)
break
except OSError:
- exc=sys.exc_value
+ exc = sys.exc_value
if exc.errno == errno.EEXIST:
pass
else:
raise exc
- i=i+1
+ i = i + 1
# Put the stuff in our new directory.
for r in os.listdir(src):
@@ -589,65 +634,69 @@ def move_repos_into_place(src, dst):
# Make a symlink to the new directory; the symlink will be renamed
# to dst shortly.
- i=0
+ i = 0
while True:
- tmpnam=dst+".TMP.%d" % i
+ tmpnam = dst + ".TMP.%d" % i
try:
os.symlink(dname, tmpnam)
break
- except OSError: # as exc: # Python >2.5
- exc=sys.exc_value
+ except OSError: # as exc: # Python >2.5
+ exc = sys.exc_value
if exc.errno == errno.EEXIST:
pass
else:
raise exc
- i=i+1
+ i = i + 1
# Make a symlink to the old directory; this symlink will be
# renamed shortly, too.
- oldnam=None
+ oldnam = None
if os.path.exists(dst):
- i=0
- while True:
- oldnam=dst+".old.%d" % i
- try:
- os.symlink(os.readlink(dst), oldnam)
- break
- except OSError: # as exc: # Python >2.5
- exc=sys.exc_value
- if exc.errno == errno.EEXIST:
- pass
- else:
- raise exc
+ i = 0
+ while True:
+ oldnam = dst + ".old.%d" % i
+ try:
+ os.symlink(os.readlink(dst), oldnam)
+ break
+ except OSError: # as exc: # Python >2.5
+ exc = sys.exc_value
+ if exc.errno == errno.EEXIST:
+ pass
+ else:
+ raise exc
os.rename(tmpnam, dst)
if oldnam:
- os.rename(oldnam, dst+".old")
+ os.rename(oldnam, dst + ".old")
def write_debian_changelog(path, spec, srcdir):
- oldcwd=os.getcwd()
+ oldcwd = os.getcwd()
os.chdir(srcdir)
- preamble=""
+ preamble = ""
try:
- s=preamble+backtick(["sh", "-c", "git archive %s debian/changelog | tar xOf -" % spec.metadata_gitspec()])
+ s = preamble + backtick(
+ ["sh", "-c",
+ "git archive %s debian/changelog | tar xOf -" % spec.metadata_gitspec()])
finally:
os.chdir(oldcwd)
- lines=s.split("\n")
+ lines = s.split("\n")
# If the first line starts with "mongodb", it's not a revision
# preamble, and so frob the version number.
- lines[0]=re.sub("^mongodb \\(.*\\)", "mongodb (%s)" % (spec.pversion(Distro("debian"))), lines[0])
+ lines[0] = re.sub("^mongodb \\(.*\\)", "mongodb (%s)" % (spec.pversion(Distro("debian"))),
+ lines[0])
# Rewrite every changelog entry starting in mongodb<space>
- lines=[re.sub("^mongodb ", "mongodb%s " % (spec.suffix()), l) for l in lines]
- lines=[re.sub("^ --", " --", l) for l in lines]
- s="\n".join(lines)
+ lines = [re.sub("^mongodb ", "mongodb%s " % (spec.suffix()), l) for l in lines]
+ lines = [re.sub("^ --", " --", l) for l in lines]
+ s = "\n".join(lines)
with open(path, 'w') as f:
f.write(s)
+
def make_rpm(distro, build_os, arch, spec, srcdir):
# Create the specfile.
- suffix=spec.suffix()
- sdir=setupdir(distro, build_os, arch, spec)
+ suffix = spec.suffix()
+ sdir = setupdir(distro, build_os, arch, spec)
specfile = srcdir + "rpm/mongodb%s.spec" % suffix
init_spec = specfile.replace(".spec", "-init.spec")
@@ -662,8 +711,8 @@ def make_rpm(distro, build_os, arch, spec, srcdir):
# distros.
#
if distro.name() == "suse" and distro.repo_os_version(build_os) in ("10", "11"):
- os.unlink(sdir+"rpm/init.d-mongod")
- os.link(sdir+"rpm/init.d-mongod.suse", sdir+"rpm/init.d-mongod")
+ os.unlink(sdir + "rpm/init.d-mongod")
+ os.link(sdir + "rpm/init.d-mongod.suse", sdir + "rpm/init.d-mongod")
os.unlink(specfile)
os.link(init_spec, specfile)
@@ -674,10 +723,10 @@ def make_rpm(distro, build_os, arch, spec, srcdir):
os.unlink(specfile)
os.link(init_spec, specfile)
- topdir=ensure_dir('%s/rpmbuild/%s/' % (os.getcwd(), build_os))
+ topdir = ensure_dir('%s/rpmbuild/%s/' % (os.getcwd(), build_os))
for subdir in ["BUILD", "RPMS", "SOURCES", "SPECS", "SRPMS"]:
ensure_dir("%s/%s/" % (topdir, subdir))
- distro_arch=distro.archname(arch)
+ distro_arch = distro.archname(arch)
# RPM tools take these macro files that define variables in
# RPMland. Unfortunately, there's no way to tell RPM tools to use
# a given file *in addition* to the files that it would already
@@ -697,53 +746,68 @@ def make_rpm(distro, build_os, arch, spec, srcdir):
# On RHEL systems, --rcfile will generally be used and
# --macros will be used in Ubuntu.
#
- macrofiles=[l for l in backtick(["rpm", "--showrc"]).split("\n") if l.startswith("macrofiles")]
- flags=[]
- macropath=os.getcwd()+"/macros"
+ macrofiles = [
+ l for l in backtick(["rpm", "--showrc"]).split("\n") if l.startswith("macrofiles")
+ ]
+ flags = []
+ macropath = os.getcwd() + "/macros"
write_rpm_macros_file(macropath, topdir, distro.release_dist(build_os))
- if len(macrofiles)>0:
- macrofiles=macrofiles[0]+":"+macropath
- rcfile=os.getcwd()+"/rpmrc"
+ if len(macrofiles) > 0:
+ macrofiles = macrofiles[0] + ":" + macropath
+ rcfile = os.getcwd() + "/rpmrc"
write_rpmrc_file(rcfile, macrofiles)
- flags=["--rcfile", rcfile]
+ flags = ["--rcfile", rcfile]
else:
# This hard-coded hooey came from some box running RPM
# 4.4.2.3. It may not work over time, but RPM isn't sanely
# configurable.
- flags=["--macros", "/usr/lib/rpm/macros:/usr/lib/rpm/%s-linux/macros:/usr/lib/rpm/suse/macros:/etc/rpm/macros.*:/etc/rpm/macros:/etc/rpm/%s-linux/macros:~/.rpmmacros:%s" % (distro_arch, distro_arch, macropath)]
+ flags = [
+ "--macros",
+ "/usr/lib/rpm/macros:/usr/lib/rpm/%s-linux/macros:/usr/lib/rpm/suse/macros:/etc/rpm/macros.*:/etc/rpm/macros:/etc/rpm/%s-linux/macros:~/.rpmmacros:%s"
+ % (distro_arch, distro_arch, macropath)
+ ]
# Put the specfile and the tar'd up binaries and stuff in
# place.
#
# The version of rpm and rpm tools in RHEL 5.5 can't interpolate the
# %{dynamic_version} macro, so do it manually
with open(specfile, "r") as spec_source:
- with open(topdir+"SPECS/" + os.path.basename(specfile), "w") as spec_dest:
- for line in spec_source:
- line = line.replace('%{dynamic_version}', spec.pversion(distro))
- line = line.replace('%{dynamic_release}', spec.prelease())
- spec_dest.write(line)
-
- oldcwd=os.getcwd()
- os.chdir(sdir+"/../")
+ with open(topdir + "SPECS/" + os.path.basename(specfile), "w") as spec_dest:
+ for line in spec_source:
+ line = line.replace('%{dynamic_version}', spec.pversion(distro))
+ line = line.replace('%{dynamic_release}', spec.prelease())
+ spec_dest.write(line)
+
+ oldcwd = os.getcwd()
+ os.chdir(sdir + "/../")
try:
- sysassert(["tar", "-cpzf", topdir+"SOURCES/mongodb%s-%s.tar.gz" % (suffix, spec.pversion(distro)), os.path.basename(os.path.dirname(sdir))])
+ sysassert([
+ "tar", "-cpzf",
+ topdir + "SOURCES/mongodb%s-%s.tar.gz" % (suffix, spec.pversion(distro)),
+ os.path.basename(os.path.dirname(sdir))
+ ])
finally:
os.chdir(oldcwd)
# Do the build.
- flags.extend(["-D", "dynamic_version " + spec.pversion(distro), "-D", "dynamic_release " + spec.prelease(), "-D", "_topdir " + topdir])
- sysassert(["rpmbuild", "-ba", "--target", distro_arch] + flags + ["%s/SPECS/mongodb%s.spec" % (topdir, suffix)])
- r=distro.repodir(arch, build_os, spec)
+ flags.extend([
+ "-D", "dynamic_version " + spec.pversion(distro), "-D",
+ "dynamic_release " + spec.prelease(), "-D", "_topdir " + topdir
+ ])
+ sysassert(["rpmbuild", "-ba", "--target", distro_arch] + flags +
+ ["%s/SPECS/mongodb%s.spec" % (topdir, suffix)])
+ r = distro.repodir(arch, build_os, spec)
ensure_dir(r)
# FIXME: see if some combination of shutil.copy<hoohah> and glob
# can do this without shelling out.
- sysassert(["sh", "-c", "cp -v \"%s/RPMS/%s/\"*.rpm \"%s\""%(topdir, distro_arch, r)])
+ sysassert(["sh", "-c", "cp -v \"%s/RPMS/%s/\"*.rpm \"%s\"" % (topdir, distro_arch, r)])
return r
+
def make_rpm_repo(repo):
- oldpwd=os.getcwd()
- os.chdir(repo+"../")
+ oldpwd = os.getcwd()
+ os.chdir(repo + "../")
try:
sysassert(["createrepo", "."])
finally:
@@ -754,26 +818,29 @@ def write_rpmrc_file(path, string):
with open(path, 'w') as f:
f.write(string)
+
def write_rpm_macros_file(path, topdir, release_dist):
with open(path, 'w') as f:
f.write("%%_topdir %s\n" % topdir)
f.write("%%dist .%s\n" % release_dist)
f.write("%_use_internal_dependency_generator 0\n")
+
def ensure_dir(filename):
"""Make sure that the directory that's the dirname part of
filename exists, and return filename."""
dirpart = os.path.dirname(filename)
try:
os.makedirs(dirpart)
- except OSError: # as exc: # Python >2.5
- exc=sys.exc_value
+ except OSError: # as exc: # Python >2.5
+ exc = sys.exc_value
if exc.errno == errno.EEXIST:
pass
else:
raise exc
return filename
+
def is_valid_file(parser, filename):
"""Check if file exists, and return the filename"""
if not os.path.exists(filename):
@@ -781,5 +848,6 @@ def is_valid_file(parser, filename):
else:
return filename
+
if __name__ == "__main__":
main(sys.argv)
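
crossproduct() above recursively yields one list per combination of its input sequences; for comparison only, the standard library produces the same combinations as tuples. A minimal sketch with illustrative inputs:

    import itertools

    distros = ["debian", "redhat"]
    arches = ["x86_64", "arm64"]
    for distro, arch in itertools.product(distros, arches):
        print("%s %s" % (distro, arch))   # four combinations
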
diff --git a/buildscripts/promote_silent_failures.py b/buildscripts/promote_silent_failures.py
index aa6a71d5b83..81bdb72e694 100644
--- a/buildscripts/promote_silent_failures.py
+++ b/buildscripts/promote_silent_failures.py
@@ -15,7 +15,6 @@ import optparse
import os
import sys
-
# Get relative imports to work when the package is not installed on the PYTHONPATH.
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
@@ -31,12 +30,10 @@ def main():
usage = "usage: %prog [options] report.json"
parser = optparse.OptionParser(usage=usage)
- parser.add_option("-o", "--output-file",
- dest="outfile",
- default="-",
- help="If '-', then the report file is written to stdout."
- " Any other value is treated as the output file name. By default,"
- " output is written to stdout.")
+ parser.add_option("-o", "--output-file", dest="outfile", default="-",
+ help=("If '-', then the report file is written to stdout."
+ " Any other value is treated as the output file name. By default,"
+ " output is written to stdout."))
(options, args) = parser.parse_args()
@@ -64,5 +61,6 @@ def main():
else:
print(json.dumps(result_report))
+
if __name__ == "__main__":
main()
diff --git a/buildscripts/prune_check.py b/buildscripts/prune_check.py
index 8445418505a..570715c4c1f 100644
--- a/buildscripts/prune_check.py
+++ b/buildscripts/prune_check.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python2
-
""" This program stamps the shared scons directory with a timestamp so we can
determine the last prune time and run the prune script on a schedule.
It is meant to be invoked from the shell:
@@ -76,12 +75,14 @@ def check_last_prune_time(args):
# A 0 return code signals our Evergreen task that we should run the prune script.
# Otherwise, return 1 and skip pruning.
if diff.total_seconds() > seconds_since_last_prune:
- print("It has been {0:.2f} seconds ({1:.2f} hours) since last prune."
- .format(diff.total_seconds(), diff.total_seconds()/60/60))
+ print("It has been {0:.2f} seconds ({1:.2f} hours) since last prune.".format(
+ diff.total_seconds(),
+ diff.total_seconds() / 60 / 60))
sys.exit(0)
else:
- print("It has been {0:.2f} seconds ({1:.2f} hours) since last prune."
- .format(diff.total_seconds(), diff.total_seconds()/60/60))
+ print("It has been {0:.2f} seconds ({1:.2f} hours) since last prune.".format(
+ diff.total_seconds(),
+ diff.total_seconds() / 60 / 60))
sys.exit(1)
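
The reflowed message above reports the time since the last prune in both seconds and hours; the arithmetic is just timedelta.total_seconds() divided down to hours. A standalone check with a made-up interval:

    import datetime

    diff = datetime.timedelta(hours=30)   # pretend the last prune was 30 hours ago
    print("It has been {0:.2f} seconds ({1:.2f} hours) since last prune.".format(
        diff.total_seconds(),
        diff.total_seconds() / 60 / 60))  # 108000.00 seconds (30.00 hours)
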
diff --git a/buildscripts/pylinters.py b/buildscripts/pylinters.py
index 539979e7dfe..617406b2fb3 100755
--- a/buildscripts/pylinters.py
+++ b/buildscripts/pylinters.py
@@ -57,9 +57,9 @@ def get_py_linter(linter_filter):
def is_interesting_file(file_name):
# type: (str) -> bool
""""Return true if this file should be checked."""
- return file_name.endswith(".py") and (file_name.startswith("buildscripts/idl") or
- file_name.startswith("buildscripts/linter") or
- file_name.startswith("buildscripts/pylinters.py"))
+ return file_name.endswith(".py") and (file_name.startswith("buildscripts/idl")
+ or file_name.startswith("buildscripts/linter")
+ or file_name.startswith("buildscripts/pylinters.py"))
def _get_build_dir():
@@ -161,14 +161,12 @@ def main():
dest_prefix = "linter_"
for linter1 in linters:
msg = 'Path to linter %s' % (linter1.cmd_name)
- parser.add_argument(
- '--' + linter1.cmd_name, type=str, help=msg, dest=dest_prefix + linter1.cmd_name)
-
- parser.add_argument(
- '--linters',
- type=str,
- help="Comma separated list of filters to use, defaults to 'all'",
- default="all")
+ parser.add_argument('--' + linter1.cmd_name, type=str, help=msg,
+ dest=dest_prefix + linter1.cmd_name)
+
+ parser.add_argument('--linters', type=str,
+ help="Comma separated list of filters to use, defaults to 'all'",
+ default="all")
parser.add_argument('-v', "--verbose", action='store_true', help="Enable verbose logging")
diff --git a/buildscripts/remote_operations.py b/buildscripts/remote_operations.py
index 18e29fdf0db..b75af20a049 100755
--- a/buildscripts/remote_operations.py
+++ b/buildscripts/remote_operations.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-
"""Remote access utilities, via ssh & scp."""
from __future__ import print_function
@@ -21,8 +20,7 @@ if os.name == "posix" and sys.version_info[0] == 2:
import warnings
warnings.warn(("Falling back to using the subprocess module because subprocess32 isn't"
" available. When using the subprocess module, a child process may trigger"
- " an invalid free(). See SERVER-22219 for more details."),
- RuntimeWarning)
+ " an invalid free(). See SERVER-22219 for more details."), RuntimeWarning)
import subprocess
else:
import subprocess
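
The hunk above only reflows the warning text; the surrounding logic is the usual guarded import that prefers subprocess32 on POSIX under Python 2 and falls back to the standard library module otherwise. A condensed sketch of that idiom (not the verbatim module code):

    import os
    import sys

    if os.name == "posix" and sys.version_info[0] == 2:
        try:
            import subprocess32 as subprocess
        except ImportError:
            import warnings
            warnings.warn(("Falling back to using the subprocess module because subprocess32 isn't"
                           " available. See SERVER-22219 for more details."), RuntimeWarning)
            import subprocess
    else:
        import subprocess

    print("using {}".format(subprocess.__name__))
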
@@ -52,26 +50,15 @@ def posix_path(path):
path = path[1:-1]
drive, new_path = os.path.splitdrive(path)
if drive:
- new_path = posixpath.join(
- "/cygdrive",
- drive.split(":")[0],
- *re.split("/|\\\\", new_path))
+ new_path = posixpath.join("/cygdrive", drive.split(":")[0], *re.split("/|\\\\", new_path))
return "{quote}{path}{quote}".format(quote=path_quote, path=new_path)
class RemoteOperations(object):
"""Class to support remote operations."""
- def __init__(self,
- user_host,
- ssh_connection_options=None,
- ssh_options=None,
- scp_options=None,
- retries=0,
- retry_sleep=0,
- debug=False,
- shell_binary="/bin/bash",
- use_shell=False):
+ def __init__(self, user_host, ssh_connection_options=None, ssh_options=None, scp_options=None,
+ retries=0, retry_sleep=0, debug=False, shell_binary="/bin/bash", use_shell=False):
self.user_host = user_host
self.ssh_connection_options = ssh_connection_options if ssh_connection_options else ""
@@ -92,17 +79,15 @@ class RemoteOperations(object):
if not self.use_shell:
cmd = shlex.split(cmd)
# Use a common pipe for stdout & stderr for logging.
- process = subprocess.Popen(cmd,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
+ process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
shell=self.use_shell)
buff_stdout, _ = process.communicate()
return process.poll(), buff_stdout
def _remote_access(self):
""" This will check if a remote session is possible. """
- cmd = "ssh {} {} {} date".format(
- self.ssh_connection_options, self.ssh_options, self.user_host)
+ cmd = "ssh {} {} {} date".format(self.ssh_connection_options, self.ssh_options,
+ self.user_host)
attempt_num = 0
buff = ""
while True:
@@ -159,13 +144,9 @@ class RemoteOperations(object):
operation_param = "{}".format(operation_param.replace("'", r"\'"))
operation_param = "{}".format(operation_param.replace("\"", r"\""))
dollar = "$"
- cmd = "ssh {} {} {} {} -c \"{}'{}'\"".format(
- self.ssh_connection_options,
- self.ssh_options,
- self.user_host,
- self.shell_binary,
- dollar,
- operation_param)
+ cmd = "ssh {} {} {} {} -c \"{}'{}'\"".format(self.ssh_connection_options,
+ self.ssh_options, self.user_host,
+ self.shell_binary, dollar, operation_param)
cmds.append(cmd)
elif operation_type == "copy_to":
@@ -182,16 +163,15 @@ class RemoteOperations(object):
elif operation_type == "copy_from":
operation_dir = operation_dir if operation_dir else "."
if not os.path.isdir(operation_dir):
- raise ValueError(
- "Local directory '{}' does not exist.".format(operation_dir))
+ raise ValueError("Local directory '{}' does not exist.".format(operation_dir))
# We support multiple files being copied from the remote host
# by invoking scp for each file specified.
# Note - this is a method which scp does not support directly.
for copy_file in operation_param:
copy_file = posix_path(copy_file)
- cmd = "scp -r {} {} {}:".format(
- self.ssh_connection_options, self.scp_options, self.user_host)
+ cmd = "scp -r {} {} {}:".format(self.ssh_connection_options, self.scp_options,
+ self.user_host)
# Quote (on Posix), and escape the file if there are spaces.
# Note - we do not support other non-ASCII characters in a file name.
quote = "\"" if not _IS_WINDOWS else ""
@@ -202,9 +182,8 @@ class RemoteOperations(object):
cmds.append(cmd)
else:
- raise ValueError(
- "Invalid operation '{}' specified, choose from {}.".format(
- operation_type, _OPERATIONS))
+ raise ValueError("Invalid operation '{}' specified, choose from {}.".format(
+ operation_type, _OPERATIONS))
final_ret = 0
buff = ""
@@ -217,24 +196,18 @@ class RemoteOperations(object):
def shell(self, operation_param, operation_dir=None):
""" Helper for remote shell operations. """
- return self.operation(
- operation_type="shell",
- operation_param=operation_param,
- operation_dir=operation_dir)
+ return self.operation(operation_type="shell", operation_param=operation_param,
+ operation_dir=operation_dir)
def copy_to(self, operation_param, operation_dir=None):
""" Helper for remote copy_to operations. """
- return self.operation(
- operation_type="copy_to",
- operation_param=operation_param,
- operation_dir=operation_dir)
+ return self.operation(operation_type="copy_to", operation_param=operation_param,
+ operation_dir=operation_dir)
def copy_from(self, operation_param, operation_dir=None):
""" Helper for remote copy_from operations. """
- return self.operation(
- operation_type="copy_from",
- operation_param=operation_param,
- operation_dir=operation_dir)
+ return self.operation(operation_type="copy_from", operation_param=operation_param,
+ operation_dir=operation_dir)
def main():
@@ -245,114 +218,77 @@ def main():
shell_options = optparse.OptionGroup(parser, "Shell options")
copy_options = optparse.OptionGroup(parser, "Copy options")
- parser.add_option("--userHost",
- dest="user_host",
- default=None,
- help="User and remote host to execute commands on [REQUIRED]."
- " Examples, 'user@1.2.3.4' or 'user@myhost.com'.")
-
- parser.add_option("--operation",
- dest="operation",
- default="shell",
- choices=_OPERATIONS,
- help="Remote operation to perform, choose one of '{}',"
- " defaults to '%default'.".format(", ".join(_OPERATIONS)))
-
- control_options.add_option("--sshConnectionOptions",
- dest="ssh_connection_options",
- default=None,
- action="append",
- help="SSH connection options which are common to ssh and scp."
- " More than one option can be specified either"
- " in one quoted string or by specifying"
- " this option more than once. Example options:"
- " '-i $HOME/.ssh/access.pem -o ConnectTimeout=10"
- " -o ConnectionAttempts=10'")
-
- control_options.add_option("--sshOptions",
- dest="ssh_options",
- default=None,
- action="append",
- help="SSH specific options."
- " More than one option can be specified either"
- " in one quoted string or by specifying"
- " this option more than once. Example options:"
- " '-t' or '-T'")
-
- control_options.add_option("--scpOptions",
- dest="scp_options",
- default=None,
- action="append",
- help="SCP specific options."
- " More than one option can be specified either"
- " in one quoted string or by specifying"
- " this option more than once. Example options:"
- " '-l 5000'")
-
- control_options.add_option("--retries",
- dest="retries",
- type=int,
- default=0,
- help="Number of retries to attempt for operation,"
- " defaults to '%default'.")
-
- control_options.add_option("--retrySleep",
- dest="retry_sleep",
- type=int,
- default=10,
- help="Number of seconds to wait between retries,"
- " defaults to '%default'.")
-
- control_options.add_option("--debug",
- dest="debug",
- action="store_true",
- default=False,
+ parser.add_option("--userHost", dest="user_host", default=None,
+ help=("User and remote host to execute commands on [REQUIRED]."
+ " Examples, 'user@1.2.3.4' or 'user@myhost.com'."))
+
+ parser.add_option("--operation", dest="operation", default="shell", choices=_OPERATIONS,
+ help=("Remote operation to perform, choose one of '{}',"
+ " defaults to '%default'.".format(", ".join(_OPERATIONS))))
+
+ control_options.add_option("--sshConnectionOptions", dest="ssh_connection_options",
+ default=None, action="append",
+ help=("SSH connection options which are common to ssh and scp."
+ " More than one option can be specified either"
+ " in one quoted string or by specifying"
+ " this option more than once. Example options:"
+ " '-i $HOME/.ssh/access.pem -o ConnectTimeout=10"
+ " -o ConnectionAttempts=10'"))
+
+ control_options.add_option("--sshOptions", dest="ssh_options", default=None, action="append",
+ help=("SSH specific options."
+ " More than one option can be specified either"
+ " in one quoted string or by specifying"
+ " this option more than once. Example options:"
+ " '-t' or '-T'"))
+
+ control_options.add_option("--scpOptions", dest="scp_options", default=None, action="append",
+ help=("SCP specific options."
+ " More than one option can be specified either"
+ " in one quoted string or by specifying"
+ " this option more than once. Example options:"
+ " '-l 5000'"))
+
+ control_options.add_option("--retries", dest="retries", type=int, default=0,
+ help=("Number of retries to attempt for operation,"
+ " defaults to '%default'."))
+
+ control_options.add_option("--retrySleep", dest="retry_sleep", type=int, default=10,
+ help=("Number of seconds to wait between retries,"
+ " defaults to '%default'."))
+
+ control_options.add_option("--debug", dest="debug", action="store_true", default=False,
help="Provides debug output.")
- control_options.add_option("--verbose",
- dest="verbose",
- action="store_true",
- default=False,
+ control_options.add_option("--verbose", dest="verbose", action="store_true", default=False,
help="Print exit status and output at end.")
- shell_options.add_option("--commands",
- dest="remote_commands",
- default=None,
- action="append",
-                             help="Commands to execute on the remote host. The"
- " commands must be separated by a ';' and can either"
-                                  " be specified in a quoted string or by specifying"
- " this option more than once. A ';' will be added"
-                                  " between commands when this option is specified"
- " more than once.")
-
- shell_options.add_option("--commandDir",
- dest="command_dir",
- default=None,
- help="Working directory on remote to execute commands"
-                                  " from. Defaults to remote login directory.")
-
- copy_options.add_option("--file",
- dest="files",
- default=None,
- action="append",
- help="The file to copy to/from remote host. To"
- " support spaces in the file, each file must be"
- " specified using this option more than once.")
-
- copy_options.add_option("--remoteDir",
- dest="remote_dir",
- default=None,
- help="Remote directory to copy to, only applies when"
- " operation is 'copy_to'. Defaults to the login"
- " directory on the remote host.")
-
- copy_options.add_option("--localDir",
- dest="local_dir",
- default=".",
- help="Local directory to copy to, only applies when"
- " operation is 'copy_from'. Defaults to the"
- " current directory, '%default'.")
+ shell_options.add_option("--commands", dest="remote_commands", default=None, action="append",
+                             help=("Commands to execute on the remote host. The"
+ " commands must be separated by a ';' and can either"
+                                   " be specified in a quoted string or by specifying"
+ " this option more than once. A ';' will be added"
+                                   " between commands when this option is specified"
+ " more than once."))
+
+ shell_options.add_option("--commandDir", dest="command_dir", default=None,
+ help=("Working directory on remote to execute commands"
+                                   " from. Defaults to remote login directory."))
+
+ copy_options.add_option("--file", dest="files", default=None, action="append",
+ help=("The file to copy to/from remote host. To"
+ " support spaces in the file, each file must be"
+ " specified using this option more than once."))
+
+ copy_options.add_option("--remoteDir", dest="remote_dir", default=None,
+ help=("Remote directory to copy to, only applies when"
+ " operation is 'copy_to'. Defaults to the login"
+ " directory on the remote host."))
+
+ copy_options.add_option("--localDir", dest="local_dir", default=".",
+ help=("Local directory to copy to, only applies when"
+ " operation is 'copy_from'. Defaults to the"
+ " current directory, '%default'."))
parser.add_option_group(control_options)
parser.add_option_group(shell_options)
@@ -367,15 +303,14 @@ def main():
if options.operation == "shell":
if not getattr(options, "remote_commands", None):
parser.print_help()
- parser.error("Missing required '{}' option '{}'".format(
- options.operation, "--commands"))
+ parser.error("Missing required '{}' option '{}'".format(options.operation,
+ "--commands"))
operation_param = ";".join(options.remote_commands)
operation_dir = options.command_dir
else:
if not getattr(options, "files", None):
parser.print_help()
- parser.error("Missing required '{}' option '{}'".format(
- options.operation, "--file"))
+ parser.error("Missing required '{}' option '{}'".format(options.operation, "--file"))
operation_param = options.files
if options.operation == "copy_to":
operation_dir = options.remote_dir
@@ -398,13 +333,9 @@ def main():
scp_options = " ".join(options.scp_options)
remote_op = RemoteOperations(
- user_host=options.user_host,
- ssh_connection_options=ssh_connection_options,
- ssh_options=ssh_options,
- scp_options=scp_options,
- retries=options.retries,
- retry_sleep=options.retry_sleep,
- debug=options.debug)
+ user_host=options.user_host, ssh_connection_options=ssh_connection_options,
+ ssh_options=ssh_options, scp_options=scp_options, retries=options.retries,
+ retry_sleep=options.retry_sleep, debug=options.debug)
ret_code, buffer = remote_op.operation(options.operation, operation_param, operation_dir)
if options.verbose:
print("Return code: {} for command {}".format(ret_code, sys.argv))
diff --git a/buildscripts/requirements.txt b/buildscripts/requirements.txt
index 6a6aec669a1..2218745ec09 100644
--- a/buildscripts/requirements.txt
+++ b/buildscripts/requirements.txt
@@ -6,7 +6,7 @@ pyjwt == 1.5.3
pyyaml == 3.11
unittest-xml-reporting == 2.1.0
# Linters
-yapf == 0.16.0
+yapf == 0.21.0
mypy == 0.501 ; python_version > "3"
# typing in Python 2 for mypy
typing == 3.6.1; python_version < "3"
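
The pin moves from yapf 0.16.0 to 0.21.0; since yapf's output changes between releases, the reformatted files in this commit presumably reflect the 0.21.0 style. A hedged sketch of reformatting a single file with the pinned version through the module interface (the real wrapper lives in buildscripts/linter/yapf.py and buildscripts/pylinters.py and is not reproduced here):

    import subprocess
    import sys

    # Assumes 'pip install yapf==0.21.0' has been run in the current environment.
    subprocess.check_call([
        sys.executable, "-m", "yapf", "-i", "--style", "buildscripts/.style.yapf",
        "buildscripts/resmoke.py"
    ])
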
diff --git a/buildscripts/resmoke.py b/buildscripts/resmoke.py
index 4e59bc183a0..798e11e2c46 100755
--- a/buildscripts/resmoke.py
+++ b/buildscripts/resmoke.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-
"""
Command line utility for executing MongoDB tests of all kinds.
"""
@@ -70,8 +69,7 @@ def _execute_suite(suite):
archive = resmokelib.utils.archival.Archival(
archival_json_file=resmokelib.config.ARCHIVE_FILE,
limit_size_mb=resmokelib.config.ARCHIVE_LIMIT_MB,
- limit_files=resmokelib.config.ARCHIVE_LIMIT_TESTS,
- logger=logger)
+ limit_files=resmokelib.config.ARCHIVE_LIMIT_TESTS, logger=logger)
executor_config = suite.get_executor_config()
executor = resmokelib.testing.executor.TestSuiteExecutor(
@@ -88,8 +86,8 @@ def _execute_suite(suite):
suite.return_code = 74 # Exit code for IOError on POSIX systems.
return True
except:
- logger.exception("Encountered an error when running %ss of suite %s.",
- suite.test_kind, suite.get_display_name())
+ logger.exception("Encountered an error when running %ss of suite %s.", suite.test_kind,
+ suite.get_display_name())
suite.return_code = 2
return False
finally:
@@ -169,8 +167,7 @@ class Main(object):
"""
return resmokelib.suitesconfig.get_suites(
- suite_files=self.__values.suite_files.split(","),
- test_files=self.__args)
+ suite_files=self.__values.suite_files.split(","), test_files=self.__args)
def run(self):
"""
@@ -209,8 +206,8 @@ class Main(object):
suites_by_test = find_suites_by_test(suites)
for test in sorted(suites_by_test):
suite_names = suites_by_test[test]
- resmoke_logger.info("%s will be run by the following suite(s): %s",
- test, suite_names)
+ resmoke_logger.info("%s will be run by the following suite(s): %s", test,
+ suite_names)
sys.exit(0)
try:
@@ -222,8 +219,8 @@ class Main(object):
suite.record_suite_end()
resmoke_logger.info("=" * 80)
- resmoke_logger.info("Summary of %s suite: %s",
- suite.get_display_name(), _summarize_suite(suite))
+ resmoke_logger.info("Summary of %s suite: %s", suite.get_display_name(),
+ _summarize_suite(suite))
if interrupted or (suite.options.fail_fast and suite.return_code != 0):
time_taken = time.time() - self.__start_time
diff --git a/buildscripts/resmokeconfig/loggers/__init__.py b/buildscripts/resmokeconfig/loggers/__init__.py
index 6511d496364..1cecd4d110e 100644
--- a/buildscripts/resmokeconfig/loggers/__init__.py
+++ b/buildscripts/resmokeconfig/loggers/__init__.py
@@ -33,4 +33,5 @@ def _get_named_loggers():
return named_loggers
+
NAMED_LOGGERS = _get_named_loggers()
diff --git a/buildscripts/resmokeconfig/suites/__init__.py b/buildscripts/resmokeconfig/suites/__init__.py
index e075dd22e0d..4cb601ba9d9 100644
--- a/buildscripts/resmokeconfig/suites/__init__.py
+++ b/buildscripts/resmokeconfig/suites/__init__.py
@@ -33,4 +33,5 @@ def _get_named_suites():
return named_suites
+
NAMED_SUITES = _get_named_suites()
diff --git a/buildscripts/resmokelib/__init__.py b/buildscripts/resmokelib/__init__.py
index abebdc3c665..c6a0bc9d079 100644
--- a/buildscripts/resmokelib/__init__.py
+++ b/buildscripts/resmokelib/__init__.py
@@ -1,3 +1,5 @@
+"""Empty."""
+
from __future__ import absolute_import
from . import config
diff --git a/buildscripts/resmokelib/config.py b/buildscripts/resmokelib/config.py
index 621431b71ba..4c8f0a4e720 100644
--- a/buildscripts/resmokelib/config.py
+++ b/buildscripts/resmokelib/config.py
@@ -10,7 +10,6 @@ import itertools
import os.path
import time
-
# Subdirectory under the dbpath prefix that contains directories with data files of mongod's started
# by resmoke.py.
FIXTURE_SUBDIR = "resmoke"
@@ -166,14 +165,15 @@ class SuiteOptions(_SuiteOptions):
description = None
include_tags = None
- parent = dict(zip(SuiteOptions._fields, [
- description,
- FAIL_FAST,
- include_tags,
- JOBS,
- REPEAT,
- REPORT_FAILURE_STATUS,
- ]))
+ parent = dict(
+ zip(SuiteOptions._fields, [
+ description,
+ FAIL_FAST,
+ include_tags,
+ JOBS,
+ REPEAT,
+ REPORT_FAILURE_STATUS,
+ ]))
options = self._asdict()
for field in SuiteOptions._fields:
@@ -183,8 +183,8 @@ class SuiteOptions(_SuiteOptions):
return SuiteOptions(**options)
-SuiteOptions.ALL_INHERITED = SuiteOptions(**dict(zip(SuiteOptions._fields,
- itertools.repeat(SuiteOptions.INHERIT))))
+SuiteOptions.ALL_INHERITED = SuiteOptions(**dict(
+ zip(SuiteOptions._fields, itertools.repeat(SuiteOptions.INHERIT))))
##
# Variables that are set by the user at the command line or with --options.
@@ -380,7 +380,5 @@ DEFAULT_INTEGRATION_TEST_LIST = "build/integration_tests.txt"
# External files or executables, used as suite selectors, that are created during the build and
# therefore might not be available when creating a test membership map.
-EXTERNAL_SUITE_SELECTORS = (DEFAULT_BENCHMARK_TEST_LIST,
- DEFAULT_UNIT_TEST_LIST,
- DEFAULT_INTEGRATION_TEST_LIST,
- DEFAULT_DBTEST_EXECUTABLE)
+EXTERNAL_SUITE_SELECTORS = (DEFAULT_BENCHMARK_TEST_LIST, DEFAULT_UNIT_TEST_LIST,
+ DEFAULT_INTEGRATION_TEST_LIST, DEFAULT_DBTEST_EXECUTABLE)
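
The reindented block above resolves option inheritance: a parent mapping of global defaults is built, and any SuiteOptions field left at the INHERIT sentinel is replaced with the parent's value; ALL_INHERITED is simply a SuiteOptions whose every field is that sentinel. A reduced model of the same mechanism (the field names below are generic stand-ins, not the real SuiteOptions fields):

    import collections
    import itertools

    Options = collections.namedtuple("Options", ["fail_fast", "jobs", "repeat"])
    INHERIT = object()  # sentinel meaning "take this value from the parent"

    def resolve(child, parent):
        """Replace INHERIT fields in 'child' with the corresponding 'parent' values."""
        values = child._asdict()
        for field in Options._fields:
            if values[field] is INHERIT:
                values[field] = getattr(parent, field)
        return Options(**values)

    ALL_INHERITED = Options(*itertools.repeat(INHERIT, len(Options._fields)))
    parent = Options(fail_fast=False, jobs=1, repeat=1)
    print(resolve(ALL_INHERITED._replace(jobs=4), parent))  # Options(fail_fast=False, jobs=4, repeat=1)
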
diff --git a/buildscripts/resmokelib/core/network.py b/buildscripts/resmokelib/core/network.py
index 396da4e4935..eda2c95417e 100644
--- a/buildscripts/resmokelib/core/network.py
+++ b/buildscripts/resmokelib/core/network.py
@@ -49,7 +49,7 @@ class PortAllocator(object):
"""
# A PortAllocator will not return any port greater than this number.
- MAX_PORT = 2 ** 16 - 1
+ MAX_PORT = 2**16 - 1
# Each job gets a contiguous range of _PORTS_PER_JOB ports, with job 0 getting the first block
# of ports, job 1 getting the second block, and so on.
@@ -83,8 +83,8 @@ class PortAllocator(object):
if next_port >= start_port + cls._PORTS_PER_FIXTURE:
raise errors.PortAllocationError(
- "Fixture has requested more than the %d ports reserved per fixture"
- % cls._PORTS_PER_FIXTURE)
+ "Fixture has requested more than the %d ports reserved per fixture" %
+ cls._PORTS_PER_FIXTURE)
return next_port
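
The comments above spell out the allocation scheme: every job owns a contiguous block of ports, and a single fixture may only draw a fixed reservation from that block before the allocator raises. A toy version of the arithmetic, with an invented base port and invented block sizes rather than the real constants in network.py:

    MAX_PORT = 2**16 - 1
    BASE_PORT = 20000       # hypothetical starting port
    PORTS_PER_JOB = 250     # hypothetical block of ports per job
    PORTS_PER_FIXTURE = 10  # hypothetical reservation per fixture

    def fixture_port(job_num, request_num):
        """Return the request_num-th port for job_num, enforcing the per-fixture cap."""
        if request_num >= PORTS_PER_FIXTURE:
            raise ValueError("Fixture has requested more than the %d ports reserved per fixture" %
                             PORTS_PER_FIXTURE)
        port = BASE_PORT + job_num * PORTS_PER_JOB + request_num
        if port > MAX_PORT:
            raise ValueError("Port %d is out of range" % port)
        return port

    print(fixture_port(job_num=0, request_num=0))  # 20000
    print(fixture_port(job_num=2, request_num=3))  # 20503
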
diff --git a/buildscripts/resmokelib/core/process.py b/buildscripts/resmokelib/core/process.py
index 03fb8496163..42f9454bd91 100644
--- a/buildscripts/resmokelib/core/process.py
+++ b/buildscripts/resmokelib/core/process.py
@@ -29,8 +29,7 @@ if os.name == "posix" and sys.version_info[0] == 2:
import warnings
warnings.warn(("Falling back to using the subprocess module because subprocess32 isn't"
" available. When using the subprocess module, a child process may trigger"
- " an invalid free(). See SERVER-22219 for more details."),
- RuntimeWarning)
+ " an invalid free(). See SERVER-22219 for more details."), RuntimeWarning)
import subprocess
else:
import subprocess
@@ -74,8 +73,7 @@ if sys.platform == "win32":
win32job.JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE
# Update the limits of the job object.
- win32job.SetInformationJobObject(job_object,
- win32job.JobObjectExtendedLimitInformation,
+ win32job.SetInformationJobObject(job_object, win32job.JobObjectExtendedLimitInformation,
job_info)
return job_object
@@ -138,13 +136,9 @@ class Process(object):
close_fds = (sys.platform != "win32")
with _POPEN_LOCK:
- self._process = subprocess.Popen(self.args,
- bufsize=buffer_size,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- close_fds=close_fds,
- env=self.env,
- creationflags=creation_flags)
+ self._process = subprocess.Popen(self.args, bufsize=buffer_size, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE, close_fds=close_fds,
+ env=self.env, creationflags=creation_flags)
self.pid = self._process.pid
self._stdout_pipe = pipe.LoggerPipe(self.logger, logging.INFO, self._process.stdout)
@@ -173,16 +167,15 @@ class Process(object):
mongo_signal_handle = None
try:
mongo_signal_handle = win32event.OpenEvent(
- win32event.EVENT_MODIFY_STATE, False, "Global\\Mongo_" +
- str(self._process.pid))
+ win32event.EVENT_MODIFY_STATE, False,
+ "Global\\Mongo_" + str(self._process.pid))
if not mongo_signal_handle:
# The process has already died.
return
win32event.SetEvent(mongo_signal_handle)
# Wait 60 seconds for the program to exit.
- status = win32event.WaitForSingleObject(
- self._process._handle, 60 * 1000)
+ status = win32event.WaitForSingleObject(self._process._handle, 60 * 1000)
if status == win32event.WAIT_OBJECT_0:
return
except win32process.error as err:
diff --git a/buildscripts/resmokelib/logging/buildlogger.py b/buildscripts/resmokelib/logging/buildlogger.py
index 01bba202023..56a5defc5a4 100644
--- a/buildscripts/resmokelib/logging/buildlogger.py
+++ b/buildscripts/resmokelib/logging/buildlogger.py
@@ -12,7 +12,6 @@ import requests
from . import handlers
from .. import config as _config
-
CREATE_BUILD_ENDPOINT = "/build"
APPEND_GLOBAL_LOGS_ENDPOINT = "/build/%(build_id)s"
CREATE_TEST_ENDPOINT = "/build/%(build_id)s/test"
@@ -94,10 +93,7 @@ class _BaseBuildloggerHandler(handlers.BufferedHandler):
handler for the test logs.
"""
- def __init__(self,
- build_config,
- endpoint,
- capacity=_SEND_AFTER_LINES,
+ def __init__(self, build_config, endpoint, capacity=_SEND_AFTER_LINES,
interval_secs=_SEND_AFTER_SECS):
"""
Initializes the buildlogger handler with the build id and
@@ -213,8 +209,8 @@ class BuildloggerTestHandler(_BaseBuildloggerHandler):
Buildlogger handler for the test logs.
"""
- def __init__(self, build_config, build_id, test_id,
- capacity=_SEND_AFTER_LINES, interval_secs=_SEND_AFTER_SECS):
+ def __init__(self, build_config, build_id, test_id, capacity=_SEND_AFTER_LINES,
+ interval_secs=_SEND_AFTER_SECS):
"""Initializes the buildlogger handler with the credentials, build id, and test id."""
endpoint = APPEND_TEST_LOGS_ENDPOINT % {
"build_id": build_id,
@@ -249,8 +245,8 @@ class BuildloggerGlobalHandler(_BaseBuildloggerHandler):
Buildlogger handler for the global logs.
"""
- def __init__(self, build_config, build_id,
- capacity=_SEND_AFTER_LINES, interval_secs=_SEND_AFTER_SECS):
+ def __init__(self, build_config, build_id, capacity=_SEND_AFTER_LINES,
+ interval_secs=_SEND_AFTER_SECS):
"""Initializes the buildlogger handler with the credentials and build id."""
endpoint = APPEND_GLOBAL_LOGS_ENDPOINT % {"build_id": build_id}
_BaseBuildloggerHandler.__init__(self, build_config, endpoint, capacity, interval_secs)
@@ -289,10 +285,8 @@ class BuildloggerServer(object):
builder = "%s_%s" % (self.config["builder"], suffix)
build_num = int(self.config["build_num"])
- handler = handlers.HTTPHandler(
- url_root=_config.BUILDLOGGER_URL,
- username=username,
- password=password)
+ handler = handlers.HTTPHandler(url_root=_config.BUILDLOGGER_URL, username=username,
+ password=password)
response = handler.post(CREATE_BUILD_ENDPOINT, data={
"builder": builder,
@@ -307,18 +301,18 @@ class BuildloggerServer(object):
"""
Returns a new test id for sending test logs to.
"""
- handler = handlers.HTTPHandler(
- url_root=_config.BUILDLOGGER_URL,
- username=self.config["username"],
- password=self.config["password"])
+ handler = handlers.HTTPHandler(url_root=_config.BUILDLOGGER_URL,
+ username=self.config["username"],
+ password=self.config["password"])
endpoint = CREATE_TEST_ENDPOINT % {"build_id": build_id}
- response = handler.post(endpoint, data={
- "test_filename": test_filename,
- "command": test_command,
- "phase": self.config.get("build_phase", "unknown"),
- "task_id": _config.EVERGREEN_TASK_ID,
- })
+ response = handler.post(
+ endpoint, data={
+ "test_filename": test_filename,
+ "command": test_command,
+ "phase": self.config.get("build_phase", "unknown"),
+ "task_id": _config.EVERGREEN_TASK_ID,
+ })
return response["id"]
diff --git a/buildscripts/resmokelib/logging/flush.py b/buildscripts/resmokelib/logging/flush.py
index 18c3a370fbe..5b2b488e51a 100644
--- a/buildscripts/resmokelib/logging/flush.py
+++ b/buildscripts/resmokelib/logging/flush.py
@@ -11,7 +11,6 @@ import time
from ..utils import scheduler
-
_FLUSH_THREAD_LOCK = threading.Lock()
_FLUSH_THREAD = None
diff --git a/buildscripts/resmokelib/logging/formatters.py b/buildscripts/resmokelib/logging/formatters.py
index 4cc36da32d4..058c6d512c8 100644
--- a/buildscripts/resmokelib/logging/formatters.py
+++ b/buildscripts/resmokelib/logging/formatters.py
@@ -42,7 +42,7 @@ class ISO8601Formatter(logging.Formatter):
         # The offset is positive if the local timezone is behind (west of) UTC, and negative if it
         # is ahead (east) of UTC.
- utc_offset_prefix = "-" if utc_offset_secs > 0 else "+"
+ utc_offset_prefix = "-" if utc_offset_secs > 0 else "+"
utc_offset_secs = abs(utc_offset_secs)
utc_offset_mins = (utc_offset_secs / 60) % 60
diff --git a/buildscripts/resmokelib/logging/handlers.py b/buildscripts/resmokelib/logging/handlers.py
index 8a0bca26fbb..28e35d25a09 100644
--- a/buildscripts/resmokelib/logging/handlers.py
+++ b/buildscripts/resmokelib/logging/handlers.py
@@ -225,12 +225,8 @@ class HTTPHandler(object):
# that defined InsecureRequestWarning.
pass
- response = requests.post(url,
- data=data,
- headers=headers,
- timeout=timeout_secs,
- auth=self.auth_handler,
- verify=should_validate_certificates)
+ response = requests.post(url, data=data, headers=headers, timeout=timeout_secs,
+ auth=self.auth_handler, verify=should_validate_certificates)
response.raise_for_status()
diff --git a/buildscripts/resmokelib/logging/loggers.py b/buildscripts/resmokelib/logging/loggers.py
index 28da3b9744c..a53186b7aa3 100644
--- a/buildscripts/resmokelib/logging/loggers.py
+++ b/buildscripts/resmokelib/logging/loggers.py
@@ -40,8 +40,8 @@ def configure_loggers(logging_config):
fixture_logger = FixtureRootLogger(logging_config, build_logger_server)
tests_logger = TestsRootLogger(logging_config, build_logger_server)
global EXECUTOR_LOGGER
- EXECUTOR_LOGGER = ExecutorRootLogger(logging_config, build_logger_server,
- fixture_logger, tests_logger)
+ EXECUTOR_LOGGER = ExecutorRootLogger(logging_config, build_logger_server, fixture_logger,
+ tests_logger)
class BaseLogger(logging.Logger):
@@ -50,6 +50,7 @@ class BaseLogger(logging.Logger):
Custom loggers share access to the logging configuration and provide methods
to create other loggers.
"""
+
def __init__(self, name, logging_config=None, build_logger_server=None, parent=None):
"""Initialize a BaseLogger.
@@ -93,6 +94,7 @@ class BaseLogger(logging.Logger):
class RootLogger(BaseLogger):
"""A custom class for top-level loggers (executor, fixture, tests)."""
+
def __init__(self, name, logging_config, build_logger_server):
"""Initialize a RootLogger.
@@ -115,8 +117,8 @@ class RootLogger(BaseLogger):
def _add_handler(self, handler_info, formatter):
handler_class = handler_info["class"]
if handler_class == "logging.FileHandler":
- handler = logging.FileHandler(filename=handler_info["filename"],
- mode=handler_info.get("mode", "w"))
+ handler = logging.FileHandler(filename=handler_info["filename"], mode=handler_info.get(
+ "mode", "w"))
elif handler_class == "logging.NullHandler":
handler = logging.NullHandler()
elif handler_class == "logging.StreamHandler":
@@ -131,6 +133,7 @@ class RootLogger(BaseLogger):
class ExecutorRootLogger(RootLogger):
"""Class for the "executor" top-level logger."""
+
def __init__(self, logging_config, build_logger_server, fixture_root_logger, tests_root_logger):
"""Initialize an ExecutorRootLogger."""
RootLogger.__init__(self, EXECUTOR_LOGGER_NAME, logging_config, build_logger_server)
@@ -231,6 +234,7 @@ class TestLogger(BaseLogger):
class FixtureRootLogger(RootLogger):
"""Class for the "fixture" top-level logger."""
+
def __init__(self, logging_config, build_logger_server):
"""Initialize a FixtureRootLogger.
@@ -289,12 +293,13 @@ class FixtureNodeLogger(BaseLogger):
def new_fixture_node_logger(self, node_name):
"""Create a new child FixtureNodeLogger."""
- return FixtureNodeLogger(self.fixture_class, self.job_num,
- "%s:%s" % (self.node_name, node_name), self)
+ return FixtureNodeLogger(self.fixture_class, self.job_num, "%s:%s" % (self.node_name,
+ node_name), self)
class TestsRootLogger(RootLogger):
"""Class for the "tests" top-level logger."""
+
def __init__(self, logging_config, build_logger_server):
"""Initialize a TestsRootLogger.
@@ -330,6 +335,7 @@ class HookLogger(BaseLogger):
# Util methods
+
def _fallback_buildlogger_handler(include_logger_name=True):
"""
Returns a handler that writes to stderr.
diff --git a/buildscripts/resmokelib/parser.py b/buildscripts/resmokelib/parser.py
index 2dfb4625006..0aeb969688c 100644
--- a/buildscripts/resmokelib/parser.py
+++ b/buildscripts/resmokelib/parser.py
@@ -138,27 +138,26 @@ def parse_command_line():
help=("Enables or disables preallocation of journal files for all mongod"
" processes. Defaults to %default."))
- parser.add_option("--shellConnString", dest="shell_conn_string",
- metavar="CONN_STRING",
+ parser.add_option("--shellConnString", dest="shell_conn_string", metavar="CONN_STRING",
help="Overrides the default fixture and connect to an existing MongoDB"
- " cluster instead. This is useful for connecting to a MongoDB"
- " deployment started outside of resmoke.py including one running in a"
- " debugger.")
+ " cluster instead. This is useful for connecting to a MongoDB"
+ " deployment started outside of resmoke.py including one running in a"
+ " debugger.")
parser.add_option("--shellPort", dest="shell_port", metavar="PORT",
help="Convenience form of --shellConnString for connecting to an"
- " existing MongoDB cluster with the URL mongodb://localhost:[PORT]."
- " This is useful for connecting to a server running in a debugger.")
+ " existing MongoDB cluster with the URL mongodb://localhost:[PORT]."
+ " This is useful for connecting to a server running in a debugger.")
parser.add_option("--repeat", type="int", dest="repeat", metavar="N",
help="Repeats the given suite(s) N times, or until one fails.")
parser.add_option("--reportFailureStatus", type="choice", action="store",
- dest="report_failure_status", choices=("fail", "silentfail"),
- metavar="STATUS",
+ dest="report_failure_status", choices=("fail",
+ "silentfail"), metavar="STATUS",
help="Controls if the test failure status should be reported as failed"
- " or be silently ignored (STATUS=silentfail). Dynamic test failures will"
- " never be silently ignored. Defaults to STATUS=%default.")
+ " or be silently ignored (STATUS=silentfail). Dynamic test failures will"
+ " never be silently ignored. Defaults to STATUS=%default.")
parser.add_option("--reportFile", dest="report_file", metavar="REPORT",
help="Writes a JSON file with test status and timing information.")
@@ -201,7 +200,7 @@ def parse_command_line():
parser.add_option("--storageEngineCacheSizeGB", dest="storage_engine_cache_size_gb",
metavar="CONFIG", help="Sets the storage engine cache size configuration"
- " setting for all mongod's.")
+ " setting for all mongod's.")
parser.add_option("--tagFile", dest="tag_file", metavar="OPTIONS",
help="A YAML file that associates tests and tags.")
@@ -217,11 +216,10 @@ def parse_command_line():
parser.add_option("--executor", dest="executor_file",
help="OBSOLETE: Superceded by --suites; specify --suites=SUITE path/to/test"
- " to run a particular test under a particular suite configuration.")
+ " to run a particular test under a particular suite configuration.")
evergreen_options = optparse.OptionGroup(
- parser,
- title="Evergreen options",
+ parser, title="Evergreen options",
description=("Options used to propagate information about the Evergreen task running this"
" script."))
parser.add_option_group(evergreen_options)
@@ -247,8 +245,7 @@ def parse_command_line():
" patch build."))
evergreen_options.add_option("--projectName", dest="project_name", metavar="PROJECT_NAME",
- help=("Sets the name of the Evergreen project running the tests."
- ))
+ help=("Sets the name of the Evergreen project running the tests."))
evergreen_options.add_option("--revisionOrderId", dest="revision_order_id",
metavar="REVISION_ORDER_ID",
@@ -267,11 +264,8 @@ def parse_command_line():
evergreen_options.add_option("--versionId", dest="version_id", metavar="VERSION_ID",
help="Sets the version ID of the task.")
- benchmark_options = optparse.OptionGroup(
- parser,
- title="Benchmark test options",
- description="Options for running Benchmark tests"
- )
+ benchmark_options = optparse.OptionGroup(parser, title="Benchmark test options",
+ description="Options for running Benchmark tests")
parser.add_option_group(benchmark_options)
@@ -280,8 +274,7 @@ def parse_command_line():
help="Regex to filter benchmark tests to run.")
benchmark_options.add_option("--benchmarkListTests", dest="benchmark_list_tests",
- action="store_true",
- metavar="BENCHMARK_LIST_TESTS",
+ action="store_true", metavar="BENCHMARK_LIST_TESTS",
help="Lists all benchmark test configurations in each test file.")
benchmark_min_time_help = (
@@ -297,16 +290,10 @@ def parse_command_line():
"runs; use --benchmarkMinTimeSecs if you'd like to run a test for a longer or shorter "
"duration.")
benchmark_options.add_option("--benchmarkRepetitions", type="int", dest="benchmark_repetitions",
- metavar="BENCHMARK_REPETITIONS",
- help=benchmark_repetitions_help)
-
- parser.set_defaults(logger_file="console",
- dry_run="off",
- find_suites=False,
- list_suites=False,
- suite_files="with_server",
- prealloc_journal="off",
- shuffle="auto",
+ metavar="BENCHMARK_REPETITIONS", help=benchmark_repetitions_help)
+
+ parser.set_defaults(logger_file="console", dry_run="off", find_suites=False, list_suites=False,
+ suite_files="with_server", prealloc_journal="off", shuffle="auto",
stagger_jobs="off")
options, args = parser.parse_args()
@@ -326,8 +313,8 @@ def validate_options(parser, options, args):
if options.executor_file:
parser.error("--executor is superseded by --suites; specify --suites={} {} to run the"
- " test(s) under those suite configuration(s)"
- .format(options.executor_file, " ".join(args)))
+ " test(s) under those suite configuration(s)".format(
+ options.executor_file, " ".join(args)))
def validate_benchmark_options():
@@ -347,9 +334,7 @@ def validate_benchmark_options():
raise optparse.OptionValueError(
"--jobs=%d cannot be used for benchmark tests. Parallel jobs affect CPU cache access "
"patterns and cause additional context switching, which lead to inaccurate benchmark "
- "results. Please use --jobs=1"
- % _config.JOBS
- )
+ "results. Please use --jobs=1" % _config.JOBS)
def get_logging_config(values):
diff --git a/buildscripts/resmokelib/selector.py b/buildscripts/resmokelib/selector.py
index 71308097eb8..4fe1450d945 100644
--- a/buildscripts/resmokelib/selector.py
+++ b/buildscripts/resmokelib/selector.py
@@ -21,7 +21,6 @@ from . import utils
from .utils import globstar
from .utils import jscomment
-
########################
# Test file explorer #
########################
@@ -32,6 +31,7 @@ class TestFileExplorer(object):
The file related code has been confined to this class for testability.
"""
+
def is_glob_pattern(self, path):
"""Indicates if the provided path is a glob pattern.
@@ -139,6 +139,7 @@ class _TestList(object):
glob expansion of paths and check if they are existing files. If not, calling
     'include_files()' or 'exclude_files()' will raise a TypeError.
"""
+
def __init__(self, test_file_explorer, roots, tests_are_files=True):
"""Initializes the _TestList with a TestFileExplorer component and a list of root tests."""
self._test_file_explorer = test_file_explorer
@@ -208,13 +209,13 @@ class _TestList(object):
get_tags: a callable object that takes a test and returns the corresponding list of
tags.
"""
- self._filtered = {test for test in self._filtered
- if tag_expression(get_tags(test))}
+ self._filtered = {test for test in self._filtered if tag_expression(get_tags(test))}
def include_any_pattern(self, patterns):
"""
Filters the test list to only include tests that match any of the given glob patterns.
"""
+
def match(test):
for pattern in patterns:
if test == pattern or fnmatch.fnmatchcase(test, pattern):
@@ -244,6 +245,7 @@ class _TestList(object):
# Tag matching expressions #
##############################
+
class _AllOfExpression(object):
"""A tag matching expression that requires all child expressions to match."""
@@ -266,6 +268,7 @@ class _AnyOfExpression(object):
class _NotExpression(object):
"""A tag matching expression that matches if and only if the child expression does not match."""
+
def __init__(self, child):
self.__child = child
@@ -275,6 +278,7 @@ class _NotExpression(object):
class _MatchExpression(object):
"""A tag matching expression that matches when a specific tag is present."""
+
def __init__(self, tag):
self.__tag = tag
@@ -320,10 +324,10 @@ def _make_expression_list(configs):
class _SelectorConfig(object):
"""Base object to represent the configuration for test selection."""
- def __init__(self, root=None, roots=None,
- include_files=None, exclude_files=None,
- include_tags=None, exclude_tags=None,
- include_with_any_tags=None, exclude_with_any_tags=None):
+
+ def __init__(self, root=None, roots=None, include_files=None, exclude_files=None,
+ include_tags=None, exclude_tags=None, include_with_any_tags=None,
+ exclude_with_any_tags=None):
"""
Initializes the _SelectorConfig from the configuration elements.
@@ -353,10 +357,8 @@ class _SelectorConfig(object):
exclude_with_any_tags = self.__merge_lists(exclude_with_any_tags,
config.EXCLUDE_WITH_ANY_TAGS)
- self.tags_expression = self.__make_tags_expression(include_tags,
- exclude_tags,
- include_with_any_tags,
- exclude_with_any_tags)
+ self.tags_expression = self.__make_tags_expression(
+ include_tags, exclude_tags, include_with_any_tags, exclude_with_any_tags)
@staticmethod
def __merge_lists(list_a, list_b):
@@ -371,20 +373,18 @@ class _SelectorConfig(object):
return None
@staticmethod
- def __make_tags_expression(include_tags, exclude_tags,
- include_with_any_tags, exclude_with_any_tags):
+ def __make_tags_expression(include_tags, exclude_tags, include_with_any_tags,
+ exclude_with_any_tags):
expressions = []
if include_tags:
expressions.append(make_expression(include_tags))
elif exclude_tags:
expressions.append(_NotExpression(make_expression(exclude_tags)))
if include_with_any_tags:
- include_with_any_expr = make_expression(
- {"$anyOf": include_with_any_tags})
+ include_with_any_expr = make_expression({"$anyOf": include_with_any_tags})
expressions.append(include_with_any_expr)
if exclude_with_any_tags:
- exclude_with_any_expr = make_expression(
- {"$not": {"$anyOf": exclude_with_any_tags}})
+ exclude_with_any_expr = make_expression({"$not": {"$anyOf": exclude_with_any_tags}})
expressions.append(exclude_with_any_expr)
if expressions:
@@ -395,6 +395,7 @@ class _SelectorConfig(object):
class _Selector(object):
"""Selection algorithm to select tests matching a selector configuration."""
+
def __init__(self, test_file_explorer, tests_are_files=True):
"""
Initializes the _Selector.
@@ -440,12 +441,12 @@ class _Selector(object):
class _JSTestSelectorConfig(_SelectorConfig):
"""_SelectorConfig subclass for js_test tests."""
- def __init__(self, roots=None,
- include_files=None, exclude_files=None,
- include_with_any_tags=None, exclude_with_any_tags=None,
- include_tags=None, exclude_tags=None):
- _SelectorConfig.__init__(self, roots=roots,
- include_files=include_files, exclude_files=exclude_files,
+
+ def __init__(self, roots=None, include_files=None, exclude_files=None,
+ include_with_any_tags=None, exclude_with_any_tags=None, include_tags=None,
+ exclude_tags=None):
+ _SelectorConfig.__init__(self, roots=roots, include_files=include_files,
+ exclude_files=exclude_files,
include_with_any_tags=include_with_any_tags,
exclude_with_any_tags=exclude_with_any_tags,
include_tags=include_tags, exclude_tags=exclude_tags)
@@ -453,6 +454,7 @@ class _JSTestSelectorConfig(_SelectorConfig):
class _JSTestSelector(_Selector):
"""_Selector subclass for js_test tests."""
+
def __init__(self, test_file_explorer):
_Selector.__init__(self, test_file_explorer)
self._tags = self._test_file_explorer.parse_tag_file("js_test")
@@ -466,20 +468,22 @@ class _JSTestSelector(_Selector):
class _CppTestSelectorConfig(_SelectorConfig):
"""_SelectorConfig subclass for cpp_integration_test and cpp_unit_test tests."""
- def __init__(self, root=config.DEFAULT_INTEGRATION_TEST_LIST, roots=None,
- include_files=None, exclude_files=None):
+
+ def __init__(self, root=config.DEFAULT_INTEGRATION_TEST_LIST, roots=None, include_files=None,
+ exclude_files=None):
if roots:
# The 'roots' argument is only present when tests are specified on the command line
# and in that case they take precedence over the tests in the root file.
- _SelectorConfig.__init__(self, roots=roots,
- include_files=include_files, exclude_files=exclude_files)
+ _SelectorConfig.__init__(self, roots=roots, include_files=include_files,
+ exclude_files=exclude_files)
else:
- _SelectorConfig.__init__(self, root=root,
- include_files=include_files, exclude_files=exclude_files)
+ _SelectorConfig.__init__(self, root=root, include_files=include_files,
+ exclude_files=exclude_files)
class _CppTestSelector(_Selector):
"""_Selector subclass for cpp_integration_test and cpp_unit_test tests."""
+
def __init__(self, test_file_explorer):
_Selector.__init__(self, test_file_explorer)
@@ -494,6 +498,7 @@ class _CppTestSelector(_Selector):
class _DbTestSelectorConfig(_SelectorConfig):
"""_Selector config subclass for db_test tests."""
+
def __init__(self, binary=None, roots=None, include_suites=None):
_SelectorConfig.__init__(self, roots=roots)
self.include_suites = utils.default_if_none(include_suites, [])
@@ -510,6 +515,7 @@ class _DbTestSelectorConfig(_SelectorConfig):
class _DbTestSelector(_Selector):
"""_Selector subclass for db_test tests."""
+
def __init__(self, test_file_explorer):
_Selector.__init__(self, test_file_explorer, tests_are_files=False)
@@ -542,19 +548,22 @@ class _DbTestSelector(_Selector):
class _JsonSchemaTestSelectorConfig(_SelectorConfig):
"""_SelectorConfig subclass for json_schema_test tests."""
+
def __init__(self, roots, include_files=None, exclude_files=None):
- _SelectorConfig.__init__(self, roots=roots,
- include_files=include_files, exclude_files=exclude_files)
+ _SelectorConfig.__init__(self, roots=roots, include_files=include_files,
+ exclude_files=exclude_files)
class _SleepTestCaseSelectorConfig(_SelectorConfig):
"""_SelectorConfig subclass for sleep_test tests."""
+
def __init__(self, roots):
_SelectorConfig.__init__(self, roots=roots)
class _SleepTestCaseSelector(_Selector):
"""_Selector subclass for sleep_test tests."""
+
def __init__(self, test_file_explorer):
_Selector.__init__(self, test_file_explorer, tests_are_files=False)
@@ -565,7 +574,6 @@ class _SleepTestCaseSelector(_Selector):
_DEFAULT_TEST_FILE_EXPLORER = TestFileExplorer()
-
_SELECTOR_REGISTRY = {
"cpp_integration_test": (_CppTestSelectorConfig, _CppTestSelector),
"cpp_unit_test": (_CppTestSelectorConfig, _CppTestSelector),
diff --git a/buildscripts/resmokelib/sighandler.py b/buildscripts/resmokelib/sighandler.py
index 7058b6259b7..5da9ae52ca1 100644
--- a/buildscripts/resmokelib/sighandler.py
+++ b/buildscripts/resmokelib/sighandler.py
@@ -66,7 +66,6 @@ def register(logger, suites, start_time):
testing.suite.Suite.log_summaries(logger, suites, time.time() - start_time)
-
# On Windows spawn a thread to wait on an event object for signal to dump stacks. For Cygwin
# platforms, we use a signal handler since it supports POSIX signals.
if _is_windows:
@@ -77,10 +76,8 @@ def register(logger, suites, start_time):
security_attributes = None
manual_reset = False
initial_state = False
- task_timeout_handle = win32event.CreateEvent(security_attributes,
- manual_reset,
- initial_state,
- event_name)
+ task_timeout_handle = win32event.CreateEvent(security_attributes, manual_reset,
+ initial_state, event_name)
except win32event.error as err:
logger.error("Exception from win32event.CreateEvent with error: %s" % err)
return
diff --git a/buildscripts/resmokelib/suitesconfig.py b/buildscripts/resmokelib/suitesconfig.py
index dc668035c90..18c52683661 100644
--- a/buildscripts/resmokelib/suitesconfig.py
+++ b/buildscripts/resmokelib/suitesconfig.py
@@ -71,9 +71,9 @@ def get_suites(suite_files, test_files):
if test_files:
# Do not change the execution order of the tests passed as args, unless a tag option is
# specified. If an option is specified, then sort the tests for consistent execution order.
- _config.ORDER_TESTS_BY_NAME = any(tag_filter is not None for
- tag_filter in (_config.EXCLUDE_WITH_ANY_TAGS,
- _config.INCLUDE_WITH_ANY_TAGS))
+ _config.ORDER_TESTS_BY_NAME = any(
+ tag_filter is not None
+ for tag_filter in (_config.EXCLUDE_WITH_ANY_TAGS, _config.INCLUDE_WITH_ANY_TAGS))
# Build configuration for list of files to run.
suite_roots = _make_suite_roots(test_files)
@@ -109,6 +109,6 @@ def _get_yaml_config(kind, pathname):
pathname = resmokeconfig.NAMED_SUITES[pathname] # Expand 'pathname' to full path.
if not utils.is_yaml_file(pathname) or not os.path.isfile(pathname):
- raise optparse.OptionValueError("Expected a %s YAML config, but got '%s'"
- % (kind, pathname))
+ raise optparse.OptionValueError("Expected a %s YAML config, but got '%s'" % (kind,
+ pathname))
return utils.load_yaml_file(pathname)
diff --git a/buildscripts/resmokelib/testing/executor.py b/buildscripts/resmokelib/testing/executor.py
index 63ff606d2f1..f66515ac8b0 100644
--- a/buildscripts/resmokelib/testing/executor.py
+++ b/buildscripts/resmokelib/testing/executor.py
@@ -30,14 +30,8 @@ class TestSuiteExecutor(object):
_TIMEOUT = 24 * 60 * 60 # =1 day (a long time to have tests run)
- def __init__(self,
- exec_logger,
- suite,
- config=None,
- fixture=None,
- hooks=None,
- archive_instance=None,
- archive=None):
+ def __init__(self, exec_logger, suite, config=None, fixture=None, hooks=None,
+ archive_instance=None, archive=None):
"""
Initializes the TestSuiteExecutor with the test suite to run.
"""
@@ -55,8 +49,8 @@ class TestSuiteExecutor(object):
self.archival = None
if archive_instance:
- self.archival = archival.HookTestArchival(
- suite, self.hooks_config, archive_instance, archive)
+ self.archival = archival.HookTestArchival(suite, self.hooks_config, archive_instance,
+ archive)
self._suite = suite
@@ -147,8 +141,7 @@ class TestSuiteExecutor(object):
try:
job.fixture.setup()
except:
- self.logger.exception(
- "Encountered an error while setting up %s.", job.fixture)
+ self.logger.exception("Encountered an error while setting up %s.", job.fixture)
return False
# Once they have all been started, wait for them to become available.
@@ -156,8 +149,8 @@ class TestSuiteExecutor(object):
try:
job.fixture.await_ready()
except:
- self.logger.exception(
- "Encountered an error while waiting for %s to be ready", job.fixture)
+ self.logger.exception("Encountered an error while waiting for %s to be ready",
+ job.fixture)
return False
return True
@@ -177,8 +170,7 @@ class TestSuiteExecutor(object):
try:
# Run each Job instance in its own thread.
for job in self._jobs:
- t = threading.Thread(target=job,
- args=(test_queue, interrupt_flag),
+ t = threading.Thread(target=job, args=(test_queue, interrupt_flag),
kwargs=dict(teardown_flag=teardown_flag))
# Do not wait for tests to finish executing if interrupted by the user.
t.daemon = True
@@ -258,10 +250,7 @@ class TestSuiteExecutor(object):
hook_class = hook_config.pop("class")
hook_logger = self.logger.new_hook_logger(hook_class, fixture.logger)
- hook = _hooks.make_hook(hook_class,
- hook_logger,
- fixture,
- **hook_config)
+ hook = _hooks.make_hook(hook_class, hook_logger, fixture, **hook_config)
hooks.append(hook)
return hooks
@@ -278,12 +267,7 @@ class TestSuiteExecutor(object):
report = _report.TestReport(job_logger, self._suite.options)
- return _job.Job(job_logger,
- fixture,
- hooks,
- report,
- self.archival,
- self._suite.options)
+ return _job.Job(job_logger, fixture, hooks, report, self.archival, self._suite.options)
def _make_test_queue(self):
"""
@@ -297,10 +281,8 @@ class TestSuiteExecutor(object):
# Put all the test cases in a queue.
queue = _queue.Queue()
for test_name in self._suite.tests:
- test_case = testcases.make_test_case(self._suite.test_kind,
- test_queue_logger,
- test_name,
- **self.test_config)
+ test_case = testcases.make_test_case(self._suite.test_kind, test_queue_logger,
+ test_name, **self.test_config)
queue.put(test_case)
# Add sentinel value for each job to indicate when there are no more items to process.
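
The sentinel comment above describes the standard way to shut down a pool of consumers: after every test case has been queued, one end-of-work marker per job is appended so each worker thread knows when to stop pulling items. A generic sketch of the idiom using the standard library queue rather than resmoke's _queue wrapper; the test names are made up:

    import threading
    try:
        import queue            # Python 3
    except ImportError:
        import Queue as queue   # Python 2

    NUM_JOBS = 3
    work_queue = queue.Queue()

    for test_name in ["testA.js", "testB.js", "testC.js", "testD.js"]:
        work_queue.put(test_name)
    for _ in range(NUM_JOBS):
        work_queue.put(None)  # one sentinel per job: no more items to process

    def run_job(job_num):
        while True:
            item = work_queue.get()
            if item is None:
                break
            print("job {} running {}".format(job_num, item))

    threads = [threading.Thread(target=run_job, args=(i, )) for i in range(NUM_JOBS)]
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()
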
diff --git a/buildscripts/resmokelib/testing/fixtures/__init__.py b/buildscripts/resmokelib/testing/fixtures/__init__.py
index d8c3d8a2b53..e59a05c9754 100644
--- a/buildscripts/resmokelib/testing/fixtures/__init__.py
+++ b/buildscripts/resmokelib/testing/fixtures/__init__.py
@@ -8,10 +8,8 @@ from .interface import NoOpFixture as _NoOpFixture
from .interface import make_fixture
from ...utils import autoloader as _autoloader
-
NOOP_FIXTURE_CLASS = _NoOpFixture.REGISTERED_NAME
-
# We dynamically load all modules in the fixtures/ package so that any Fixture classes declared
# within them are automatically registered.
_autoloader.load_all_modules(name=__name__, path=__path__)
diff --git a/buildscripts/resmokelib/testing/fixtures/interface.py b/buildscripts/resmokelib/testing/fixtures/interface.py
index 64d24be1eb6..0d80907e680 100644
--- a/buildscripts/resmokelib/testing/fixtures/interface.py
+++ b/buildscripts/resmokelib/testing/fixtures/interface.py
@@ -16,7 +16,6 @@ from ... import logging
from ... import utils
from ...utils import registry
-
_FIXTURES = {}
@@ -145,8 +144,7 @@ class Fixture(object):
kwargs["connect"] = True
return pymongo.MongoClient(host=self.get_driver_connection_url(),
- read_preference=read_preference,
- **kwargs)
+ read_preference=read_preference, **kwargs)
def __str__(self):
return "%s (Job #%d)" % (self.__class__.__name__, self.job_num)
diff --git a/buildscripts/resmokelib/testing/fixtures/replicaset.py b/buildscripts/resmokelib/testing/fixtures/replicaset.py
index 5f89cd5fb5d..a554c6a7044 100644
--- a/buildscripts/resmokelib/testing/fixtures/replicaset.py
+++ b/buildscripts/resmokelib/testing/fixtures/replicaset.py
@@ -25,21 +25,11 @@ class ReplicaSetFixture(interface.ReplFixture):
# Error response codes copied from mongo/base/error_codes.err.
_NODE_NOT_FOUND = 74
- def __init__(self,
- logger,
- job_num,
- mongod_executable=None,
- mongod_options=None,
- dbpath_prefix=None,
- preserve_dbpath=False,
- num_nodes=2,
- start_initial_sync_node=False,
- write_concern_majority_journal_default=None,
- auth_options=None,
- replset_config_options=None,
- voting_secondaries=None,
- all_nodes_electable=False,
- use_replica_set_connection_string=None):
+ def __init__(self, logger, job_num, mongod_executable=None, mongod_options=None,
+ dbpath_prefix=None, preserve_dbpath=False, num_nodes=2,
+ start_initial_sync_node=False, write_concern_majority_journal_default=None,
+ auth_options=None, replset_config_options=None, voting_secondaries=None,
+ all_nodes_electable=False, use_replica_set_connection_string=None):
interface.ReplFixture.__init__(self, logger, job_num, dbpath_prefix=dbpath_prefix)
@@ -117,11 +107,11 @@ class ReplicaSetFixture(interface.ReplFixture):
member_info["votes"] = 0
members.append(member_info)
if self.initial_sync_node:
- members.append({"_id": self.initial_sync_node_idx,
- "host": self.initial_sync_node.get_internal_connection_string(),
- "priority": 0,
- "hidden": 1,
- "votes": 0})
+ members.append({
+ "_id": self.initial_sync_node_idx,
+ "host": self.initial_sync_node.get_internal_connection_string(), "priority": 0,
+ "hidden": 1, "votes": 0
+ })
config = {"_id": self.replset_name}
client = self.nodes[0].mongo_client()
@@ -137,13 +127,13 @@ class ReplicaSetFixture(interface.ReplFixture):
return
if self.write_concern_majority_journal_default is not None:
- config["writeConcernMajorityJournalDefault"] = self.write_concern_majority_journal_default
+ config[
+ "writeConcernMajorityJournalDefault"] = self.write_concern_majority_journal_default
else:
server_status = client.admin.command({"serverStatus": 1})
cmd_line_opts = client.admin.command({"getCmdLineOpts": 1})
- if not (server_status["storageEngine"]["persistent"] and
- cmd_line_opts["parsed"].get("storage", {}).get(
- "journal", {}).get("enabled", True)):
+ if not (server_status["storageEngine"]["persistent"] and cmd_line_opts["parsed"].get(
+ "storage", {}).get("journal", {}).get("enabled", True)):
config["writeConcernMajorityJournalDefault"] = False
if self.replset_config_options.get("configsvr", False):
@@ -326,11 +316,9 @@ class ReplicaSetFixture(interface.ReplFixture):
mongod_options["replSet"] = replset_name
mongod_options["dbpath"] = os.path.join(self._dbpath_prefix, "node{}".format(index))
- return standalone.MongoDFixture(mongod_logger,
- self.job_num,
- mongod_executable=self.mongod_executable,
- mongod_options=mongod_options,
- preserve_dbpath=self.preserve_dbpath)
+ return standalone.MongoDFixture(
+ mongod_logger, self.job_num, mongod_executable=self.mongod_executable,
+ mongod_options=mongod_options, preserve_dbpath=self.preserve_dbpath)
def _get_logger_for_mongod(self, index):
"""
diff --git a/buildscripts/resmokelib/testing/fixtures/shardedcluster.py b/buildscripts/resmokelib/testing/fixtures/shardedcluster.py
index 94c8346371b..5e94b133708 100644
--- a/buildscripts/resmokelib/testing/fixtures/shardedcluster.py
+++ b/buildscripts/resmokelib/testing/fixtures/shardedcluster.py
@@ -29,23 +29,11 @@ class ShardedClusterFixture(interface.Fixture):
_CONFIGSVR_REPLSET_NAME = "config-rs"
_SHARD_REPLSET_NAME_PREFIX = "shard-rs"
- def __init__(self,
- logger,
- job_num,
- mongos_executable=None,
- mongos_options=None,
- mongod_executable=None,
- mongod_options=None,
- dbpath_prefix=None,
- preserve_dbpath=False,
- num_shards=1,
- num_rs_nodes_per_shard=None,
- separate_configsvr=True,
- enable_sharding=None,
- enable_balancer=True,
- auth_options=None,
- configsvr_options=None,
- shard_options=None):
+ def __init__(self, logger, job_num, mongos_executable=None, mongos_options=None,
+ mongod_executable=None, mongod_options=None, dbpath_prefix=None,
+ preserve_dbpath=False, num_shards=1, num_rs_nodes_per_shard=None,
+ separate_configsvr=True, enable_sharding=None, enable_balancer=True,
+ auth_options=None, configsvr_options=None, shard_options=None):
"""
Initializes ShardedClusterFixture with the different options to
the mongod and mongos processes.
@@ -174,9 +162,9 @@ class ShardedClusterFixture(interface.Fixture):
Returns true if the config server, all shards, and the mongos
are all still operating, and false otherwise.
"""
- return (self.configsvr is not None and self.configsvr.is_running() and
- all(shard.is_running() for shard in self.shards) and
- self.mongos is not None and self.mongos.is_running())
+ return (self.configsvr is not None and self.configsvr.is_running()
+ and all(shard.is_running() for shard in self.shards) and self.mongos is not None
+ and self.mongos.is_running())
def get_internal_connection_string(self):
if self.mongos is None:
@@ -212,15 +200,11 @@ class ShardedClusterFixture(interface.Fixture):
mongod_options["replSet"] = ShardedClusterFixture._CONFIGSVR_REPLSET_NAME
mongod_options["storageEngine"] = "wiredTiger"
- return replicaset.ReplicaSetFixture(mongod_logger,
- self.job_num,
- mongod_executable=mongod_executable,
- mongod_options=mongod_options,
- preserve_dbpath=preserve_dbpath,
- num_nodes=num_nodes,
- auth_options=auth_options,
- replset_config_options=replset_config_options,
- **configsvr_options)
+ return replicaset.ReplicaSetFixture(
+ mongod_logger, self.job_num, mongod_executable=mongod_executable,
+ mongod_options=mongod_options, preserve_dbpath=preserve_dbpath, num_nodes=num_nodes,
+ auth_options=auth_options, replset_config_options=replset_config_options,
+ **configsvr_options)
def _new_rs_shard(self, index, num_rs_nodes_per_shard):
"""
@@ -245,15 +229,11 @@ class ShardedClusterFixture(interface.Fixture):
mongod_options["dbpath"] = os.path.join(self._dbpath_prefix, "shard{}".format(index))
mongod_options["replSet"] = ShardedClusterFixture._SHARD_REPLSET_NAME_PREFIX + str(index)
- return replicaset.ReplicaSetFixture(mongod_logger,
- self.job_num,
- mongod_executable=mongod_executable,
- mongod_options=mongod_options,
- preserve_dbpath=preserve_dbpath,
- num_nodes=num_rs_nodes_per_shard,
- auth_options=auth_options,
- replset_config_options=replset_config_options,
- **shard_options)
+ return replicaset.ReplicaSetFixture(
+ mongod_logger, self.job_num, mongod_executable=mongod_executable,
+ mongod_options=mongod_options, preserve_dbpath=preserve_dbpath,
+ num_nodes=num_rs_nodes_per_shard, auth_options=auth_options,
+ replset_config_options=replset_config_options, **shard_options)
def _new_standalone_shard(self, index):
"""
@@ -273,12 +253,9 @@ class ShardedClusterFixture(interface.Fixture):
mongod_options["shardsvr"] = ""
mongod_options["dbpath"] = os.path.join(self._dbpath_prefix, "shard{}".format(index))
- return standalone.MongoDFixture(mongod_logger,
- self.job_num,
- mongod_executable=mongod_executable,
- mongod_options=mongod_options,
- preserve_dbpath=preserve_dbpath,
- **shard_options)
+ return standalone.MongoDFixture(
+ mongod_logger, self.job_num, mongod_executable=mongod_executable,
+ mongod_options=mongod_options, preserve_dbpath=preserve_dbpath, **shard_options)
def _new_mongos(self):
"""
@@ -295,9 +272,7 @@ class ShardedClusterFixture(interface.Fixture):
else:
mongos_options["configdb"] = "localhost:{}".format(self.shards[0].port)
- return _MongoSFixture(mongos_logger,
- self.job_num,
- mongos_executable=self.mongos_executable,
+ return _MongoSFixture(mongos_logger, self.job_num, mongos_executable=self.mongos_executable,
mongos_options=mongos_options)
def _add_shard(self, client, shard):
@@ -321,11 +296,7 @@ class _MongoSFixture(interface.Fixture):
REGISTERED_NAME = registry.LEAVE_UNREGISTERED
- def __init__(self,
- logger,
- job_num,
- mongos_executable=None,
- mongos_options=None):
+ def __init__(self, logger, job_num, mongos_executable=None, mongos_options=None):
interface.Fixture.__init__(self, logger, job_num)
@@ -342,8 +313,7 @@ class _MongoSFixture(interface.Fixture):
self.mongos_options["port"] = core.network.PortAllocator.next_fixture_port(self.job_num)
self.port = self.mongos_options["port"]
- mongos = core.programs.mongos_program(self.logger,
- executable=self.mongos_executable,
+ mongos = core.programs.mongos_program(self.logger, executable=self.mongos_executable,
**self.mongos_options)
try:
self.logger.info("Starting mongos on port %d...\n%s", self.port, mongos.as_command())
@@ -367,8 +337,8 @@ class _MongoSFixture(interface.Fixture):
exit_code = self.mongos.poll()
if exit_code is not None:
raise errors.ServerFailure("Could not connect to mongos on port {}, process ended"
- " unexpectedly with code {}.".format(self.port,
- exit_code))
+ " unexpectedly with code {}.".format(
+ self.port, exit_code))
try:
# Use a shorter connection timeout to more closely satisfy the requested deadline.
diff --git a/buildscripts/resmokelib/testing/fixtures/standalone.py b/buildscripts/resmokelib/testing/fixtures/standalone.py
index 9b6a5b09e5d..0d761478cd8 100644
--- a/buildscripts/resmokelib/testing/fixtures/standalone.py
+++ b/buildscripts/resmokelib/testing/fixtures/standalone.py
@@ -27,13 +27,8 @@ class MongoDFixture(interface.Fixture):
AWAIT_READY_TIMEOUT_SECS = 300
- def __init__(self,
- logger,
- job_num,
- mongod_executable=None,
- mongod_options=None,
- dbpath_prefix=None,
- preserve_dbpath=False):
+ def __init__(self, logger, job_num, mongod_executable=None, mongod_options=None,
+ dbpath_prefix=None, preserve_dbpath=False):
interface.Fixture.__init__(self, logger, job_num, dbpath_prefix=dbpath_prefix)
@@ -49,8 +44,7 @@ class MongoDFixture(interface.Fixture):
# The dbpath in mongod_options takes precedence over other settings to make it easier for
# users to specify a dbpath containing data to test against.
if "dbpath" not in self.mongod_options:
- self.mongod_options["dbpath"] = os.path.join(
- self._dbpath_prefix, config.FIXTURE_SUBDIR)
+ self.mongod_options["dbpath"] = os.path.join(self._dbpath_prefix, config.FIXTURE_SUBDIR)
self._dbpath = self.mongod_options["dbpath"]
self.mongod = None
@@ -70,8 +64,7 @@ class MongoDFixture(interface.Fixture):
self.mongod_options["port"] = core.network.PortAllocator.next_fixture_port(self.job_num)
self.port = self.mongod_options["port"]
- mongod = core.programs.mongod_program(self.logger,
- executable=self.mongod_executable,
+ mongod = core.programs.mongod_program(self.logger, executable=self.mongod_executable,
**self.mongod_options)
try:
self.logger.info("Starting mongod on port %d...\n%s", self.port, mongod.as_command())
diff --git a/buildscripts/resmokelib/testing/hook_test_archival.py b/buildscripts/resmokelib/testing/hook_test_archival.py
index 05445208673..315247261d6 100644
--- a/buildscripts/resmokelib/testing/hook_test_archival.py
+++ b/buildscripts/resmokelib/testing/hook_test_archival.py
@@ -92,25 +92,19 @@ class HookTestArchival(object):
# Normalize test path from a test or hook name.
test_path = \
test_name.replace("/", "_").replace("\\", "_").replace(".", "_").replace(":", "_")
- file_name = "mongo-data-{}-{}-{}-{}.tgz".format(
- config.EVERGREEN_TASK_ID,
- test_path,
- config.EVERGREEN_EXECUTION,
- self._tests_repeat[test_name])
+ file_name = "mongo-data-{}-{}-{}-{}.tgz".format(config.EVERGREEN_TASK_ID, test_path,
+ config.EVERGREEN_EXECUTION,
+ self._tests_repeat[test_name])
# Retrieve root directory for all dbPaths from fixture.
input_files = test.fixture.get_dbpath_prefix()
s3_bucket = config.ARCHIVE_BUCKET
- s3_path = "{}/{}/{}/datafiles/{}".format(
- config.EVERGREEN_PROJECT_NAME,
- config.EVERGREEN_VARIANT_NAME,
- config.EVERGREEN_REVISION,
- file_name)
+ s3_path = "{}/{}/{}/datafiles/{}".format(config.EVERGREEN_PROJECT_NAME,
+ config.EVERGREEN_VARIANT_NAME,
+ config.EVERGREEN_REVISION, file_name)
display_name = "Data files {} - Execution {} Repetition {}".format(
- test_name,
- config.EVERGREEN_EXECUTION,
- self._tests_repeat[test_name])
+ test_name, config.EVERGREEN_EXECUTION, self._tests_repeat[test_name])
logger.info("Archiving data files for test %s from %s", test_name, input_files)
- status, message = self.archive_instance.archive_files_to_s3(
- display_name, input_files, s3_bucket, s3_path)
+ status, message = self.archive_instance.archive_files_to_s3(display_name, input_files,
+ s3_bucket, s3_path)
if status:
logger.warning("Archive failed for %s: %s", test_name, message)
diff --git a/buildscripts/resmokelib/testing/hooks/__init__.py b/buildscripts/resmokelib/testing/hooks/__init__.py
index 40cc09c78cc..87efcd1c964 100644
--- a/buildscripts/resmokelib/testing/hooks/__init__.py
+++ b/buildscripts/resmokelib/testing/hooks/__init__.py
@@ -1,4 +1,5 @@
-"""
+"""Testing hooks package.
+
Package containing classes to customize the behavior of a test fixture
by allowing special code to be executed before or after each test, and
before or after each suite.
@@ -9,7 +10,6 @@ from __future__ import absolute_import
from .interface import make_hook
from ...utils import autoloader as _autoloader
-
# We dynamically load all modules in the hooks/ package so that any Hook classes declared
# within them are automatically registered.
_autoloader.load_all_modules(name=__name__, path=__path__)
diff --git a/buildscripts/resmokelib/testing/hooks/cleanup.py b/buildscripts/resmokelib/testing/hooks/cleanup.py
index 6442f035114..39011ec90fd 100644
--- a/buildscripts/resmokelib/testing/hooks/cleanup.py
+++ b/buildscripts/resmokelib/testing/hooks/cleanup.py
@@ -35,8 +35,8 @@ class CleanEveryN(interface.Hook):
if self.tests_run < self.n:
return
- hook_test_case = CleanEveryNTestCase.create_after_test(
- self.logger.test_case_logger, test, self)
+ hook_test_case = CleanEveryNTestCase.create_after_test(self.logger.test_case_logger, test,
+ self)
hook_test_case.configure(self.fixture)
hook_test_case.run_dynamic_test(test_report)
diff --git a/buildscripts/resmokelib/testing/hooks/combine_benchmark_results.py b/buildscripts/resmokelib/testing/hooks/combine_benchmark_results.py
index 2df4296dad5..7f1bea31cf3 100644
--- a/buildscripts/resmokelib/testing/hooks/combine_benchmark_results.py
+++ b/buildscripts/resmokelib/testing/hooks/combine_benchmark_results.py
@@ -68,8 +68,7 @@ class CombineBenchmarkResults(interface.Hook):
for name, report in self.benchmark_reports.items():
test_report = {
- "name": name,
- "results": report.generate_perf_plugin_dict(),
+ "name": name, "results": report.generate_perf_plugin_dict(),
"context": report.context._asdict()
}
@@ -124,11 +123,7 @@ class _BenchmarkThreadsReport(object):
}
"""
CONTEXT_FIELDS = [
- "date",
- "cpu_scaling_enabled",
- "num_cpus",
- "mhz_per_cpu",
- "library_build_type"
+ "date", "cpu_scaling_enabled", "num_cpus", "mhz_per_cpu", "library_build_type"
]
Context = collections.namedtuple("Context", CONTEXT_FIELDS)
@@ -163,8 +158,8 @@ class _BenchmarkThreadsReport(object):
res = {}
for thread_count, reports in self.thread_benchmark_map.items():
- if (thread_count.endswith("median") or thread_count.endswith("mean") or
- thread_count.endswith("stddev")):
+ if (thread_count.endswith("median") or thread_count.endswith("mean")
+ or thread_count.endswith("stddev")):
# We don't use Benchmark's included statistics for now because they clutter up the
# graph.
continue
diff --git a/buildscripts/resmokelib/testing/hooks/dbhash.py b/buildscripts/resmokelib/testing/hooks/dbhash.py
index f5081fd5e7d..70516b500db 100644
--- a/buildscripts/resmokelib/testing/hooks/dbhash.py
+++ b/buildscripts/resmokelib/testing/hooks/dbhash.py
@@ -14,12 +14,9 @@ class CheckReplDBHash(jsfile.JSHook):
Checks that the dbhashes of all non-local databases and non-replicated system collections
match on the primary and secondaries.
"""
+
def __init__(self, hook_logger, fixture, shell_options=None):
description = "Check dbhashes of all replica set or master/slave members"
js_filename = os.path.join("jstests", "hooks", "run_check_repl_dbhash.js")
- jsfile.JSHook.__init__(self,
- hook_logger,
- fixture,
- js_filename,
- description,
+ jsfile.JSHook.__init__(self, hook_logger, fixture, js_filename, description,
shell_options=shell_options)
diff --git a/buildscripts/resmokelib/testing/hooks/initialsync.py b/buildscripts/resmokelib/testing/hooks/initialsync.py
index 328c4ac182e..905d0a1e913 100644
--- a/buildscripts/resmokelib/testing/hooks/initialsync.py
+++ b/buildscripts/resmokelib/testing/hooks/initialsync.py
@@ -68,15 +68,12 @@ class BackgroundInitialSyncTestCase(jsfile.DynamicJSTestCase):
# If it's been 'n' tests so far, wait for the initial sync node to finish syncing.
if self._hook.tests_run >= self._hook.n:
- self.logger.info(
- "%d tests have been run against the fixture, waiting for initial sync"
- " node to go into SECONDARY state",
- self._hook.tests_run)
+ self.logger.info("%d tests have been run against the fixture, waiting for initial sync"
+ " node to go into SECONDARY state", self._hook.tests_run)
self._hook.tests_run = 0
- cmd = bson.SON([("replSetTest", 1),
- ("waitForMemberState", 2),
- ("timeoutMillis", 20 * 60 * 1000)])
+ cmd = bson.SON([("replSetTest", 1), ("waitForMemberState", 2), ("timeoutMillis",
+ 20 * 60 * 1000)])
sync_node_conn.admin.command(cmd)
# Check if the initial sync node is in SECONDARY state. If it's been 'n' tests, then it
@@ -90,11 +87,9 @@ class BackgroundInitialSyncTestCase(jsfile.DynamicJSTestCase):
self.logger.exception("{0} failed: {1}".format(self._hook.description, msg))
raise errors.TestFailure(msg)
- self.logger.info(
- "Initial sync node is in state %d, not state SECONDARY (2)."
- " Skipping BackgroundInitialSync hook for %s",
- state,
- self._base_test_name)
+ self.logger.info("Initial sync node is in state %d, not state SECONDARY (2)."
+ " Skipping BackgroundInitialSync hook for %s", state,
+ self._base_test_name)
# If we have not restarted initial sync since the last time we ran the data
# validation, restart initial sync with a 20% probability.
@@ -175,8 +170,8 @@ class IntermediateInitialSyncTestCase(jsfile.DynamicJSTestCase):
JS_FILENAME = os.path.join("jstests", "hooks", "run_initial_sync_node_validation.js")
def __init__(self, logger, test_name, description, base_test_name, hook):
- jsfile.DynamicJSTestCase.__init__(self, logger, test_name, description,
- base_test_name, hook, self.JS_FILENAME)
+ jsfile.DynamicJSTestCase.__init__(self, logger, test_name, description, base_test_name,
+ hook, self.JS_FILENAME)
def run_test(self):
sync_node = self.fixture.get_initial_sync_node()
@@ -190,9 +185,8 @@ class IntermediateInitialSyncTestCase(jsfile.DynamicJSTestCase):
# Do initial sync round.
self.logger.info("Waiting for initial sync node to go into SECONDARY state")
- cmd = bson.SON([("replSetTest", 1),
- ("waitForMemberState", 2),
- ("timeoutMillis", 20 * 60 * 1000)])
+ cmd = bson.SON([("replSetTest", 1), ("waitForMemberState", 2), ("timeoutMillis",
+ 20 * 60 * 1000)])
sync_node_conn.admin.command(cmd)
# Run data validation and dbhash checking.
diff --git a/buildscripts/resmokelib/testing/hooks/interface.py b/buildscripts/resmokelib/testing/hooks/interface.py
index cc854f75c42..877b2cc565f 100644
--- a/buildscripts/resmokelib/testing/hooks/interface.py
+++ b/buildscripts/resmokelib/testing/hooks/interface.py
@@ -11,7 +11,6 @@ from ... import errors
from ...logging import loggers
from ...utils import registry
-
_HOOKS = {}
diff --git a/buildscripts/resmokelib/testing/hooks/jsfile.py b/buildscripts/resmokelib/testing/hooks/jsfile.py
index 98c89e60534..65398efd0bf 100644
--- a/buildscripts/resmokelib/testing/hooks/jsfile.py
+++ b/buildscripts/resmokelib/testing/hooks/jsfile.py
@@ -5,7 +5,6 @@ JavaScript file.
from __future__ import absolute_import
-
from . import interface
from ..testcases import jstest
from ...utils import registry
@@ -38,10 +37,11 @@ class JSHook(interface.Hook):
class DynamicJSTestCase(interface.DynamicTestCase):
"""A dynamic TestCase that runs a JavaScript file."""
- def __init__(self, logger, test_name, description, base_test_name, hook,
- js_filename, shell_options=None):
- interface.DynamicTestCase.__init__(self, logger, test_name, description,
- base_test_name, hook)
+
+ def __init__(self, logger, test_name, description, base_test_name, hook, js_filename,
+ shell_options=None):
+ interface.DynamicTestCase.__init__(self, logger, test_name, description, base_test_name,
+ hook)
self._js_test = jstest.JSTestCase(logger, js_filename, shell_options=shell_options)
def override_logger(self, new_logger):
diff --git a/buildscripts/resmokelib/testing/hooks/oplog.py b/buildscripts/resmokelib/testing/hooks/oplog.py
index bb75b1bf501..225634ce084 100644
--- a/buildscripts/resmokelib/testing/hooks/oplog.py
+++ b/buildscripts/resmokelib/testing/hooks/oplog.py
@@ -14,12 +14,9 @@ class CheckReplOplogs(jsfile.JSHook):
"""
Checks that local.oplog.rs matches on the primary and secondaries.
"""
+
def __init__(self, hook_logger, fixture, shell_options=None):
description = "Check oplogs of all replica set members"
js_filename = os.path.join("jstests", "hooks", "run_check_repl_oplogs.js")
- jsfile.JSHook.__init__(self,
- hook_logger,
- fixture,
- js_filename,
- description,
+ jsfile.JSHook.__init__(self, hook_logger, fixture, js_filename, description,
shell_options=shell_options)
diff --git a/buildscripts/resmokelib/testing/hooks/periodic_kill_secondaries.py b/buildscripts/resmokelib/testing/hooks/periodic_kill_secondaries.py
index 09e8d00514f..a6924fe52b6 100644
--- a/buildscripts/resmokelib/testing/hooks/periodic_kill_secondaries.py
+++ b/buildscripts/resmokelib/testing/hooks/periodic_kill_secondaries.py
@@ -91,12 +91,11 @@ class PeriodicKillSecondaries(interface.Hook):
# applying any oplog entries while the test is running.
client = secondary.mongo_client()
try:
- client.admin.command(bson.SON([
- ("configureFailPoint", "rsSyncApplyStop"),
- ("mode", "alwaysOn")]))
+ client.admin.command(
+ bson.SON([("configureFailPoint", "rsSyncApplyStop"), ("mode", "alwaysOn")]))
except pymongo.errors.OperationFailure as err:
- self.logger.exception(
- "Unable to disable oplog application on the mongod on port %d", secondary.port)
+ self.logger.exception("Unable to disable oplog application on the mongod on port %d",
+ secondary.port)
raise errors.ServerFailure(
"Unable to disable oplog application on the mongod on port {}: {}".format(
secondary.port, err.args[0]))
@@ -106,13 +105,11 @@ class PeriodicKillSecondaries(interface.Hook):
# oplog entries.
client = secondary.mongo_client()
try:
- client.admin.command(bson.SON([
- ("configureFailPoint", "rsSyncApplyStop"),
- ("mode", "off")]))
+ client.admin.command(
+ bson.SON([("configureFailPoint", "rsSyncApplyStop"), ("mode", "off")]))
except pymongo.errors.OperationFailure as err:
- self.logger.exception(
- "Unable to re-enable oplog application on the mongod on port %d",
- secondary.port)
+ self.logger.exception("Unable to re-enable oplog application on the mongod on port %d",
+ secondary.port)
raise errors.ServerFailure(
"Unable to re-enable oplog application on the mongod on port {}: {}".format(
secondary.port, err.args[0]))
@@ -120,8 +117,8 @@ class PeriodicKillSecondaries(interface.Hook):
class PeriodicKillSecondariesTestCase(interface.DynamicTestCase):
def __init__(self, logger, test_name, description, base_test_name, hook, test_report):
- interface.DynamicTestCase.__init__(self, logger, test_name, description,
- base_test_name, hook)
+ interface.DynamicTestCase.__init__(self, logger, test_name, description, base_test_name,
+ hook)
self._test_report = test_report
def run_test(self):
@@ -243,10 +240,11 @@ class PeriodicKillSecondariesTestCase(interface.DynamicTestCase):
client = secondary.mongo_client()
minvalid_doc = client.local["replset.minvalid"].find_one()
oplog_truncate_after_doc = client.local["replset.oplogTruncateAfterPoint"].find_one()
- self.logger.info("minValid: {}, oTAP: {}".format(minvalid_doc, oplog_truncate_after_doc))
+ self.logger.info("minValid: {}, oTAP: {}".format(minvalid_doc,
+ oplog_truncate_after_doc))
- latest_oplog_doc = client.local["oplog.rs"].find_one(
- sort=[("$natural", pymongo.DESCENDING)])
+ latest_oplog_doc = client.local["oplog.rs"].find_one(sort=[("$natural",
+ pymongo.DESCENDING)])
null_ts = bson.Timestamp(0, 0)
@@ -255,8 +253,8 @@ class PeriodicKillSecondariesTestCase(interface.DynamicTestCase):
if latest_oplog_doc is not None:
latest_oplog_entry_ts = latest_oplog_doc.get("ts")
if latest_oplog_entry_ts is None:
- raise errors.ServerFailure("Latest oplog entry had no 'ts' field: {}".format(
- latest_oplog_doc))
+ raise errors.ServerFailure(
+ "Latest oplog entry had no 'ts' field: {}".format(latest_oplog_doc))
# The "oplogTruncateAfterPoint" document may not exist at startup. If so, we default
# it to null.
@@ -310,9 +308,9 @@ class PeriodicKillSecondariesTestCase(interface.DynamicTestCase):
raise errors.ServerFailure(
"The condition minValid <= oplogTruncateAfterPoint ({} <= {}) doesn't"
" hold: minValid document={}, oplogTruncateAfterPoint document={},"
- " latest oplog entry={}".format(
- minvalid_ts, oplog_truncate_after_ts, minvalid_doc,
- oplog_truncate_after_doc, latest_oplog_doc))
+ " latest oplog entry={}".format(minvalid_ts, oplog_truncate_after_ts,
+ minvalid_doc, oplog_truncate_after_doc,
+ latest_oplog_doc))
# minvalid <= latest oplog entry
# "minValid" is set to the end of a batch after the batch is written to the oplog.
@@ -321,8 +319,7 @@ class PeriodicKillSecondariesTestCase(interface.DynamicTestCase):
raise errors.ServerFailure(
"The condition minValid <= top of oplog ({} <= {}) doesn't"
" hold: minValid document={}, latest oplog entry={}".format(
- minvalid_ts, latest_oplog_entry_ts, minvalid_doc,
- latest_oplog_doc))
+ minvalid_ts, latest_oplog_entry_ts, minvalid_doc, latest_oplog_doc))
try:
secondary.teardown()
@@ -346,15 +343,16 @@ class PeriodicKillSecondariesTestCase(interface.DynamicTestCase):
def _await_secondary_state(self, secondary):
client = secondary.mongo_client()
try:
- client.admin.command(bson.SON([
- ("replSetTest", 1),
- ("waitForMemberState", 2), # 2 = SECONDARY
- ("timeoutMillis", fixture.ReplFixture.AWAIT_REPL_TIMEOUT_MINS * 60 * 1000)]))
+ client.admin.command(
+ bson.SON([
+ ("replSetTest", 1),
+ ("waitForMemberState", 2), # 2 = SECONDARY
+ ("timeoutMillis", fixture.ReplFixture.AWAIT_REPL_TIMEOUT_MINS * 60 * 1000)
+ ]))
except pymongo.errors.OperationFailure as err:
self.logger.exception(
"mongod on port %d failed to reach state SECONDARY after %d seconds",
- secondary.port,
- fixture.ReplFixture.AWAIT_REPL_TIMEOUT_MINS * 60)
+ secondary.port, fixture.ReplFixture.AWAIT_REPL_TIMEOUT_MINS * 60)
raise errors.ServerFailure(
"mongod on port {} failed to reach state SECONDARY after {} seconds: {}".format(
secondary.port, fixture.ReplFixture.AWAIT_REPL_TIMEOUT_MINS * 60, err.args[0]))
diff --git a/buildscripts/resmokelib/testing/hooks/stepdown.py b/buildscripts/resmokelib/testing/hooks/stepdown.py
index 04db8ae6adc..9e6e99d6663 100644
--- a/buildscripts/resmokelib/testing/hooks/stepdown.py
+++ b/buildscripts/resmokelib/testing/hooks/stepdown.py
@@ -24,11 +24,8 @@ class ContinuousStepdown(interface.Hook):
DESCRIPTION = ("Continuous stepdown (steps down the primary of replica sets at regular"
" intervals)")
- def __init__(self, hook_logger, fixture,
- config_stepdown=True,
- shard_stepdown=True,
- stepdown_duration_secs=10,
- stepdown_interval_ms=8000):
+ def __init__(self, hook_logger, fixture, config_stepdown=True, shard_stepdown=True,
+ stepdown_duration_secs=10, stepdown_interval_ms=8000):
"""Initializes the ContinuousStepdown.
Args:
@@ -39,8 +36,7 @@ class ContinuousStepdown(interface.Hook):
stepdown_duration_secs: the number of seconds to step down the primary.
stepdown_interval_ms: the number of milliseconds between stepdowns.
"""
- interface.Hook.__init__(self, hook_logger, fixture,
- ContinuousStepdown.DESCRIPTION)
+ interface.Hook.__init__(self, hook_logger, fixture, ContinuousStepdown.DESCRIPTION)
self._fixture = fixture
self._config_stepdown = config_stepdown
@@ -190,17 +186,18 @@ class _StepdownThread(threading.Thread):
# We'll try again after self._stepdown_interval_secs seconds.
return
- self.logger.info("Stepping down the primary on port %d of replica set '%s'.",
- primary.port, rs_fixture.replset_name)
+ self.logger.info("Stepping down the primary on port %d of replica set '%s'.", primary.port,
+ rs_fixture.replset_name)
secondaries = rs_fixture.get_secondaries()
try:
client = primary.mongo_client()
- client.admin.command(bson.SON([
- ("replSetStepDown", self._stepdown_duration_secs),
- ("force", True),
- ]))
+ client.admin.command(
+ bson.SON([
+ ("replSetStepDown", self._stepdown_duration_secs),
+ ("force", True),
+ ]))
except pymongo.errors.AutoReconnect:
# AutoReconnect exceptions are expected as connections are closed during stepdown.
pass
diff --git a/buildscripts/resmokelib/testing/hooks/validate.py b/buildscripts/resmokelib/testing/hooks/validate.py
index 66a5d6ec6db..24c9323342f 100644
--- a/buildscripts/resmokelib/testing/hooks/validate.py
+++ b/buildscripts/resmokelib/testing/hooks/validate.py
@@ -15,12 +15,9 @@ class ValidateCollections(jsfile.JSHook):
Runs full validation on all collections in all databases on every stand-alone
node, primary replica-set node, or primary shard node.
"""
+
def __init__(self, hook_logger, fixture, shell_options=None):
description = "Full collection validation"
js_filename = os.path.join("jstests", "hooks", "run_validate_collections.js")
- jsfile.JSHook.__init__(self,
- hook_logger,
- fixture,
- js_filename,
- description,
+ jsfile.JSHook.__init__(self, hook_logger, fixture, js_filename, description,
shell_options=shell_options)
diff --git a/buildscripts/resmokelib/testing/job.py b/buildscripts/resmokelib/testing/job.py
index 6a8b98e3e36..33831f4e84c 100644
--- a/buildscripts/resmokelib/testing/job.py
+++ b/buildscripts/resmokelib/testing/job.py
@@ -110,8 +110,8 @@ class Job(object):
test.shortDescription())
self.report.setFailure(test, return_code=2)
# Always fail fast if the fixture fails.
- raise errors.StopExecution("%s not running after %s" %
- (self.fixture, test.shortDescription()))
+ raise errors.StopExecution("%s not running after %s" % (self.fixture,
+ test.shortDescription()))
finally:
success = self.report._find_test_info(test).status == "pass"
if self.archival:
diff --git a/buildscripts/resmokelib/testing/report.py b/buildscripts/resmokelib/testing/report.py
index 197db9328c5..f13cfdc9a84 100644
--- a/buildscripts/resmokelib/testing/report.py
+++ b/buildscripts/resmokelib/testing/report.py
@@ -113,8 +113,8 @@ class TestReport(unittest.TestResult):
self.num_dynamic += 1
# Set up the test-specific logger.
- test_logger = self.job_logger.new_test_logger(test.short_name(), test.basename(),
- command, test.logger)
+ test_logger = self.job_logger.new_test_logger(test.short_name(), test.basename(), command,
+ test.logger)
test_info.url_endpoint = test_logger.url_endpoint
test.override_logger(test_logger)
diff --git a/buildscripts/resmokelib/testing/suite.py b/buildscripts/resmokelib/testing/suite.py
index 59a88f33d72..07d72cb65b4 100644
--- a/buildscripts/resmokelib/testing/suite.py
+++ b/buildscripts/resmokelib/testing/suite.py
@@ -105,15 +105,19 @@ class Suite(object):
if self.options.include_tags is not None:
if "include_tags" in selector:
- selector["include_tags"] = {"$allOf": [
- selector["include_tags"],
- self.options.include_tags,
- ]}
+ selector["include_tags"] = {
+ "$allOf": [
+ selector["include_tags"],
+ self.options.include_tags,
+ ]
+ }
elif "exclude_tags" in selector:
- selector["exclude_tags"] = {"$anyOf": [
- selector["exclude_tags"],
- {"$not": self.options.include_tags},
- ]}
+ selector["exclude_tags"] = {
+ "$anyOf": [
+ selector["exclude_tags"],
+ {"$not": self.options.include_tags},
+ ]
+ }
else:
selector["include_tags"] = self.options.include_tags
@@ -267,11 +271,8 @@ class Suite(object):
for iteration in xrange(num_iterations):
# Summarize each execution as a bulleted list of results.
bulleter_sb = []
- summary = self._summarize_report(
- reports[iteration],
- start_times[iteration],
- end_times[iteration],
- bulleter_sb)
+ summary = self._summarize_report(reports[iteration], start_times[iteration],
+ end_times[iteration], bulleter_sb)
combined_summary = _summary.combine(combined_summary, summary)
for (i, line) in enumerate(bulleter_sb):
@@ -288,10 +289,8 @@ class Suite(object):
string builder 'sb'.
"""
- return self._summarize_report(self._reports[iteration],
- self._test_start_times[iteration],
- self._test_end_times[iteration],
- sb)
+ return self._summarize_report(self._reports[iteration], self._test_start_times[iteration],
+ self._test_end_times[iteration], sb)
def _summarize_report(self, report, start_time, end_time, sb):
"""
@@ -335,8 +334,8 @@ class Suite(object):
@staticmethod
def log_summaries(logger, suites, time_taken):
sb = []
- sb.append("Summary of all suites: %d suites ran in %0.2f seconds"
- % (len(suites), time_taken))
+ sb.append("Summary of all suites: %d suites ran in %0.2f seconds" % (len(suites),
+ time_taken))
for suite in suites:
suite_sb = []
suite.summarize(suite_sb)
diff --git a/buildscripts/resmokelib/testing/summary.py b/buildscripts/resmokelib/testing/summary.py
index bb44472caa4..cf3649c3e16 100644
--- a/buildscripts/resmokelib/testing/summary.py
+++ b/buildscripts/resmokelib/testing/summary.py
@@ -6,10 +6,9 @@ from __future__ import absolute_import
import collections
-
-
-Summary = collections.namedtuple("Summary", ["num_run", "time_taken", "num_succeeded",
- "num_skipped", "num_failed", "num_errored"])
+Summary = collections.namedtuple(
+ "Summary",
+ ["num_run", "time_taken", "num_succeeded", "num_skipped", "num_failed", "num_errored"])
def combine(summary1, summary2):
diff --git a/buildscripts/resmokelib/testing/testcases/__init__.py b/buildscripts/resmokelib/testing/testcases/__init__.py
index 047b5c1f3f0..a397c04fda6 100644
--- a/buildscripts/resmokelib/testing/testcases/__init__.py
+++ b/buildscripts/resmokelib/testing/testcases/__init__.py
@@ -7,7 +7,6 @@ from __future__ import absolute_import
from .interface import make_test_case
from ...utils import autoloader as _autoloader
-
# We dynamically load all modules in the testcases/ package so that any TestCase classes declared
# within them are automatically registered.
_autoloader.load_all_modules(name=__name__, path=__path__)
diff --git a/buildscripts/resmokelib/testing/testcases/benchmark_test.py b/buildscripts/resmokelib/testing/testcases/benchmark_test.py
index ac769adf3a8..5002ea37eb7 100644
--- a/buildscripts/resmokelib/testing/testcases/benchmark_test.py
+++ b/buildscripts/resmokelib/testing/testcases/benchmark_test.py
@@ -18,10 +18,7 @@ class BenchmarkTestCase(interface.ProcessTestCase):
REGISTERED_NAME = "benchmark_test"
- def __init__(self,
- logger,
- program_executable,
- program_options=None):
+ def __init__(self, logger, program_executable, program_options=None):
"""
Initializes the BenchmarkTestCase with the executable to run.
"""
@@ -49,9 +46,8 @@ class BenchmarkTestCase(interface.ProcessTestCase):
# 3. Override Benchmark options with options set through resmoke's command line.
resmoke_bm_options = {
- "benchmark_filter": _config.BENCHMARK_FILTER,
- "benchmark_list_tests": _config.BENCHMARK_LIST_TESTS,
- "benchmark_min_time": _config.BENCHMARK_MIN_TIME,
+ "benchmark_filter": _config.BENCHMARK_FILTER, "benchmark_list_tests":
+ _config.BENCHMARK_LIST_TESTS, "benchmark_min_time": _config.BENCHMARK_MIN_TIME,
"benchmark_out_format": _config.BENCHMARK_OUT_FORMAT,
"benchmark_repetitions": _config.BENCHMARK_REPETITIONS
}
@@ -69,6 +65,4 @@ class BenchmarkTestCase(interface.ProcessTestCase):
return self.bm_executable + ".json"
def _make_process(self):
- return core.programs.generic_program(self.logger,
- [self.bm_executable],
- **self.bm_options)
+ return core.programs.generic_program(self.logger, [self.bm_executable], **self.bm_options)
diff --git a/buildscripts/resmokelib/testing/testcases/cpp_integration_test.py b/buildscripts/resmokelib/testing/testcases/cpp_integration_test.py
index 82dcd0cb275..b4170581821 100644
--- a/buildscripts/resmokelib/testing/testcases/cpp_integration_test.py
+++ b/buildscripts/resmokelib/testing/testcases/cpp_integration_test.py
@@ -16,10 +16,7 @@ class CPPIntegrationTestCase(interface.ProcessTestCase):
REGISTERED_NAME = "cpp_integration_test"
- def __init__(self,
- logger,
- program_executable,
- program_options=None):
+ def __init__(self, logger, program_executable, program_options=None):
"""
Initializes the CPPIntegrationTestCase with the executable to run.
"""
@@ -35,6 +32,5 @@ class CPPIntegrationTestCase(interface.ProcessTestCase):
self.program_options["connectionString"] = self.fixture.get_internal_connection_string()
def _make_process(self):
- return core.programs.generic_program(self.logger,
- [self.program_executable],
+ return core.programs.generic_program(self.logger, [self.program_executable],
**self.program_options)
diff --git a/buildscripts/resmokelib/testing/testcases/cpp_unittest.py b/buildscripts/resmokelib/testing/testcases/cpp_unittest.py
index b287db64057..96f20796911 100644
--- a/buildscripts/resmokelib/testing/testcases/cpp_unittest.py
+++ b/buildscripts/resmokelib/testing/testcases/cpp_unittest.py
@@ -16,10 +16,7 @@ class CPPUnitTestCase(interface.ProcessTestCase):
REGISTERED_NAME = "cpp_unit_test"
- def __init__(self,
- logger,
- program_executable,
- program_options=None):
+ def __init__(self, logger, program_executable, program_options=None):
"""
Initializes the CPPUnitTestCase with the executable to run.
"""
@@ -30,6 +27,4 @@ class CPPUnitTestCase(interface.ProcessTestCase):
self.program_options = utils.default_if_none(program_options, {}).copy()
def _make_process(self):
- return core.process.Process(self.logger,
- [self.program_executable],
- **self.program_options)
+ return core.process.Process(self.logger, [self.program_executable], **self.program_options)
diff --git a/buildscripts/resmokelib/testing/testcases/dbtest.py b/buildscripts/resmokelib/testing/testcases/dbtest.py
index 8e7861782ea..15316a0f197 100644
--- a/buildscripts/resmokelib/testing/testcases/dbtest.py
+++ b/buildscripts/resmokelib/testing/testcases/dbtest.py
@@ -21,11 +21,7 @@ class DBTestCase(interface.ProcessTestCase):
REGISTERED_NAME = "db_test"
- def __init__(self,
- logger,
- dbtest_suite,
- dbtest_executable=None,
- dbtest_options=None):
+ def __init__(self, logger, dbtest_suite, dbtest_executable=None, dbtest_options=None):
"""
Initializes the DBTestCase with the dbtest suite to run.
"""
@@ -62,10 +58,8 @@ class DBTestCase(interface.ProcessTestCase):
shutil.rmtree(self.dbtest_options["dbpath"], ignore_errors=True)
def _make_process(self):
- return core.programs.dbtest_program(self.logger,
- executable=self.dbtest_executable,
- suites=[self.dbtest_suite],
- **self.dbtest_options)
+ return core.programs.dbtest_program(self.logger, executable=self.dbtest_executable,
+ suites=[self.dbtest_suite], **self.dbtest_options)
@staticmethod
def _get_dbpath_prefix():
diff --git a/buildscripts/resmokelib/testing/testcases/fsm_workload_test.py b/buildscripts/resmokelib/testing/testcases/fsm_workload_test.py
index a1127136b3c..0d397200cfc 100644
--- a/buildscripts/resmokelib/testing/testcases/fsm_workload_test.py
+++ b/buildscripts/resmokelib/testing/testcases/fsm_workload_test.py
@@ -15,21 +15,13 @@ class FSMWorkloadTestCase(jsrunnerfile.JSRunnerFileTestCase):
REGISTERED_NAME = "fsm_workload_test"
- def __init__(self,
- logger,
- fsm_workload,
- shell_executable=None,
- shell_options=None):
+ def __init__(self, logger, fsm_workload, shell_executable=None, shell_options=None):
"""Initializes the FSMWorkloadTestCase with the FSM workload file."""
jsrunnerfile.JSRunnerFileTestCase.__init__(
- self,
- logger,
- "FSM workload",
- fsm_workload,
+ self, logger, "FSM workload", fsm_workload,
test_runner_file="jstests/concurrency/fsm_libs/resmoke_runner.js",
- shell_executable=shell_executable,
- shell_options=shell_options)
+ shell_executable=shell_executable, shell_options=shell_options)
@property
def fsm_workload(self):
diff --git a/buildscripts/resmokelib/testing/testcases/interface.py b/buildscripts/resmokelib/testing/testcases/interface.py
index c7ed470f252..f66abef0f3b 100644
--- a/buildscripts/resmokelib/testing/testcases/interface.py
+++ b/buildscripts/resmokelib/testing/testcases/interface.py
@@ -12,7 +12,6 @@ import unittest
from ... import logging
from ...utils import registry
-
_TEST_CASES = {}
@@ -139,8 +138,8 @@ class ProcessTestCase(TestCase): # pylint: disable=abstract-method
except self.failureException:
raise
except:
- self.logger.exception("Encountered an error running %s %s",
- self.test_kind, self.basename())
+ self.logger.exception("Encountered an error running %s %s", self.test_kind,
+ self.basename())
raise
def as_command(self):
diff --git a/buildscripts/resmokelib/testing/testcases/json_schema_test.py b/buildscripts/resmokelib/testing/testcases/json_schema_test.py
index 24ad11e9b5a..8380b246bf6 100644
--- a/buildscripts/resmokelib/testing/testcases/json_schema_test.py
+++ b/buildscripts/resmokelib/testing/testcases/json_schema_test.py
@@ -15,21 +15,13 @@ class JSONSchemaTestCase(jsrunnerfile.JSRunnerFileTestCase):
REGISTERED_NAME = "json_schema_test"
- def __init__(self,
- logger,
- json_filename,
- shell_executable=None,
- shell_options=None):
+ def __init__(self, logger, json_filename, shell_executable=None, shell_options=None):
"""Initializes the JSONSchemaTestCase with the JSON test file."""
jsrunnerfile.JSRunnerFileTestCase.__init__(
- self,
- logger,
- "JSON Schema test",
- json_filename,
+ self, logger, "JSON Schema test", json_filename,
test_runner_file="jstests/libs/json_schema_test_runner.js",
- shell_executable=shell_executable,
- shell_options=shell_options)
+ shell_executable=shell_executable, shell_options=shell_options)
@property
def json_filename(self):
diff --git a/buildscripts/resmokelib/testing/testcases/jsrunnerfile.py b/buildscripts/resmokelib/testing/testcases/jsrunnerfile.py
index 070784a1c6e..45a9e5d4944 100644
--- a/buildscripts/resmokelib/testing/testcases/jsrunnerfile.py
+++ b/buildscripts/resmokelib/testing/testcases/jsrunnerfile.py
@@ -16,12 +16,7 @@ class JSRunnerFileTestCase(interface.ProcessTestCase):
REGISTERED_NAME = registry.LEAVE_UNREGISTERED
- def __init__(self,
- logger,
- test_kind,
- test_name,
- test_runner_file,
- shell_executable=None,
+ def __init__(self, logger, test_kind, test_name, test_runner_file, shell_executable=None,
shell_options=None):
"""Initializes the JSRunnerFileTestCase with the 'test_name' file."""
@@ -53,8 +48,6 @@ class JSRunnerFileTestCase(interface.ProcessTestCase):
def _make_process(self):
return core.programs.mongo_shell_program(
- self.logger,
- executable=self.shell_executable,
+ self.logger, executable=self.shell_executable,
connection_string=self.fixture.get_driver_connection_url(),
- filename=self.test_runner_file,
- **self.shell_options)
+ filename=self.test_runner_file, **self.shell_options)
diff --git a/buildscripts/resmokelib/testing/testcases/jstest.py b/buildscripts/resmokelib/testing/testcases/jstest.py
index b8320dcaba0..747e43fe01f 100644
--- a/buildscripts/resmokelib/testing/testcases/jstest.py
+++ b/buildscripts/resmokelib/testing/testcases/jstest.py
@@ -24,11 +24,7 @@ class _SingleJSTestCase(interface.ProcessTestCase):
REGISTERED_NAME = registry.LEAVE_UNREGISTERED
- def __init__(self,
- logger,
- js_filename,
- shell_executable=None,
- shell_options=None):
+ def __init__(self, logger, js_filename, shell_executable=None, shell_options=None):
"""
Initializes the _SingleJSTestCase with the JS file to run.
"""
@@ -114,17 +110,13 @@ class _SingleJSTestCase(interface.ProcessTestCase):
data_dir_prefix = utils.default_if_none(config.DBPATH_PREFIX,
global_vars.get("MongoRunner.dataDir"))
data_dir_prefix = utils.default_if_none(data_dir_prefix, config.DEFAULT_DBPATH_PREFIX)
- return os.path.join(data_dir_prefix,
- "job%d" % self.fixture.job_num,
+ return os.path.join(data_dir_prefix, "job%d" % self.fixture.job_num,
config.MONGO_RUNNER_SUBDIR)
def _make_process(self):
return core.programs.mongo_shell_program(
- self.logger,
- executable=self.shell_executable,
- filename=self.js_filename,
- connection_string=self.fixture.get_driver_connection_url(),
- **self.shell_options)
+ self.logger, executable=self.shell_executable, filename=self.js_filename,
+ connection_string=self.fixture.get_driver_connection_url(), **self.shell_options)
class JSTestCase(interface.ProcessTestCase):
@@ -151,11 +143,7 @@ class JSTestCase(interface.ProcessTestCase):
DEFAULT_CLIENT_NUM = 1
- def __init__(self,
- logger,
- js_filename,
- shell_executable=None,
- shell_options=None):
+ def __init__(self, logger, js_filename, shell_executable=None, shell_options=None):
"""
Initializes the JSTestCase with the JS file to run.
"""
@@ -204,10 +192,8 @@ class JSTestCase(interface.ProcessTestCase):
"""
shell_options = self._get_shell_options_for_thread(thread_id)
- test_case = _SingleJSTestCase(logger,
- self.test_case_template.js_filename,
- self.test_case_template.shell_executable,
- shell_options)
+ test_case = _SingleJSTestCase(logger, self.test_case_template.js_filename,
+ self.test_case_template.shell_executable, shell_options)
test_case.configure(self.fixture)
return test_case
@@ -253,9 +239,8 @@ class JSTestCase(interface.ProcessTestCase):
if thread.exc_info is not None:
if not isinstance(thread.exc_info[1], self.failureException):
self.logger.error(
- "Encountered an error inside thread %d running jstest %s.",
- thread_id, self.basename(),
- exc_info=thread.exc_info)
+ "Encountered an error inside thread %d running jstest %s.", thread_id,
+ self.basename(), exc_info=thread.exc_info)
raise thread.exc_info
def run_test(self):
diff --git a/buildscripts/resmokelib/testing/testcases/mongos_test.py b/buildscripts/resmokelib/testing/testcases/mongos_test.py
index ef29069a7eb..64b39a32cd9 100644
--- a/buildscripts/resmokelib/testing/testcases/mongos_test.py
+++ b/buildscripts/resmokelib/testing/testcases/mongos_test.py
@@ -17,9 +17,7 @@ class MongosTestCase(interface.ProcessTestCase):
REGISTERED_NAME = "mongos_test"
- def __init__(self,
- logger,
- mongos_options):
+ def __init__(self, logger, mongos_options):
"""
Initializes the mongos test and saves the options.
"""
@@ -41,6 +39,5 @@ class MongosTestCase(interface.ProcessTestCase):
self.options["test"] = ""
def _make_process(self):
- return core.programs.mongos_program(self.logger,
- executable=self.mongos_executable,
+ return core.programs.mongos_program(self.logger, executable=self.mongos_executable,
**self.options)
diff --git a/buildscripts/resmokelib/testing/testcases/sleeptest.py b/buildscripts/resmokelib/testing/testcases/sleeptest.py
index 4f5f695937f..f521ecea870 100644
--- a/buildscripts/resmokelib/testing/testcases/sleeptest.py
+++ b/buildscripts/resmokelib/testing/testcases/sleeptest.py
@@ -20,8 +20,8 @@ class SleepTestCase(interface.TestCase):
sleep_duration_secs = int(sleep_duration_secs)
- interface.TestCase.__init__(
- self, logger, "Sleep", "{:d} seconds".format(sleep_duration_secs))
+ interface.TestCase.__init__(self, logger, "Sleep",
+ "{:d} seconds".format(sleep_duration_secs))
self.__sleep_duration_secs = sleep_duration_secs
diff --git a/buildscripts/resmokelib/utils/archival.py b/buildscripts/resmokelib/utils/archival.py
index ea7f889f754..999e56b99ae 100644
--- a/buildscripts/resmokelib/utils/archival.py
+++ b/buildscripts/resmokelib/utils/archival.py
@@ -20,18 +20,13 @@ _IS_WINDOWS = sys.platform == "win32" or sys.platform == "cygwin"
if _IS_WINDOWS:
import ctypes
-UploadArgs = collections.namedtuple(
- "UploadArgs",
- ["archival_file",
- "display_name",
- "local_file",
- "content_type",
- "s3_bucket",
- "s3_path",
- "delete_file"])
+UploadArgs = collections.namedtuple("UploadArgs", [
+ "archival_file", "display_name", "local_file", "content_type", "s3_bucket", "s3_path",
+ "delete_file"
+])
-ArchiveArgs = collections.namedtuple(
- "ArchiveArgs", ["archival_file", "display_name", "remote_file"])
+ArchiveArgs = collections.namedtuple("ArchiveArgs",
+ ["archival_file", "display_name", "remote_file"])
def file_list_size(files):
@@ -97,11 +92,7 @@ def remove_file(file_name):
class Archival(object):
""" Class to support file archival to S3."""
- def __init__(self,
- logger,
- archival_json_file="archive.json",
- limit_size_mb=0,
- limit_files=0,
+ def __init__(self, logger, archival_json_file="archive.json", limit_size_mb=0, limit_files=0,
s3_client=None):
""" Archival init method. """
@@ -118,10 +109,9 @@ class Archival(object):
# Start the worker thread to update the 'archival_json_file'.
self._archive_file_queue = Queue.Queue()
- self._archive_file_worker = threading.Thread(
- target=self._update_archive_file_wkr,
- args=(self._archive_file_queue, logger),
- name="archive_file_worker")
+ self._archive_file_worker = threading.Thread(target=self._update_archive_file_wkr,
+ args=(self._archive_file_queue,
+ logger), name="archive_file_worker")
self._archive_file_worker.setDaemon(True)
self._archive_file_worker.start()
if not s3_client:
@@ -131,10 +121,9 @@ class Archival(object):
# Start the worker thread which uploads the archive.
self._upload_queue = Queue.Queue()
- self._upload_worker = threading.Thread(
- target=self._upload_to_s3_wkr,
- args=(self._upload_queue, self._archive_file_queue, logger, self.s3_client),
- name="upload_worker")
+ self._upload_worker = threading.Thread(target=self._upload_to_s3_wkr,
+ args=(self._upload_queue, self._archive_file_queue,
+ logger, self.s3_client), name="upload_worker")
self._upload_worker.setDaemon(True)
self._upload_worker.start()
@@ -167,11 +156,8 @@ class Archival(object):
status = 1
message = "Files not archived, {} file limit reached".format(self.limit_files)
else:
- status, message, file_size_mb = self._archive_files(
- display_name,
- input_files,
- s3_bucket,
- s3_path)
+ status, message, file_size_mb = self._archive_files(display_name, input_files,
+ s3_bucket, s3_path)
if status == 0:
self.num_files += 1
@@ -191,12 +177,11 @@ class Archival(object):
queue.task_done()
break
archival_record = {
- "name": archive_args.display_name,
- "link": archive_args.remote_file,
+ "name": archive_args.display_name, "link": archive_args.remote_file,
"visibility": "private"
}
- logger.debug(
- "Updating archive file %s with %s", archive_args.archival_file, archival_record)
+ logger.debug("Updating archive file %s with %s", archive_args.archival_file,
+ archival_record)
archival_json.append(archival_record)
with open(archive_args.archival_file, "w") as archival_fh:
json.dump(archival_json, archival_fh)
@@ -213,21 +198,15 @@ class Archival(object):
archive_file_queue.put(None)
break
extra_args = {"ContentType": upload_args.content_type, "ACL": "public-read"}
- logger.debug("Uploading to S3 %s to bucket %s path %s",
- upload_args.local_file,
- upload_args.s3_bucket,
- upload_args.s3_path)
+ logger.debug("Uploading to S3 %s to bucket %s path %s", upload_args.local_file,
+ upload_args.s3_bucket, upload_args.s3_path)
upload_completed = False
try:
- s3_client.upload_file(upload_args.local_file,
- upload_args.s3_bucket,
- upload_args.s3_path,
- ExtraArgs=extra_args)
+ s3_client.upload_file(upload_args.local_file, upload_args.s3_bucket,
+ upload_args.s3_path, ExtraArgs=extra_args)
upload_completed = True
logger.debug("Upload to S3 completed for %s to bucket %s path %s",
- upload_args.local_file,
- upload_args.s3_bucket,
- upload_args.s3_path)
+ upload_args.local_file, upload_args.s3_bucket, upload_args.s3_path)
except Exception as err:
logger.exception("Upload to S3 error %s", err)
@@ -236,11 +215,11 @@ class Archival(object):
if status:
logger.error("Upload to S3 delete file error %s", message)
- remote_file = "https://s3.amazonaws.com/{}/{}".format(
- upload_args.s3_bucket, upload_args.s3_path)
+ remote_file = "https://s3.amazonaws.com/{}/{}".format(upload_args.s3_bucket,
+ upload_args.s3_path)
if upload_completed:
- archive_file_queue.put(ArchiveArgs(
- upload_args.archival_file, upload_args.display_name, remote_file))
+ archive_file_queue.put(
+ ArchiveArgs(upload_args.archival_file, upload_args.display_name, remote_file))
queue.task_done()
@@ -288,14 +267,9 @@ class Archival(object):
# Round up the size of the archive.
size_mb = int(math.ceil(float(file_list_size(temp_file)) / (1024 * 1024)))
- self._upload_queue.put(UploadArgs(
- self.archival_json_file,
- display_name,
- temp_file,
- "application/x-gzip",
- s3_bucket,
- s3_path,
- True))
+ self._upload_queue.put(
+ UploadArgs(self.archival_json_file, display_name, temp_file, "application/x-gzip",
+ s3_bucket, s3_path, True))
return status, message, size_mb
diff --git a/buildscripts/resmokelib/utils/globstar.py b/buildscripts/resmokelib/utils/globstar.py
index 644ebfe3e38..443d75b6b0c 100644
--- a/buildscripts/resmokelib/utils/globstar.py
+++ b/buildscripts/resmokelib/utils/globstar.py
@@ -9,7 +9,6 @@ import os
import os.path
import re
-
_GLOBSTAR = "**"
_CONTAINS_GLOB_PATTERN = re.compile("[*?[]")
diff --git a/buildscripts/resmokelib/utils/jscomment.py b/buildscripts/resmokelib/utils/jscomment.py
index 18da7885820..43484573fac 100644
--- a/buildscripts/resmokelib/utils/jscomment.py
+++ b/buildscripts/resmokelib/utils/jscomment.py
@@ -8,7 +8,6 @@ import re
import yaml
-
# TODO: use a more robust regular expression for matching tags
_JSTEST_TAGS_RE = re.compile(r".*@tags\s*:\s*(\[[^\]]*\])", re.DOTALL)
@@ -43,8 +42,8 @@ def get_tags(pathname):
raise TypeError("Expected a list of string tags, but got '%s'" % (tags))
return tags
except yaml.YAMLError as err:
- raise ValueError("File '%s' contained invalid tags (expected YAML): %s"
- % (pathname, err))
+ raise ValueError("File '%s' contained invalid tags (expected YAML): %s" % (pathname,
+ err))
return []
diff --git a/buildscripts/resmokelib/utils/queue.py b/buildscripts/resmokelib/utils/queue.py
index 80da5e2cc66..da059ffd852 100644
--- a/buildscripts/resmokelib/utils/queue.py
+++ b/buildscripts/resmokelib/utils/queue.py
@@ -12,7 +12,6 @@ from __future__ import absolute_import
import Queue
import time
-
# Exception that is raised when get_nowait() is called on an empty Queue.
Empty = Queue.Empty
diff --git a/buildscripts/resmokelib/utils/registry.py b/buildscripts/resmokelib/utils/registry.py
index 47d53d9d891..0a18c556e94 100644
--- a/buildscripts/resmokelib/utils/registry.py
+++ b/buildscripts/resmokelib/utils/registry.py
@@ -8,7 +8,6 @@ its name.
from __future__ import absolute_import
-
# Specifying 'LEAVE_UNREGISTERED' as the "REGISTERED_NAME" attribute will cause the class to be
# omitted from the registry. This is particularly useful for base classes that define an interface
# or common functionality, and aren't intended to be constructed explicitly.
@@ -51,9 +50,9 @@ def make_registry_metaclass(registry_store):
if registered_name is not LEAVE_UNREGISTERED:
if registered_name in registry_store:
- raise ValueError(
- "The name %s is already registered; a different value for the"
- " 'REGISTERED_NAME' attribute must be chosen" % (registered_name))
+ raise ValueError("The name %s is already registered; a different value for the"
+ " 'REGISTERED_NAME' attribute must be chosen" %
+ (registered_name))
registry_store[registered_name] = cls
return cls
diff --git a/buildscripts/scons.py b/buildscripts/scons.py
index b0b9cfa834e..b7da54ed99b 100755
--- a/buildscripts/scons.py
+++ b/buildscripts/scons.py
@@ -8,7 +8,7 @@ import sys
SCONS_VERSION = os.environ.get('SCONS_VERSION', "2.5.0")
mongodb_root = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
-scons_dir = os.path.join(mongodb_root, 'src', 'third_party','scons-' + SCONS_VERSION,
+scons_dir = os.path.join(mongodb_root, 'src', 'third_party', 'scons-' + SCONS_VERSION,
'scons-local-' + SCONS_VERSION)
if not os.path.exists(scons_dir):
diff --git a/buildscripts/scons_cache_prune.py b/buildscripts/scons_cache_prune.py
index d82ac77101d..21b5582bbed 100644
--- a/buildscripts/scons_cache_prune.py
+++ b/buildscripts/scons_cache_prune.py
@@ -18,7 +18,7 @@ import shutil
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("scons.cache.prune.lru")
-GIGBYTES = 1024*1024*1024
+GIGBYTES = 1024 * 1024 * 1024
cache_item = collections.namedtuple("CacheContents", ["path", "time", "size"])
@@ -40,8 +40,7 @@ def collect_cache_contents(cache_path):
"The cache may be currupt.".format(file_path))
continue
- item = cache_item(path=file_path,
- time=os.stat(file_path).st_atime,
+ item = cache_item(path=file_path, time=os.stat(file_path).st_atime,
size=os.stat(file_path).st_size)
total += item.size
@@ -104,8 +103,7 @@ def prune_cache(cache_path, cache_size_gb, clean_ratio):
def main():
parser = argparse.ArgumentParser(description="SCons cache pruning tool")
- parser.add_argument("--cache-dir", "-d", default=None,
- help="path to the cache directory.")
+ parser.add_argument("--cache-dir", "-d", default=None, help="path to the cache directory.")
parser.add_argument("--cache-size", "-s", default=200, type=int,
help="maximum size of cache in GB.")
parser.add_argument("--prune-ratio", "-p", default=0.8, type=float,
@@ -119,13 +117,13 @@ def main():
logger.error("must specify a valid cache path, [{0}]".format(args.cache_dir))
exit(1)
- ok = prune_cache(cache_path=args.cache_dir,
- cache_size_gb=args.cache_size,
+ ok = prune_cache(cache_path=args.cache_dir, cache_size_gb=args.cache_size,
clean_ratio=args.prune_ratio)
if not ok:
logger.error("encountered error cleaning the cache. exiting.")
exit(1)
+
if __name__ == "__main__":
main()
diff --git a/buildscripts/setup_multiversion_mongodb.py b/buildscripts/setup_multiversion_mongodb.py
index 449a1995c0c..8027f268369 100755
--- a/buildscripts/setup_multiversion_mongodb.py
+++ b/buildscripts/setup_multiversion_mongodb.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-
"""Install multiple versions of MongoDB on a machine."""
from __future__ import print_function
@@ -23,6 +22,7 @@ import zipfile
import requests
import requests.exceptions
+
def dump_stacks(_signal_num, _frame):
"""Dump stacks when SIGUSR1 is received."""
print("======================================")
@@ -102,8 +102,8 @@ def download_file(url, file_name, download_retries=5):
download_retries -= 1
if download_retries == 0:
raise Exception("Downloaded file size ({} bytes) doesn't match content length"
- "({} bytes) for URL {}".format(
- file_size, url_content_length, url))
+ "({} bytes) for URL {}".format(file_size, url_content_length,
+ url))
continue
return True
@@ -114,13 +114,7 @@ def download_file(url, file_name, download_retries=5):
class MultiVersionDownloader(object):
"""Class to support multiversion downloads."""
- def __init__(self,
- install_dir,
- link_dir,
- edition,
- platform,
- architecture,
- use_latest=False):
+ def __init__(self, install_dir, link_dir, edition, platform, architecture, use_latest=False):
self.install_dir = install_dir
self.link_dir = link_dir
self.edition = edition.lower()
@@ -174,12 +168,12 @@ class MultiVersionDownloader(object):
for download in json_version["downloads"]:
if "target" not in download or "edition" not in download:
continue
- if (download["target"].lower() == self.platform and
- download["arch"].lower() == self.architecture and
- download["edition"].lower() == self.edition):
+ if (download["target"].lower() == self.platform
+ and download["arch"].lower() == self.architecture
+ and download["edition"].lower() == self.edition):
links[version] = download["archive"]["url"]
- elif (download["target"].lower() == generic_target and
- download["edition"].lower() == "base"):
+ elif (download["target"].lower() == generic_target
+ and download["edition"].lower() == "base"):
generic_links[version] = download["archive"]["url"]
return links, generic_links
@@ -247,8 +241,8 @@ class MultiVersionDownloader(object):
# of the 'extract_dir' cannot be derived from the URL, since it contains the githash.
already_downloaded = os.path.isdir(os.path.join(self.install_dir, extract_dir))
if already_downloaded:
- print("Skipping download for version {} ({}) since the dest already exists '{}'"
- .format(version, full_version, extract_dir))
+ print("Skipping download for version {} ({}) since the dest already exists '{}'".format(
+ version, full_version, extract_dir))
return None
else:
temp_file = tempfile.mktemp(suffix=file_suffix)
@@ -352,6 +346,7 @@ class MultiVersionDownloader(object):
flags = 1 if os.path.isdir(source) else 0
if csl(link_name, source.replace("/", "\\"), flags) == 0:
raise ctypes.WinError()
+
os.symlink = symlink_ms
os.symlink(executable, executable_link)
except OSError as exc:
@@ -400,59 +395,37 @@ Note: If "rc" is included in the version name, we'll use the exact rc, otherwise
we'll pull the highest non-rc version compatible with the version specified.
""")
- parser.add_option("-i", "--installDir",
- dest="install_dir",
- help="Directory to install the download archive. [REQUIRED]",
- default=None)
- parser.add_option("-l", "--linkDir",
- dest="link_dir",
- help="Directory to contain links to all binaries for each version in"
- " the install directory. [REQUIRED]",
- default=None)
+ parser.add_option("-i", "--installDir", dest="install_dir",
+ help="Directory to install the download archive. [REQUIRED]", default=None)
+ parser.add_option("-l", "--linkDir", dest="link_dir",
+ help=("Directory to contain links to all binaries for each version in"
+ " the install directory. [REQUIRED]"), default=None)
editions = ["base", "enterprise", "targeted"]
- parser.add_option("-e", "--edition",
- dest="edition",
- choices=editions,
- help="Edition of the build to download, choose from {}, [default:"
- " '%default'].".format(editions),
- default="base")
- parser.add_option("-p", "--platform",
- dest="platform",
- help="Platform to download [REQUIRED]. Examples include: 'linux',"
- " 'osx', 'rhel62', 'windows'.",
- default=None)
- parser.add_option("-a", "--architecture",
- dest="architecture",
- help="Architecture to download, [default: '%default']. Examples include:"
- " 'arm64', 'ppc64le', 's390x' and 'x86_64'.",
- default="x86_64")
- parser.add_option("-u", "--useLatest",
- dest="use_latest",
- action="store_true",
- help="If specified, the latest (nightly) version will be downloaded,"
- " if it exists, for the version specified. For example, if specifying"
- " version 3.2 for download, the nightly version for 3.2 will be"
- " downloaded if it exists, otherwise the 'highest' version will be"
- " downloaded, i.e., '3.2.17'",
- default=False)
+ parser.add_option("-e", "--edition", dest="edition", choices=editions,
+ help=("Edition of the build to download, choose from {}, [default:"
+ " '%default'].".format(editions)), default="base")
+ parser.add_option("-p", "--platform", dest="platform",
+ help=("Platform to download [REQUIRED]. Examples include: 'linux',"
+ " 'osx', 'rhel62', 'windows'."), default=None)
+ parser.add_option("-a", "--architecture", dest="architecture",
+ help=("Architecture to download, [default: '%default']. Examples include:"
+ " 'arm64', 'ppc64le', 's390x' and 'x86_64'."), default="x86_64")
+ parser.add_option("-u", "--useLatest", dest="use_latest", action="store_true",
+ help=("If specified, the latest (nightly) version will be downloaded,"
+ " if it exists, for the version specified. For example, if specifying"
+ " version 3.2 for download, the nightly version for 3.2 will be"
+ " downloaded if it exists, otherwise the 'highest' version will be"
+ " downloaded, i.e., '3.2.17'"), default=False)
options, versions = parser.parse_args()
# Check for required options.
- if (not versions or
- not options.install_dir or
- not options.link_dir or
- not options.platform):
+ if (not versions or not options.install_dir or not options.link_dir or not options.platform):
parser.print_help()
parser.exit(1)
- downloader = MultiVersionDownloader(
- options.install_dir,
- options.link_dir,
- options.edition,
- options.platform,
- options.architecture,
- options.use_latest)
+ downloader = MultiVersionDownloader(options.install_dir, options.link_dir, options.edition,
+ options.platform, options.architecture, options.use_latest)
for version in versions:
downloader.download_install(version)
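For readers skimming the hunks above: the MultiVersionDownloader changes are purely cosmetic, switching to yapf's continuation style in which boolean operators begin the wrapped line rather than ending the previous one. Below is a minimal, self-contained sketch (not part of the patch) of that style; the function name and the sample values are illustrative only.

    def matches(download, platform, architecture, edition):
        """Return True when a download entry matches the requested build."""
        return (download["target"].lower() == platform
                and download["arch"].lower() == architecture
                and download["edition"].lower() == edition)

    # Mirrors the comparison reformatted in the hunk near the top of this section.
    print(matches({"target": "Linux", "arch": "x86_64", "edition": "Base"},
                  "linux", "x86_64", "base"))  # True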
diff --git a/buildscripts/test_failures.py b/buildscripts/test_failures.py
index 29bd25a2d7b..644eadb65e2 100755
--- a/buildscripts/test_failures.py
+++ b/buildscripts/test_failures.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-
"""
Utility for computing test failure rates from the Evergreen API.
"""
@@ -31,10 +30,9 @@ import yaml
LOGGER = logging.getLogger(__name__)
if sys.version_info[0] == 2:
- _STRING_TYPES = (basestring,)
+ _STRING_TYPES = (basestring, )
else:
- _STRING_TYPES = (str,)
-
+ _STRING_TYPES = (str, )
_ReportEntry = collections.namedtuple("_ReportEntry", [
"test",
@@ -175,13 +173,8 @@ class ReportEntry(_ReportEntry):
variant = next(iter(variant)) if len(variant) == 1 else ReportEntry._MULTIPLE_VARIANTS
distro = next(iter(distro)) if len(distro) == 1 else ReportEntry._MULTIPLE_DISTROS
- return ReportEntry(test=test,
- task=task,
- variant=variant,
- distro=distro,
- start_date=start_date,
- end_date=end_date,
- num_pass=num_pass,
+ return ReportEntry(test=test, task=task, variant=variant, distro=distro,
+ start_date=start_date, end_date=end_date, num_pass=num_pass,
num_fail=num_fail)
@@ -190,7 +183,7 @@ class Report(object):
A class for generating summarizations about Evergreen test executions.
"""
- TEST = ("test",)
+ TEST = ("test", )
TEST_TASK = ("test", "task")
TEST_TASK_VARIANT = ("test", "task", "variant")
TEST_TASK_VARIANT_DISTRO = ("test", "task", "variant", "distro")
@@ -251,9 +244,8 @@ class Report(object):
if not isinstance(component, _STRING_TYPES):
raise TypeError("Each element of 'components' argument must be a string")
elif component not in ReportEntry._fields:
- raise ValueError(
- "Each element of 'components' argument must be one of {}".format(
- ReportEntry._fields))
+ raise ValueError("Each element of 'components' argument must be one of {}".format(
+ ReportEntry._fields))
group_by = [operator.attrgetter(component) for component in components]
@@ -353,13 +345,8 @@ class TestHistory(object):
_MISSING_DISTRO = Missing("distro")
- def __init__(self,
- api_server=DEFAULT_API_SERVER,
- project=DEFAULT_PROJECT,
- tests=None,
- tasks=None,
- variants=None,
- distros=None):
+ def __init__(self, api_server=DEFAULT_API_SERVER, project=DEFAULT_PROJECT, tests=None,
+ tasks=None, variants=None, distros=None):
"""
Initializes the TestHistory instance with the list of tests, tasks, variants, and distros
specified.
@@ -384,9 +371,7 @@ class TestHistory(object):
project=project,
)
- def get_history_by_revision(self,
- start_revision,
- end_revision,
+ def get_history_by_revision(self, start_revision, end_revision,
test_statuses=DEFAULT_TEST_STATUSES,
task_statuses=DEFAULT_TASK_STATUSES):
"""
@@ -421,10 +406,7 @@ class TestHistory(object):
return history_data
- def get_history_by_date(self,
- start_date,
- end_date,
- test_statuses=DEFAULT_TEST_STATUSES,
+ def get_history_by_date(self, start_date, end_date, test_statuses=DEFAULT_TEST_STATUSES,
task_statuses=DEFAULT_TASK_STATUSES):
"""
Returns a list of ReportEntry instances corresponding to each individual test execution
@@ -485,8 +467,8 @@ class TestHistory(object):
LOGGER.debug("Request took %fs", round(time.time() - start, 2))
response.raise_for_status()
return self._get_json(response)
- except (requests.exceptions.HTTPError,
- requests.exceptions.ConnectionError, JSONResponseError) as err:
+ except (requests.exceptions.HTTPError, requests.exceptions.ConnectionError,
+ JSONResponseError) as err:
if isinstance(err, JSONResponseError):
err = err.cause
retries += 1
@@ -527,13 +509,10 @@ class TestHistory(object):
return ReportEntry(
test=self._normalize_test_file(test_result["test_file"]),
- task=test_result["task_name"],
- variant=test_result["variant"],
- distro=test_result.get("distro", self._MISSING_DISTRO),
- start_date=start_date,
- end_date=end_date,
- num_pass=(1 if test_result["test_status"] == "pass" else 0),
- num_fail=(1 if test_result["test_status"] not in ("pass", "skip") else 0))
+ task=test_result["task_name"], variant=test_result["variant"], distro=test_result.get(
+ "distro", self._MISSING_DISTRO), start_date=start_date, end_date=end_date,
+ num_pass=(1 if test_result["test_status"] == "pass" else
+ 0), num_fail=(1 if test_result["test_status"] not in ("pass", "skip") else 0))
@staticmethod
def _normalize_test_file(test_file):
@@ -619,87 +598,72 @@ def main():
parser = optparse.OptionParser(description=main.__doc__,
usage="Usage: %prog [options] [test1 test2 ...]")
- parser.add_option("--project", dest="project",
- metavar="<project-name>",
+ parser.add_option("--project", dest="project", metavar="<project-name>",
default=TestHistory.DEFAULT_PROJECT,
help="The Evergreen project to analyze. Defaults to '%default'.")
today = datetime.datetime.utcnow().replace(microsecond=0, tzinfo=None)
- parser.add_option("--sinceDate", dest="since_date",
- metavar="<yyyy-mm-dd>",
+ parser.add_option("--sinceDate", dest="since_date", metavar="<yyyy-mm-dd>",
default="{:%Y-%m-%d}".format(today - datetime.timedelta(days=6)),
help=("The starting period as a date in UTC to analyze the test history for,"
" including the specified date. Defaults to 1 week ago (%default)."))
- parser.add_option("--untilDate", dest="until_date",
- metavar="<yyyy-mm-dd>",
+ parser.add_option("--untilDate", dest="until_date", metavar="<yyyy-mm-dd>",
default="{:%Y-%m-%d}".format(today),
help=("The ending period as a date in UTC to analyze the test history for,"
" including the specified date. Defaults to today (%default)."))
- parser.add_option("--sinceRevision", dest="since_revision",
- metavar="<gitrevision>",
+ parser.add_option("--sinceRevision", dest="since_revision", metavar="<gitrevision>",
default=None,
help=("The starting period as a git revision to analyze the test history for,"
" excluding the specified commit. This option must be specified in"
" conjuction with --untilRevision and takes precedence over --sinceDate"
" and --untilDate."))
- parser.add_option("--untilRevision", dest="until_revision",
- metavar="<gitrevision>",
+ parser.add_option("--untilRevision", dest="until_revision", metavar="<gitrevision>",
default=None,
help=("The ending period as a git revision to analyze the test history for,"
" including the specified commit. This option must be specified in"
" conjuction with --sinceRevision and takes precedence over --sinceDate"
" and --untilDate."))
- parser.add_option("--groupPeriod", dest="group_period",
- metavar="[{}]".format("|".join([Report.DAILY, Report.WEEKLY, "<ndays>"])),
- default=Report.WEEKLY,
+ parser.add_option("--groupPeriod", dest="group_period", metavar="[{}]".format(
+ "|".join([Report.DAILY, Report.WEEKLY, "<ndays>"])), default=Report.WEEKLY,
help=("The time period over which to group test executions. Defaults to"
" '%default'."))
parser.add_option("--weekStartDay", dest="start_day_of_week",
- choices=(Report.SUNDAY, Report.MONDAY, Report.FIRST_DAY),
- metavar="[{}]".format(
- "|".join([Report.SUNDAY, Report.MONDAY, Report.FIRST_DAY])),
- default=Report.FIRST_DAY,
+ choices=(Report.SUNDAY, Report.MONDAY,
+ Report.FIRST_DAY), metavar="[{}]".format(
+ "|".join([Report.SUNDAY, Report.MONDAY,
+ Report.FIRST_DAY])), default=Report.FIRST_DAY,
help=("The day to use as the beginning of the week when grouping over time."
" This option is only relevant in conjuction with --groupPeriod={}. If"
" '{}' is specified, then the day of week of the earliest date is used"
" as the beginning of the week. Defaults to '%default'.".format(
Report.WEEKLY, Report.FIRST_DAY)))
- parser.add_option("--tasks", dest="tasks",
- metavar="<task1,task2,...>",
- default="",
+ parser.add_option("--tasks", dest="tasks", metavar="<task1,task2,...>", default="",
help="Comma-separated list of Evergreen task names to analyze.")
- parser.add_option("--variants", dest="variants",
- metavar="<variant1,variant2,...>",
- default="",
+ parser.add_option("--variants", dest="variants", metavar="<variant1,variant2,...>", default="",
help="Comma-separated list of Evergreen build variants to analyze.")
- parser.add_option("--distros", dest="distros",
- metavar="<distro1,distro2,...>",
- default="",
+ parser.add_option("--distros", dest="distros", metavar="<distro1,distro2,...>", default="",
help="Comma-separated list of Evergreen build distros to analyze.")
parser.add_option("--numRequestRetries", dest="num_request_retries",
- metavar="<num-request-retries>",
- default=TestHistory.DEFAULT_NUM_RETRIES,
+ metavar="<num-request-retries>", default=TestHistory.DEFAULT_NUM_RETRIES,
help=("The number of times a request to the Evergreen API will be retried on"
" failure. Defaults to '%default'."))
(options, tests) = parser.parse_args()
- for (option_name, option_dest) in (("--sinceDate", "since_date"),
- ("--untilDate", "until_date")):
+ for (option_name, option_dest) in (("--sinceDate", "since_date"), ("--untilDate",
+ "until_date")):
option_value = getattr(options, option_dest)
try:
- setattr(options,
- option_dest,
- _parse_date(option_value))
+ setattr(options, option_dest, _parse_date(option_value))
except ValueError:
parser.print_help(file=sys.stderr)
print(file=sys.stderr)
@@ -754,26 +718,20 @@ def main():
api_server = "{url.scheme}://{url.netloc}".format(
url=urlparse(evg_config.get("api_server_host", TestHistory.DEFAULT_API_SERVER)))
- test_history = TestHistory(api_server=api_server,
- project=options.project,
- tests=tests,
- tasks=options.tasks.split(","),
- variants=options.variants.split(","),
+ test_history = TestHistory(api_server=api_server, project=options.project, tests=tests,
+ tasks=options.tasks.split(","), variants=options.variants.split(","),
distros=options.distros.split(","))
test_history.num_retries = options.num_request_retries
if options.since_revision:
- history_data = test_history.get_history_by_revision(
- start_revision=options.since_revision,
- end_revision=options.until_revision)
+ history_data = test_history.get_history_by_revision(start_revision=options.since_revision,
+ end_revision=options.until_revision)
elif options.since_date:
- history_data = test_history.get_history_by_date(
- start_date=options.since_date,
- end_date=options.until_date)
+ history_data = test_history.get_history_by_date(start_date=options.since_date,
+ end_date=options.until_date)
report = Report(history_data)
- summary = report.summarize_by(Report.TEST_TASK_VARIANT_DISTRO,
- time_period=options.group_period,
+ summary = report.summarize_by(Report.TEST_TASK_VARIANT_DISTRO, time_period=options.group_period,
start_day_of_week=options.start_day_of_week)
for entry in summary:
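The test_failures.py hunks above only re-wrap argument lists; the data model is unchanged. As a rough, self-contained sketch (assumptions, not code from test_failures.py), each test execution becomes one entry whose pass/fail counters are summed when executions are grouped. The field names below mirror the ReportEntry keyword arguments visible in the hunks, while the sample values are invented.

    import collections
    import datetime

    Entry = collections.namedtuple(
        "Entry", ["test", "task", "variant", "distro",
                  "start_date", "end_date", "num_pass", "num_fail"])

    executions = [
        Entry("jstests/core/foo.js", "jsCore", "linux-64", "rhel62",
              datetime.date(2018, 3, 1), datetime.date(2018, 3, 1), 1, 0),
        Entry("jstests/core/foo.js", "jsCore", "linux-64", "rhel62",
              datetime.date(2018, 3, 2), datetime.date(2018, 3, 2), 0, 1),
    ]

    # Grouping by test collapses the executions into one row with summed counters.
    num_pass = sum(entry.num_pass for entry in executions)
    num_fail = sum(entry.num_fail for entry in executions)
    print("fail rate: {:.2f}".format(num_fail / float(num_pass + num_fail)))  # 0.50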
diff --git a/buildscripts/tests/__init__.py b/buildscripts/tests/__init__.py
index e69de29bb2d..4b7a2bb941b 100644
--- a/buildscripts/tests/__init__.py
+++ b/buildscripts/tests/__init__.py
@@ -0,0 +1 @@
+"""Empty."""
diff --git a/buildscripts/tests/ciconfig/__init__.py b/buildscripts/tests/ciconfig/__init__.py
index e69de29bb2d..4b7a2bb941b 100644
--- a/buildscripts/tests/ciconfig/__init__.py
+++ b/buildscripts/tests/ciconfig/__init__.py
@@ -0,0 +1 @@
+"""Empty."""
diff --git a/buildscripts/tests/ciconfig/test_evergreen.py b/buildscripts/tests/ciconfig/test_evergreen.py
index 294c313c447..fe31eb3dbab 100644
--- a/buildscripts/tests/ciconfig/test_evergreen.py
+++ b/buildscripts/tests/ciconfig/test_evergreen.py
@@ -8,7 +8,6 @@ import unittest
import buildscripts.ciconfig.evergreen as _evergreen
-
TEST_FILE_PATH = os.path.join(os.path.dirname(__file__), "evergreen.yml")
@@ -68,15 +67,12 @@ class TestTask(unittest.TestCase):
def test_from_dict(self):
task_dict = {
- "name": "compile",
- "depends_on": [],
- "commands": [
- {"func": "fetch source"},
- {"func": "run a task that passes"},
- {"func": "run a function with an arg",
- "vars": {"foobar": "TESTING: ONE"}},
- {"func": "run a function with an arg",
- "vars": {"foobar": "TESTING: TWO"}}]}
+ "name":
+ "compile", "depends_on": [],
+ "commands": [{"func": "fetch source"}, {"func": "run a task that passes"},
+ {"func": "run a function with an arg", "vars": {"foobar": "TESTING: ONE"}},
+ {"func": "run a function with an arg", "vars": {"foobar": "TESTING: TWO"}}]
+ }
task = _evergreen.Task(task_dict)
self.assertEqual("compile", task.name)
@@ -85,10 +81,12 @@ class TestTask(unittest.TestCase):
def test_resmoke_args(self):
task_dict = {
- "name": "jsCore",
- "commands": [
- {"func": "run tests",
- "vars": {"resmoke_args": "--suites=core --shellWriteMode=commands"}}]}
+ "name":
+ "jsCore", "commands": [{
+ "func": "run tests",
+ "vars": {"resmoke_args": "--suites=core --shellWriteMode=commands"}
+ }]
+ }
task = _evergreen.Task(task_dict)
self.assertEqual("--suites=core --shellWriteMode=commands", task.resmoke_args)
@@ -172,8 +170,9 @@ class TestVariant(unittest.TestCase):
def test_variant_tasks(self):
variant_ubuntu = self.conf.get_variant("ubuntu")
self.assertEqual(5, len(variant_ubuntu.tasks))
- for task_name in ["compile", "passing_test", "failing_test",
- "timeout_test", "resmoke_task"]:
+ for task_name in [
+ "compile", "passing_test", "failing_test", "timeout_test", "resmoke_task"
+ ]:
task = variant_ubuntu.get_task(task_name)
self.assertIsNotNone(task)
self.assertEqual(variant_ubuntu, task.variant)
@@ -194,5 +193,6 @@ class TestVariant(unittest.TestCase):
self.assertEqual("--suites=somesuite --storageEngine=mmapv1",
resmoke_task.combined_resmoke_args)
+
if __name__ == "__main__":
unittest.main()
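For context on what test_from_dict and test_resmoke_args assert: Task wraps a plain dict from the parsed Evergreen config, exposing the task name and, for a "run tests" command, the resmoke arguments. A short sketch, assuming the mongo repository root is on sys.path (the dict shape is taken from the tests above):

    import buildscripts.ciconfig.evergreen as _evergreen

    task = _evergreen.Task({
        "name": "jsCore",
        "commands": [{
            "func": "run tests",
            "vars": {"resmoke_args": "--suites=core --shellWriteMode=commands"},
        }],
    })
    print(task.name)          # jsCore
    print(task.resmoke_args)  # --suites=core --shellWriteMode=commands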
diff --git a/buildscripts/tests/ciconfig/test_tags.py b/buildscripts/tests/ciconfig/test_tags.py
index cf1f9bef265..7f8e923ab10 100644
--- a/buildscripts/tests/ciconfig/test_tags.py
+++ b/buildscripts/tests/ciconfig/test_tags.py
@@ -1,4 +1,3 @@
-
"""Unit tests for the buildscripts.ciconfig.tags module."""
from __future__ import absolute_import
@@ -7,7 +6,6 @@ import unittest
import buildscripts.ciconfig.tags as _tags
-
TEST_FILE_PATH = os.path.join(os.path.dirname(__file__), "tags.yml")
@@ -158,6 +156,7 @@ class TestTagsConfig(unittest.TestCase):
def custom_cmp(tag_a, tag_b):
return cmp(tag_a.split("|"), tag_b.split("|"))
+
conf = _tags.TagsConfig.from_file(TEST_FILE_PATH, cmp_func=custom_cmp)
tags = conf.get_tags(test_kind, test_pattern)
diff --git a/buildscripts/tests/resmokelib/__init__.py b/buildscripts/tests/resmokelib/__init__.py
index e69de29bb2d..4b7a2bb941b 100644
--- a/buildscripts/tests/resmokelib/__init__.py
+++ b/buildscripts/tests/resmokelib/__init__.py
@@ -0,0 +1 @@
+"""Empty."""
diff --git a/buildscripts/tests/resmokelib/logging/test_buildlogger.py b/buildscripts/tests/resmokelib/logging/test_buildlogger.py
index 7bc705948db..734f418bf10 100644
--- a/buildscripts/tests/resmokelib/logging/test_buildlogger.py
+++ b/buildscripts/tests/resmokelib/logging/test_buildlogger.py
@@ -34,16 +34,15 @@ class TestLogsSplitter(unittest.TestCase):
def test_split_max_size_larger(self):
logs = self.__generate_logs(size=31)
max_size = 30
- self.assertEqual(
- [logs[0:-1], logs[-1:]],
- buildlogger._LogsSplitter.split_logs(logs, max_size))
+ self.assertEqual([logs[0:-1], logs[-1:]],
+ buildlogger._LogsSplitter.split_logs(logs, max_size))
logs = self.__generate_logs(size=149)
max_size = 19
- self.assertEqual(
- [logs[0:3], logs[3:6], logs[6:9], logs[9:12], logs[12:15],
- logs[15:18], logs[18:21], logs[21:24], logs[24:27], logs[27:]],
- buildlogger._LogsSplitter.split_logs(logs, max_size))
+ self.assertEqual([
+ logs[0:3], logs[3:6], logs[6:9], logs[9:12], logs[12:15], logs[15:18], logs[18:21],
+ logs[21:24], logs[24:27], logs[27:]
+ ], buildlogger._LogsSplitter.split_logs(logs, max_size))
def check_split_sizes(self, splits, max_size):
for split in splits:
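The reformatted assertions above check that _LogsSplitter.split_logs packs consecutive log lines into chunks no larger than a size budget. The sketch below is a generic greedy splitter written for illustration only; how the real implementation measures chunk size (here, joined string length with one separator per line) is an assumption, not taken from buildlogger.py.

    def split_logs(log_lines, max_size):
        """Split log_lines into consecutive chunks of at most max_size characters."""
        if not max_size:
            return [log_lines]
        chunks, current, current_size = [], [], 0
        for line in log_lines:
            line_size = len(line) + 1  # assume one separator/newline per line
            if current and current_size + line_size > max_size:
                chunks.append(current)
                current, current_size = [], 0
            current.append(line)
            current_size += line_size
        if current:
            chunks.append(current)
        return chunks

    print(split_logs(["aaaa", "bbbb", "cccc"], max_size=10))  # [['aaaa', 'bbbb'], ['cccc']]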
diff --git a/buildscripts/tests/resmokelib/test_archival.py b/buildscripts/tests/resmokelib/test_archival.py
index e8a6b73d832..a21621060d9 100644
--- a/buildscripts/tests/resmokelib/test_archival.py
+++ b/buildscripts/tests/resmokelib/test_archival.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-
""" Unit tests for archival. """
from __future__ import absolute_import
@@ -13,7 +12,6 @@ import unittest
from buildscripts.resmokelib.utils import archival
-
_BUCKET = "mongodatafiles"
@@ -90,22 +88,21 @@ class ArchivalFileTests(ArchivalTestCase):
s3_path = self.s3_path("unittest/no_file.tgz", False)
self.assertRaises(
OSError,
- lambda: self.archive.archive_files_to_s3(
- display_name, input_files, self.bucket, s3_path))
+ lambda: self.archive.archive_files_to_s3(display_name, input_files, self.bucket, s3_path)
+ )
# Invalid input_files in a list
input_files = ["no_file", "no_file2"]
s3_path = self.s3_path("unittest/no_files.tgz", False)
self.assertRaises(
OSError,
- lambda: self.archive.archive_files_to_s3(
- display_name, input_files, self.bucket, s3_path))
+ lambda: self.archive.archive_files_to_s3(display_name, input_files, self.bucket, s3_path)
+ )
# No files
display_name = "Unittest no files"
s3_path = self.s3_path("unittest/no_files.tgz")
- status, message = self.archive.archive_files_to_s3(
- display_name, [], self.bucket, s3_path)
+ status, message = self.archive.archive_files_to_s3(display_name, [], self.bucket, s3_path)
self.assertEqual(1, status, message)
def test_files(self):
@@ -113,16 +110,16 @@ class ArchivalFileTests(ArchivalTestCase):
display_name = "Unittest valid file"
temp_file = tempfile.mkstemp(dir=self.temp_dir)[1]
s3_path = self.s3_path("unittest/valid_file.tgz")
- status, message = self.archive.archive_files_to_s3(
- display_name, temp_file, self.bucket, s3_path)
+ status, message = self.archive.archive_files_to_s3(display_name, temp_file, self.bucket,
+ s3_path)
self.assertEqual(0, status, message)
# 2 valid files
display_name = "Unittest 2 valid files"
temp_file2 = tempfile.mkstemp(dir=self.temp_dir)[1]
s3_path = self.s3_path("unittest/2valid_files.tgz")
- status, message = self.archive.archive_files_to_s3(
- display_name, [temp_file, temp_file2], self.bucket, s3_path)
+ status, message = self.archive.archive_files_to_s3(display_name, [temp_file, temp_file2],
+ self.bucket, s3_path)
self.assertEqual(0, status, message)
def test_empty_directory(self):
@@ -130,15 +127,15 @@ class ArchivalFileTests(ArchivalTestCase):
display_name = "Unittest valid directory no files"
temp_dir = tempfile.mkdtemp(dir=self.temp_dir)
s3_path = self.s3_path("unittest/valid_directory.tgz")
- status, message = self.archive.archive_files_to_s3(
- display_name, temp_dir, self.bucket, s3_path)
+ status, message = self.archive.archive_files_to_s3(display_name, temp_dir, self.bucket,
+ s3_path)
self.assertEqual(0, status, message)
display_name = "Unittest valid directories no files"
temp_dir2 = tempfile.mkdtemp(dir=self.temp_dir)
s3_path = self.s3_path("unittest/valid_directories.tgz")
- status, message = self.archive.archive_files_to_s3(
- display_name, [temp_dir, temp_dir2], self.bucket, s3_path)
+ status, message = self.archive.archive_files_to_s3(display_name, [temp_dir, temp_dir2],
+ self.bucket, s3_path)
self.assertEqual(0, status, message)
def test_directory(self):
@@ -148,8 +145,8 @@ class ArchivalFileTests(ArchivalTestCase):
# Create 10 empty files
for _ in xrange(10):
tempfile.mkstemp(dir=temp_dir)
- status, message = self.archive.archive_files_to_s3(
- display_name, temp_dir, self.bucket, s3_path)
+ status, message = self.archive.archive_files_to_s3(display_name, temp_dir, self.bucket,
+ s3_path)
self.assertEqual(0, status, message)
display_name = "Unittest 2 valid directory files"
@@ -158,8 +155,8 @@ class ArchivalFileTests(ArchivalTestCase):
# Create 10 empty files
for _ in xrange(10):
tempfile.mkstemp(dir=temp_dir2)
- status, message = self.archive.archive_files_to_s3(
- display_name, [temp_dir, temp_dir2], self.bucket, s3_path)
+ status, message = self.archive.archive_files_to_s3(display_name, [temp_dir, temp_dir2],
+ self.bucket, s3_path)
self.assertEqual(0, status, message)
@@ -175,19 +172,19 @@ class ArchivalLimitSizeTests(ArchivalTestCase):
temp_file = tempfile.mkstemp(dir=self.temp_dir)[1]
create_random_file(temp_file, 3)
s3_path = self.s3_path("unittest/valid_limit_size.tgz")
- status, message = self.archive.archive_files_to_s3(
- display_name, temp_file, self.bucket, s3_path)
+ status, message = self.archive.archive_files_to_s3(display_name, temp_file, self.bucket,
+ s3_path)
self.assertEqual(0, status, message)
# Note the size limit is enforced after the file uploaded. Subsequent
# uploads will not be permitted, once the limit has been reached.
- status, message = self.archive.archive_files_to_s3(
- display_name, temp_file, self.bucket, s3_path)
+ status, message = self.archive.archive_files_to_s3(display_name, temp_file, self.bucket,
+ s3_path)
self.assertEqual(0, status, message)
# Files beyond limit size
display_name = "Unittest over limit size"
- status, message = self.archive.archive_files_to_s3(
- display_name, temp_file, self.bucket, s3_path)
+ status, message = self.archive.archive_files_to_s3(display_name, temp_file, self.bucket,
+ s3_path)
self.assertEqual(1, status, message)
@@ -202,28 +199,28 @@ class ArchivalLimitFileTests(ArchivalTestCase):
display_name = "Unittest under limit number"
temp_file = tempfile.mkstemp(dir=self.temp_dir)[1]
s3_path = self.s3_path("unittest/valid_limit_number.tgz")
- status, message = self.archive.archive_files_to_s3(
- display_name, temp_file, self.bucket, s3_path)
+ status, message = self.archive.archive_files_to_s3(display_name, temp_file, self.bucket,
+ s3_path)
self.assertEqual(0, status, message)
- status, message = self.archive.archive_files_to_s3(
- display_name, temp_file, self.bucket, s3_path)
+ status, message = self.archive.archive_files_to_s3(display_name, temp_file, self.bucket,
+ s3_path)
self.assertEqual(0, status, message)
- status, message = self.archive.archive_files_to_s3(
- display_name, temp_file, self.bucket, s3_path)
+ status, message = self.archive.archive_files_to_s3(display_name, temp_file, self.bucket,
+ s3_path)
self.assertEqual(0, status, message)
# Files beyond limit number
display_name = "Unittest over limit number"
- status, message = self.archive.archive_files_to_s3(
- display_name, temp_file, self.bucket, s3_path)
+ status, message = self.archive.archive_files_to_s3(display_name, temp_file, self.bucket,
+ s3_path)
self.assertEqual(1, status, message)
class ArchivalLimitTests(ArchivalTestCase):
@classmethod
def create_archival(cls):
- return archival.Archival(
- cls.logger, limit_size_mb=3, limit_files=3, s3_client=cls.s3_client)
+ return archival.Archival(cls.logger, limit_size_mb=3, limit_files=3,
+ s3_client=cls.s3_client)
def test_limits(self):
@@ -232,17 +229,17 @@ class ArchivalLimitTests(ArchivalTestCase):
temp_file = tempfile.mkstemp(dir=self.temp_dir)[1]
create_random_file(temp_file, 1)
s3_path = self.s3_path("unittest/valid_limits.tgz")
- status, message = self.archive.archive_files_to_s3(
- display_name, temp_file, self.bucket, s3_path)
+ status, message = self.archive.archive_files_to_s3(display_name, temp_file, self.bucket,
+ s3_path)
self.assertEqual(0, status, message)
- status, message = self.archive.archive_files_to_s3(
- display_name, temp_file, self.bucket, s3_path)
+ status, message = self.archive.archive_files_to_s3(display_name, temp_file, self.bucket,
+ s3_path)
self.assertEqual(0, status, message)
# Files beyond limits
display_name = "Unittest over limits"
- status, message = self.archive.archive_files_to_s3(
- display_name, temp_file, self.bucket, s3_path)
+ status, message = self.archive.archive_files_to_s3(display_name, temp_file, self.bucket,
+ s3_path)
self.assertEqual(1, status, message)
diff --git a/buildscripts/tests/resmokelib/test_selector.py b/buildscripts/tests/resmokelib/test_selector.py
index 83cc48c3a8d..197678041d3 100644
--- a/buildscripts/tests/resmokelib/test_selector.py
+++ b/buildscripts/tests/resmokelib/test_selector.py
@@ -1,4 +1,3 @@
-
"""Unit tests for the buildscripts.resmokelib.selector module."""
from __future__ import absolute_import
@@ -30,8 +29,7 @@ class TestExpressions(unittest.TestCase):
tag2 = "other_tag"
tags_match = [tag2, tag1, "third_tag"]
tags_nomatch = [tag2, "some_tag"]
- expression = selector.make_expression({
- "$allOf": [tag1, tag2]})
+ expression = selector.make_expression({"$allOf": [tag1, tag2]})
self.assertIsInstance(expression, selector._AllOfExpression)
self.assertTrue(expression(tags_match))
self.assertFalse(expression(tags_nomatch))
@@ -42,8 +40,7 @@ class TestExpressions(unittest.TestCase):
tag2 = "other_tag"
tags_match = [tag1, "third_tag"]
tags_nomatch = ["third_tag", "some_tag"]
- expression = selector.make_expression({
- "$anyOf": [tag1, tag2]})
+ expression = selector.make_expression({"$anyOf": [tag1, tag2]})
self.assertIsInstance(expression, selector._AnyOfExpression)
self.assertTrue(expression(tags_match))
self.assertFalse(expression(tags_nomatch))
@@ -70,11 +67,10 @@ class TestExpressions(unittest.TestCase):
tags_nomatch_3 = [tag2, "other_tag_2"]
tags_nomatch_4 = [tag2]
tags_nomatch_5 = ["other_tag_2"]
- expression = selector.make_expression({
- "$allOf": [
- {"$anyOf": [tag1, tag2]},
- tag3,
- ]})
+ expression = selector.make_expression({"$allOf": [
+ {"$anyOf": [tag1, tag2]},
+ tag3,
+ ]})
self.assertIsInstance(expression, selector._AllOfExpression)
self.assertTrue(expression(tags_match_1))
self.assertTrue(expression(tags_match_2))
@@ -89,12 +85,10 @@ class TestExpressions(unittest.TestCase):
with self.assertRaises(ValueError):
selector.make_expression({"invalid": ["tag1", "tag2"]})
with self.assertRaises(ValueError):
- selector.make_expression({"$anyOf": ["tag1", "tag2"],
- "invalid": "tag3"})
+ selector.make_expression({"$anyOf": ["tag1", "tag2"], "invalid": "tag3"})
class TestTestFileExplorer(unittest.TestCase):
-
@classmethod
def setUpClass(cls):
cls.test_file_explorer = selector.TestFileExplorer()
@@ -111,22 +105,18 @@ class TestTestFileExplorer(unittest.TestCase):
class MockTestFileExplorer(object):
"""Component giving access to mock test files data."""
+
def __init__(self):
- self.files = ["dir/subdir1/test11.js",
- "dir/subdir1/test12.js",
- "dir/subdir2/test21.js",
- "dir/subdir3/a/test3a1.js",
- "build/testA",
- "build/testB",
- "build/testC",
- "dbtest"]
- self.tags = {"dir/subdir1/test11.js": ["tag1", "tag2"],
- "dir/subdir1/test12.js": ["tag3"],
- "dir/subdir2/test21.js": ["tag2", "tag4"],
- "dir/subdir3/a/test3a1.js": ["tag4", "tag5"]}
+ self.files = [
+ "dir/subdir1/test11.js", "dir/subdir1/test12.js", "dir/subdir2/test21.js",
+ "dir/subdir3/a/test3a1.js", "build/testA", "build/testB", "build/testC", "dbtest"
+ ]
+ self.tags = {
+ "dir/subdir1/test11.js": ["tag1", "tag2"], "dir/subdir1/test12.js": ["tag3"],
+ "dir/subdir2/test21.js": ["tag2", "tag4"], "dir/subdir3/a/test3a1.js": ["tag4", "tag5"]
+ }
self.binary = "dbtest"
- self.jstest_tag_file = {"dir/subdir1/test11.js": "tagA",
- "dir/subdir3/a/test3a1.js": "tagB"}
+ self.jstest_tag_file = {"dir/subdir1/test11.js": "tagA", "dir/subdir3/a/test3a1.js": "tagB"}
def is_glob_pattern(self, pattern):
return globstar.is_glob_pattern(pattern)
@@ -159,7 +149,6 @@ class MockTestFileExplorer(object):
class TestTestList(unittest.TestCase):
-
@classmethod
def setUpClass(cls):
cls.test_file_explorer = MockTestFileExplorer()
@@ -198,8 +187,7 @@ class TestTestList(unittest.TestCase):
test_list.include_files(["dir/subdir2/test21.js"])
selected, excluded = test_list.get_tests()
self.assertEqual(["dir/subdir2/test21.js"], selected)
- self.assertEqual(["dir/subdir1/test11.js",
- "dir/subdir1/test12.js"], excluded)
+ self.assertEqual(["dir/subdir1/test11.js", "dir/subdir1/test12.js"], excluded)
def test_include_files_no_match(self):
roots = ["dir/subdir1/*.js", "dir/subdir2/test21.*"]
@@ -207,17 +195,15 @@ class TestTestList(unittest.TestCase):
test_list.include_files(["dir/subdir2/test26.js"])
selected, excluded = test_list.get_tests()
self.assertEqual([], selected)
- self.assertEqual(["dir/subdir1/test11.js",
- "dir/subdir1/test12.js",
- "dir/subdir2/test21.js"], excluded)
+ self.assertEqual(
+ ["dir/subdir1/test11.js", "dir/subdir1/test12.js", "dir/subdir2/test21.js"], excluded)
def test_exclude_files(self):
roots = ["dir/subdir1/*.js", "dir/subdir2/test21.*"]
test_list = selector._TestList(self.test_file_explorer, roots)
test_list.exclude_files(["dir/subdir2/test21.js"])
selected, excluded = test_list.get_tests()
- self.assertEqual(["dir/subdir1/test11.js",
- "dir/subdir1/test12.js"], selected)
+ self.assertEqual(["dir/subdir1/test11.js", "dir/subdir1/test12.js"], selected)
self.assertEqual(["dir/subdir2/test21.js"], excluded)
def test_exclude_files_no_match(self):
@@ -231,25 +217,21 @@ class TestTestList(unittest.TestCase):
test_list = selector._TestList(self.test_file_explorer, roots)
test_list.exclude_files(["dir/subdir2/*.js"])
selected, excluded = test_list.get_tests()
- self.assertEqual(["dir/subdir1/test11.js",
- "dir/subdir1/test12.js"], selected)
+ self.assertEqual(["dir/subdir1/test11.js", "dir/subdir1/test12.js"], selected)
self.assertEqual(["dir/subdir2/test21.js"], excluded)
def test_match_tag_expression(self):
roots = ["dir/subdir1/*.js", "dir/subdir2/test21.*"]
test_list = selector._TestList(self.test_file_explorer, roots)
- expression = selector.make_expression({"$anyOf": [
- {"$allOf": ["tag1", "tag2"]},
- "tag3",
- {"$allOf": ["tag5", "tag6"]}]})
+ expression = selector.make_expression(
+ {"$anyOf": [{"$allOf": ["tag1", "tag2"]}, "tag3", {"$allOf": ["tag5", "tag6"]}]})
def get_tags(test_file):
return self.test_file_explorer.jstest_tags(test_file)
test_list.match_tag_expression(expression, get_tags)
selected, excluded = test_list.get_tests()
- self.assertEqual(["dir/subdir1/test11.js",
- "dir/subdir1/test12.js"], selected)
+ self.assertEqual(["dir/subdir1/test11.js", "dir/subdir1/test12.js"], selected)
self.assertEqual(["dir/subdir2/test21.js"], excluded)
def test_include_any_pattern(self):
@@ -259,42 +241,40 @@ class TestTestList(unittest.TestCase):
test_list.include_any_pattern(["dir/*3/a/*"])
selected, excluded = test_list.get_tests()
self.assertEqual(["dir/subdir3/a/test3a1.js"], selected)
- self.assertEqual(["dir/subdir1/test11.js",
- "dir/subdir1/test12.js",
- "dir/subdir2/test21.js"], excluded)
+ self.assertEqual(
+ ["dir/subdir1/test11.js", "dir/subdir1/test12.js", "dir/subdir2/test21.js"], excluded)
# 1 pattern and 0 matching
test_list = selector._TestList(self.test_file_explorer, roots)
test_list.include_any_pattern(["dir/*4/a/*"])
selected, excluded = test_list.get_tests()
self.assertEqual([], selected)
- self.assertEqual(["dir/subdir1/test11.js",
- "dir/subdir1/test12.js",
- "dir/subdir2/test21.js",
- "dir/subdir3/a/test3a1.js"], excluded)
+ self.assertEqual([
+ "dir/subdir1/test11.js", "dir/subdir1/test12.js", "dir/subdir2/test21.js",
+ "dir/subdir3/a/test3a1.js"
+ ], excluded)
# 3 patterns and 1 matching
test_list = selector._TestList(self.test_file_explorer, roots)
test_list.include_any_pattern(["dir/*3/a/*", "notmaching/*", "notmatching2/*.js"])
selected, excluded = test_list.get_tests()
self.assertEqual(["dir/subdir3/a/test3a1.js"], selected)
- self.assertEqual(["dir/subdir1/test11.js",
- "dir/subdir1/test12.js",
- "dir/subdir2/test21.js"], excluded)
+ self.assertEqual(
+ ["dir/subdir1/test11.js", "dir/subdir1/test12.js", "dir/subdir2/test21.js"], excluded)
# 3 patterns and 0 matching
test_list = selector._TestList(self.test_file_explorer, roots)
test_list.include_any_pattern(["dir2/*3/a/*", "notmaching/*", "notmatching2/*.js"])
selected, excluded = test_list.get_tests()
self.assertEqual([], selected)
- self.assertEqual(["dir/subdir1/test11.js",
- "dir/subdir1/test12.js",
- "dir/subdir2/test21.js",
- "dir/subdir3/a/test3a1.js"], excluded)
+ self.assertEqual([
+ "dir/subdir1/test11.js", "dir/subdir1/test12.js", "dir/subdir2/test21.js",
+ "dir/subdir3/a/test3a1.js"
+ ], excluded)
# 3 patterns and 3 matching
test_list = selector._TestList(self.test_file_explorer, roots)
test_list.include_any_pattern(["dir/*1/*11*", "dir/subdir3/**", "dir/subdir2/*.js"])
selected, excluded = test_list.get_tests()
- self.assertEqual(["dir/subdir1/test11.js", "dir/subdir2/test21.js",
- "dir/subdir3/a/test3a1.js"],
- selected)
+ self.assertEqual(
+ ["dir/subdir1/test11.js", "dir/subdir2/test21.js", "dir/subdir3/a/test3a1.js"],
+ selected)
self.assertEqual(["dir/subdir1/test12.js"], excluded)
def test_include_tests_no_force(self):
@@ -304,9 +284,8 @@ class TestTestList(unittest.TestCase):
test_list.include_files(["dir/subdir1/test11.js"], force=False)
selected, excluded = test_list.get_tests()
self.assertEqual([], selected)
- self.assertEqual(["dir/subdir1/test11.js",
- "dir/subdir1/test12.js",
- "dir/subdir2/test21.js"], excluded)
+ self.assertEqual(
+ ["dir/subdir1/test11.js", "dir/subdir1/test12.js", "dir/subdir2/test21.js"], excluded)
def test_include_tests_force(self):
roots = ["dir/subdir1/*.js", "dir/subdir2/test21.*"]
@@ -315,8 +294,7 @@ class TestTestList(unittest.TestCase):
test_list.include_files(["dir/subdir1/test11.js"], force=True)
selected, excluded = test_list.get_tests()
self.assertEqual(["dir/subdir1/test11.js"], selected)
- self.assertEqual(["dir/subdir1/test12.js",
- "dir/subdir2/test21.js"], excluded)
+ self.assertEqual(["dir/subdir1/test12.js", "dir/subdir2/test21.js"], excluded)
def test_tests_are_not_files(self):
roots = ["a", "b"]
@@ -343,60 +321,59 @@ class TestSelector(unittest.TestCase):
cls.selector = selector._Selector(MockTestFileExplorer())
def test_select_all(self):
- config = selector._SelectorConfig(roots=["dir/subdir1/*.js", "dir/subdir2/*.js",
- "dir/subdir3/a/*.js"])
+ config = selector._SelectorConfig(
+ roots=["dir/subdir1/*.js", "dir/subdir2/*.js", "dir/subdir3/a/*.js"])
selected, excluded = self.selector.select(config)
- self.assertEqual(["dir/subdir1/test11.js",
- "dir/subdir1/test12.js",
- "dir/subdir2/test21.js",
- "dir/subdir3/a/test3a1.js"], selected)
+ self.assertEqual([
+ "dir/subdir1/test11.js", "dir/subdir1/test12.js", "dir/subdir2/test21.js",
+ "dir/subdir3/a/test3a1.js"
+ ], selected)
self.assertEqual([], excluded)
def test_select_exclude_files(self):
- config = selector._SelectorConfig(roots=["dir/subdir1/*.js", "dir/subdir2/*.js",
- "dir/subdir3/a/*.js"],
- exclude_files=["dir/subdir2/test21.js"])
+ config = selector._SelectorConfig(
+ roots=["dir/subdir1/*.js", "dir/subdir2/*.js",
+ "dir/subdir3/a/*.js"], exclude_files=["dir/subdir2/test21.js"])
selected, excluded = self.selector.select(config)
- self.assertEqual(["dir/subdir1/test11.js",
- "dir/subdir1/test12.js",
- "dir/subdir3/a/test3a1.js"], selected)
+ self.assertEqual(
+ ["dir/subdir1/test11.js", "dir/subdir1/test12.js", "dir/subdir3/a/test3a1.js"],
+ selected)
self.assertEqual(["dir/subdir2/test21.js"], excluded)
def test_select_include_files(self):
- config = selector._SelectorConfig(roots=["dir/subdir1/*.js", "dir/subdir2/*.js",
- "dir/subdir3/a/*.js"],
- include_files=["dir/subdir2/test21.js"])
+ config = selector._SelectorConfig(
+ roots=["dir/subdir1/*.js", "dir/subdir2/*.js",
+ "dir/subdir3/a/*.js"], include_files=["dir/subdir2/test21.js"])
selected, excluded = self.selector.select(config)
self.assertEqual(["dir/subdir2/test21.js"], selected)
- self.assertEqual(["dir/subdir1/test11.js",
- "dir/subdir1/test12.js",
- "dir/subdir3/a/test3a1.js"], excluded)
+ self.assertEqual(
+ ["dir/subdir1/test11.js", "dir/subdir1/test12.js", "dir/subdir3/a/test3a1.js"],
+ excluded)
def test_select_include_tags(self):
- config = selector._SelectorConfig(roots=["dir/subdir1/*.js", "dir/subdir2/*.js",
- "dir/subdir3/a/*.js"],
- include_tags="tag1")
+ config = selector._SelectorConfig(
+ roots=["dir/subdir1/*.js", "dir/subdir2/*.js",
+ "dir/subdir3/a/*.js"], include_tags="tag1")
selected, excluded = self.selector.select(config)
self.assertEqual([], selected)
- self.assertEqual(["dir/subdir1/test11.js",
- "dir/subdir1/test12.js",
- "dir/subdir2/test21.js",
- "dir/subdir3/a/test3a1.js"], excluded)
+ self.assertEqual([
+ "dir/subdir1/test11.js", "dir/subdir1/test12.js", "dir/subdir2/test21.js",
+ "dir/subdir3/a/test3a1.js"
+ ], excluded)
def test_select_include_any_tags(self):
- config = selector._SelectorConfig(roots=["dir/subdir1/*.js", "dir/subdir2/*.js",
- "dir/subdir3/a/*.js"],
- include_with_any_tags=["tag1"])
+ config = selector._SelectorConfig(
+ roots=["dir/subdir1/*.js", "dir/subdir2/*.js",
+ "dir/subdir3/a/*.js"], include_with_any_tags=["tag1"])
selected, excluded = self.selector.select(config)
self.assertEqual([], selected)
- self.assertEqual(["dir/subdir1/test11.js",
- "dir/subdir1/test12.js",
- "dir/subdir2/test21.js",
- "dir/subdir3/a/test3a1.js"], excluded)
+ self.assertEqual([
+ "dir/subdir1/test11.js", "dir/subdir1/test12.js", "dir/subdir2/test21.js",
+ "dir/subdir3/a/test3a1.js"
+ ], excluded)
class TestFilterTests(unittest.TestCase):
-
@classmethod
def setUpClass(cls):
cls.test_file_explorer = MockTestFileExplorer()
@@ -407,25 +384,27 @@ class TestFilterTests(unittest.TestCase):
def test_cpp_all(self):
config = {"root": "integrationtest.txt"}
- selected, excluded = selector.filter_tests("cpp_integration_test", config, self.test_file_explorer)
+ selected, excluded = selector.filter_tests("cpp_integration_test", config,
+ self.test_file_explorer)
self.assertEqual(["build/testA", "build/testB"], selected)
self.assertEqual([], excluded)
def test_cpp_roots_override(self):
# When roots are specified for cpp tests they override all filtering since
# 'roots' are populated with the command line arguments.
- config = {"include_files": "unknown_file",
- "roots": ["build/testC"]}
+ config = {"include_files": "unknown_file", "roots": ["build/testC"]}
selected, excluded = selector.filter_tests("cpp_unit_test", config, self.test_file_explorer)
self.assertEqual(["build/testC"], selected)
self.assertEqual([], excluded)
- selected, excluded = selector.filter_tests("cpp_integration_test", config, self.test_file_explorer)
+ selected, excluded = selector.filter_tests("cpp_integration_test", config,
+ self.test_file_explorer)
self.assertEqual(["build/testC"], selected)
self.assertEqual([], excluded)
def test_cpp_expand_roots(self):
config = {"root": "integrationtest.txt", "roots": ["build/test*"]}
- selected, excluded = selector.filter_tests("cpp_integration_test", config, self.test_file_explorer)
+ selected, excluded = selector.filter_tests("cpp_integration_test", config,
+ self.test_file_explorer)
self.assertEqual(["build/testA", "build/testB", "build/testC"], selected)
self.assertEqual([], excluded)
@@ -437,60 +416,61 @@ class TestFilterTests(unittest.TestCase):
buildscripts.resmokelib.config.INCLUDE_WITH_ANY_TAGS = ["tag1"]
try:
selector_config = {"root": "unittest.txt"}
- selected, excluded = selector.filter_tests(
- "cpp_unit_test",
- selector_config,
- test_file_explorer=self.test_file_explorer)
+ selected, excluded = selector.filter_tests("cpp_unit_test", selector_config,
+ test_file_explorer=self.test_file_explorer)
self.assertEqual([], selected)
self.assertEqual(["build/testA", "build/testB"], excluded)
finally:
buildscripts.resmokelib.config.INCLUDE_WITH_ANY_TAGS = None
def test_jstest_include_tags(self):
- config = {"roots": ["dir/subdir1/*.js", "dir/subdir2/*.js", "dir/subdir3/a/*.js"],
- "include_tags": "tag1"}
+ config = {
+ "roots": ["dir/subdir1/*.js", "dir/subdir2/*.js", "dir/subdir3/a/*.js"],
+ "include_tags": "tag1"
+ }
selected, excluded = selector.filter_tests("js_test", config, self.test_file_explorer)
self.assertEqual(["dir/subdir1/test11.js"], selected)
- self.assertEqual(["dir/subdir1/test12.js",
- "dir/subdir2/test21.js",
- "dir/subdir3/a/test3a1.js"], excluded)
+ self.assertEqual(
+ ["dir/subdir1/test12.js", "dir/subdir2/test21.js", "dir/subdir3/a/test3a1.js"],
+ excluded)
def test_jstest_exclude_tags(self):
- config = {"roots": ["dir/subdir1/*.js", "dir/subdir2/*.js", "dir/subdir3/a/*.js"],
- "exclude_tags": "tag1"}
+ config = {
+ "roots": ["dir/subdir1/*.js", "dir/subdir2/*.js", "dir/subdir3/a/*.js"],
+ "exclude_tags": "tag1"
+ }
selected, excluded = selector.filter_tests("js_test", config, self.test_file_explorer)
- self.assertEqual(["dir/subdir1/test12.js",
- "dir/subdir2/test21.js",
- "dir/subdir3/a/test3a1.js"], selected)
+ self.assertEqual(
+ ["dir/subdir1/test12.js", "dir/subdir2/test21.js", "dir/subdir3/a/test3a1.js"],
+ selected)
self.assertEqual(["dir/subdir1/test11.js"], excluded)
def test_jstest_force_include(self):
- config = {"roots": ["dir/subdir1/*.js", "dir/subdir2/*.js", "dir/subdir3/a/*.js"],
- "include_files": ["dir/subdir1/*.js"],
- "exclude_tags": "tag1"}
+ config = {
+ "roots": ["dir/subdir1/*.js", "dir/subdir2/*.js", "dir/subdir3/a/*.js"],
+ "include_files": ["dir/subdir1/*.js"], "exclude_tags": "tag1"
+ }
selected, excluded = selector.filter_tests("js_test", config, self.test_file_explorer)
- self.assertEqual(["dir/subdir1/test11.js",
- "dir/subdir1/test12.js"], selected)
- self.assertEqual(["dir/subdir2/test21.js",
- "dir/subdir3/a/test3a1.js"], excluded)
+ self.assertEqual(["dir/subdir1/test11.js", "dir/subdir1/test12.js"], selected)
+ self.assertEqual(["dir/subdir2/test21.js", "dir/subdir3/a/test3a1.js"], excluded)
def test_jstest_all(self):
config = {"roots": ["dir/subdir1/*.js", "dir/subdir2/*.js", "dir/subdir3/a/*.js"]}
selected, excluded = selector.filter_tests("js_test", config, self.test_file_explorer)
- self.assertEqual(["dir/subdir1/test11.js",
- "dir/subdir1/test12.js",
- "dir/subdir2/test21.js",
- "dir/subdir3/a/test3a1.js"], selected)
+ self.assertEqual([
+ "dir/subdir1/test11.js", "dir/subdir1/test12.js", "dir/subdir2/test21.js",
+ "dir/subdir3/a/test3a1.js"
+ ], selected)
self.assertEqual([], excluded)
def test_jstest_include_with_any_tags(self):
- config = {"roots": ["dir/subdir1/*.js", "dir/subdir2/*.js", "dir/subdir3/a/*.js"],
- "include_with_any_tags": ["tag2"]}
+ config = {
+ "roots": ["dir/subdir1/*.js", "dir/subdir2/*.js", "dir/subdir3/a/*.js"],
+ "include_with_any_tags": ["tag2"]
+ }
selected, excluded = selector.filter_tests("js_test", config, self.test_file_explorer)
- self.assertEqual(["dir/subdir1/test11.js",
- "dir/subdir2/test21.js"], selected)
- self.assertEqual(["dir/subdir1/test12.js",
- "dir/subdir3/a/test3a1.js"], excluded)
+ self.assertEqual(["dir/subdir1/test11.js", "dir/subdir2/test21.js"], selected)
+ self.assertEqual(["dir/subdir1/test12.js", "dir/subdir3/a/test3a1.js"], excluded)
def test_jstest_unknown_file(self):
config = {"roots": ["dir/subdir1/*.js", "dir/subdir1/unknown"]}
@@ -498,22 +478,28 @@ class TestFilterTests(unittest.TestCase):
selector.filter_tests("js_test", config, self.test_file_explorer)
def test_json_schema_exclude_files(self):
- config = {"roots": ["dir/subdir1/*.js", "dir/subdir2/*.js", "dir/subdir3/a/*.js"],
- "exclude_files": ["dir/subdir2/test21.js"]}
- selected, excluded = selector.filter_tests("json_schema_test", config, self.test_file_explorer)
- self.assertEqual(["dir/subdir1/test11.js",
- "dir/subdir1/test12.js",
- "dir/subdir3/a/test3a1.js"], selected)
+ config = {
+ "roots": ["dir/subdir1/*.js", "dir/subdir2/*.js", "dir/subdir3/a/*.js"],
+ "exclude_files": ["dir/subdir2/test21.js"]
+ }
+ selected, excluded = selector.filter_tests("json_schema_test", config,
+ self.test_file_explorer)
+ self.assertEqual(
+ ["dir/subdir1/test11.js", "dir/subdir1/test12.js", "dir/subdir3/a/test3a1.js"],
+ selected)
self.assertEqual(["dir/subdir2/test21.js"], excluded)
def test_json_shcema_include_files(self):
- config = {"roots": ["dir/subdir1/*.js", "dir/subdir2/*.js", "dir/subdir3/a/*.js"],
- "include_files": ["dir/subdir2/test21.js"]}
- selected, excluded = selector.filter_tests("json_schema_test", config, self.test_file_explorer)
+ config = {
+ "roots": ["dir/subdir1/*.js", "dir/subdir2/*.js", "dir/subdir3/a/*.js"],
+ "include_files": ["dir/subdir2/test21.js"]
+ }
+ selected, excluded = selector.filter_tests("json_schema_test", config,
+ self.test_file_explorer)
self.assertEqual(["dir/subdir2/test21.js"], selected)
- self.assertEqual(["dir/subdir1/test11.js",
- "dir/subdir1/test12.js",
- "dir/subdir3/a/test3a1.js"], excluded)
+ self.assertEqual(
+ ["dir/subdir1/test11.js", "dir/subdir1/test12.js", "dir/subdir3/a/test3a1.js"],
+ excluded)
def test_db_tests_all(self):
config = {"binary": self.test_file_explorer.binary}
@@ -524,16 +510,16 @@ class TestFilterTests(unittest.TestCase):
def test_db_tests_roots_override(self):
# When roots are specified for db_tests they override all filtering since
# 'roots' are populated with the command line arguments.
- config = {"binary": self.test_file_explorer.binary,
- "include_suites": ["dbtestB"],
- "roots": ["dbtestOverride"]}
+ config = {
+ "binary": self.test_file_explorer.binary, "include_suites": ["dbtestB"],
+ "roots": ["dbtestOverride"]
+ }
selected, excluded = selector.filter_tests("db_test", config, self.test_file_explorer)
self.assertEqual(["dbtestOverride"], selected)
self.assertEqual([], excluded)
def test_db_tests_include_suites(self):
- config = {"binary": self.test_file_explorer.binary,
- "include_suites": ["dbtestB"]}
+ config = {"binary": self.test_file_explorer.binary, "include_suites": ["dbtestB"]}
selected, excluded = selector.filter_tests("db_test", config, self.test_file_explorer)
self.assertEqual(["dbtestB"], selected)
self.assertEqual(["dbtestA", "dbtestC"], excluded)
diff --git a/buildscripts/tests/resmokelib/testing/__init__.py b/buildscripts/tests/resmokelib/testing/__init__.py
index e69de29bb2d..4b7a2bb941b 100644
--- a/buildscripts/tests/resmokelib/testing/__init__.py
+++ b/buildscripts/tests/resmokelib/testing/__init__.py
@@ -0,0 +1 @@
+"""Empty."""
diff --git a/buildscripts/tests/resmokelib/testing/fixtures/test_interface.py b/buildscripts/tests/resmokelib/testing/fixtures/test_interface.py
index f098259860e..ea2ce03b110 100644
--- a/buildscripts/tests/resmokelib/testing/fixtures/test_interface.py
+++ b/buildscripts/tests/resmokelib/testing/fixtures/test_interface.py
@@ -18,7 +18,6 @@ class TestFixture(unittest.TestCase):
class TestFixtureTeardownHandler(unittest.TestCase):
-
def test_teardown_ok(self):
handler = interface.FixtureTeardownHandler(logging.getLogger("handler_unittests"))
# Before any teardown.
diff --git a/buildscripts/tests/resmokelib/testing/hooks/__init__.py b/buildscripts/tests/resmokelib/testing/hooks/__init__.py
index e69de29bb2d..4b7a2bb941b 100644
--- a/buildscripts/tests/resmokelib/testing/hooks/__init__.py
+++ b/buildscripts/tests/resmokelib/testing/hooks/__init__.py
@@ -0,0 +1 @@
+"""Empty."""
diff --git a/buildscripts/tests/resmokelib/testing/hooks/test_combine_benchmark_results.py b/buildscripts/tests/resmokelib/testing/hooks/test_combine_benchmark_results.py
index a24c9d900c2..ad2f331d816 100755
--- a/buildscripts/tests/resmokelib/testing/hooks/test_combine_benchmark_results.py
+++ b/buildscripts/tests/resmokelib/testing/hooks/test_combine_benchmark_results.py
@@ -11,61 +11,35 @@ import buildscripts.resmokelib.config as _config
import buildscripts.resmokelib.testing.hooks.combine_benchmark_results as cbr
_BM_CONTEXT = {
- "date": "2018/01/30-18:40:25",
- "num_cpus": 40,
- "mhz_per_cpu": 4999,
- "cpu_scaling_enabled": False,
- "library_build_type": "debug"
+ "date": "2018/01/30-18:40:25", "num_cpus": 40, "mhz_per_cpu": 4999,
+ "cpu_scaling_enabled": False, "library_build_type": "debug"
}
_BM_REPORT = {
- "name": "BM_Name1",
- "iterations": 1000,
- "real_time": 1202,
- "cpu_time": 1303,
- "bytes_per_second": 1404,
- "items_per_second": 1505,
- "custom_counter_1": 1606
+ "name": "BM_Name1", "iterations": 1000, "real_time": 1202, "cpu_time": 1303,
+ "bytes_per_second": 1404, "items_per_second": 1505, "custom_counter_1": 1606
}
_BM_MEAN_REPORT = {
- "name": "BM_Name1_mean",
- "iterations": 1000,
- "real_time": 1200,
- "cpu_time": 1300,
- "bytes_per_second": 1400,
- "items_per_second": 1500,
- "custom_counter_1": 1600
+ "name": "BM_Name1_mean", "iterations": 1000, "real_time": 1200, "cpu_time": 1300,
+ "bytes_per_second": 1400, "items_per_second": 1500, "custom_counter_1": 1600
}
_BM_MULTITHREAD_REPORT = {
- "name": "BM_Name2/threads:10",
- "iterations": 100,
- "real_time": 202,
- "cpu_time": 303,
- "bytes_per_second": 404,
- "items_per_second": 505,
- "custom_counter_1": 606
+ "name": "BM_Name2/threads:10", "iterations": 100, "real_time": 202, "cpu_time": 303,
+ "bytes_per_second": 404, "items_per_second": 505, "custom_counter_1": 606
}
_BM_MULTITHREAD_MEDIAN_REPORT = {
- "name": "BM_Name2/threads:10_median",
- "iterations": 100,
- "real_time": 200,
- "cpu_time": 300,
- "bytes_per_second": 400,
- "items_per_second": 500,
- "custom_counter_1": 600
+ "name": "BM_Name2/threads:10_median", "iterations": 100, "real_time": 200, "cpu_time": 300,
+ "bytes_per_second": 400, "items_per_second": 500, "custom_counter_1": 600
}
_BM_FULL_REPORT = {
- "context": _BM_CONTEXT,
- "benchmarks": [
- _BM_REPORT,
- _BM_MEAN_REPORT,
- _BM_MULTITHREAD_REPORT,
- _BM_MULTITHREAD_MEDIAN_REPORT
- ]
+ "context":
+ _BM_CONTEXT, "benchmarks": [
+ _BM_REPORT, _BM_MEAN_REPORT, _BM_MULTITHREAD_REPORT, _BM_MULTITHREAD_MEDIAN_REPORT
+ ]
}
# 12/31/2999 @ 11:59pm (UTC)
@@ -91,7 +65,6 @@ class CombineBenchmarkResultsFixture(unittest.TestCase):
class TestCombineBenchmarkResults(CombineBenchmarkResultsFixture):
-
def test_generate_reports(self):
report = self.cbr_hook._generate_perf_plugin_report()
@@ -105,7 +78,6 @@ class TestCombineBenchmarkResults(CombineBenchmarkResultsFixture):
class TestBenchmarkThreadsReport(CombineBenchmarkResultsFixture):
-
def test_thread_from_name(self):
thread = self.bm_threads_report._thread_from_name("BM_Name/arg name:100/threads:10")
self.assertEqual(thread, "10")
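The _thread_from_name test relies on Google Benchmark's convention of appending the thread count to the benchmark name as a "/threads:N" suffix. A self-contained sketch of that parsing (not the hook's implementation; the default of "1" for names without a suffix is an assumption):

    def thread_from_name(name):
        """Return the thread count encoded in a benchmark name, else "1"."""
        for part in name.split("/"):
            if part.startswith("threads:"):
                return part.split(":", 1)[1]
        return "1"

    print(thread_from_name("BM_Name2/threads:10"))               # 10
    print(thread_from_name("BM_Name/arg name:100/threads:10"))   # 10
    print(thread_from_name("BM_Name1"))                          # 1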
diff --git a/buildscripts/tests/test_aws_ec2.py b/buildscripts/tests/test_aws_ec2.py
index be2e3ddef92..3886e5a5b68 100755
--- a/buildscripts/tests/test_aws_ec2.py
+++ b/buildscripts/tests/test_aws_ec2.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-
"""Unit test for buildscripts/aws_ec2.py."""
from __future__ import absolute_import
@@ -23,8 +22,8 @@ class AwsEc2TestCase(unittest.TestCase):
self.security_groups = None
self.expire_dt = datetime.datetime.utcnow() + datetime.timedelta(hours=1)
self.tags = [{"Key": "expire-on", "Value": self.expire_dt.strftime("%Y-%m-%d %H:%M:%S")},
- {"Key": "Name", "Value": "Unittest AWS EC2 Launcher"},
- {"Key": "owner", "Value": ""}]
+ {"Key": "Name",
+ "Value": "Unittest AWS EC2 Launcher"}, {"Key": "owner", "Value": ""}]
def tearDown(self):
for instance in self.launched_instances:
@@ -39,75 +38,48 @@ class AwsEc2Connect(AwsEc2TestCase):
class AwsEc2Launch(AwsEc2TestCase):
def runTest(self):
code, ret = self.aws_ec2.launch_instance(
- ami=self.ami,
- instance_type=self.instance_type,
- key_name=self.key_name,
- security_groups=self.security_groups,
- tags=self.tags)
+ ami=self.ami, instance_type=self.instance_type, key_name=self.key_name,
+ security_groups=self.security_groups, tags=self.tags)
self.assertEqual(0, code, ret)
self.launched_instances.append(ret.instance_id)
code, ret = self.aws_ec2.launch_instance(
- ami=self.ami,
- block_devices={"xvde": 5, "xvdf": 10},
- instance_type=self.instance_type,
- key_name=self.key_name,
- security_groups=self.security_groups,
- tags=self.tags)
+ ami=self.ami, block_devices={"xvde": 5, "xvdf": 10}, instance_type=self.instance_type,
+ key_name=self.key_name, security_groups=self.security_groups, tags=self.tags)
self.assertEqual(0, code, ret)
self.launched_instances.append(ret.instance_id)
code, ret = self.aws_ec2.launch_instance(
- ami=self.ami,
- instance_type=self.instance_type,
- key_name=self.key_name,
- security_groups=self.security_groups,
- tags=self.tags,
- DryRun=True)
+ ami=self.ami, instance_type=self.instance_type, key_name=self.key_name,
+ security_groups=self.security_groups, tags=self.tags, DryRun=True)
self.assertEqual(1, code, ret)
code, ret = self.aws_ec2.launch_instance(
- ami=self.ami,
- instance_type=self.instance_type,
- key_name=self.key_name,
- security_groups=self.security_groups,
- tags=self.tags,
- InvalidParam=True)
+ ami=self.ami, instance_type=self.instance_type, key_name=self.key_name,
+ security_groups=self.security_groups, tags=self.tags, InvalidParam=True)
self.assertEqual(1, code, ret)
code, ret = self.aws_ec2.launch_instance(
- ami="ami-bad_ami",
- instance_type=self.instance_type,
- key_name=self.key_name,
- security_groups=self.security_groups,
- tags=self.tags)
+ ami="ami-bad_ami", instance_type=self.instance_type, key_name=self.key_name,
+ security_groups=self.security_groups, tags=self.tags)
self.assertNotEqual(0, code, ret)
code, ret = self.aws_ec2.launch_instance(
- ami=self.ami,
- instance_type="bad_instance_type",
- key_name=self.key_name,
- security_groups=self.security_groups,
- tags=self.tags)
+ ami=self.ami, instance_type="bad_instance_type", key_name=self.key_name,
+ security_groups=self.security_groups, tags=self.tags)
self.assertNotEqual(0, code, ret)
code, ret = self.aws_ec2.launch_instance(
- ami=self.ami,
- instance_type=self.instance_type,
- key_name="bad_key_name",
- security_groups=self.security_groups,
- tags=self.tags)
+ ami=self.ami, instance_type=self.instance_type, key_name="bad_key_name",
+ security_groups=self.security_groups, tags=self.tags)
self.assertNotEqual(0, code, ret)
class AwsEc2LaunchStatus(AwsEc2TestCase):
def runTest(self):
code, ret = self.aws_ec2.launch_instance(
- ami=self.ami,
- instance_type=self.instance_type,
- key_name=self.key_name,
- security_groups=self.security_groups,
- tags=self.tags)
+ ami=self.ami, instance_type=self.instance_type, key_name=self.key_name,
+ security_groups=self.security_groups, tags=self.tags)
self.assertEqual(0, code, ret)
self.launched_instances.append(ret.instance_id)
self.assertEqual(self.ami, ret.image_id, ret)
@@ -119,12 +91,8 @@ class AwsEc2LaunchStatus(AwsEc2TestCase):
self.assertTrue(tag in ret.tags, ret)
code, ret = self.aws_ec2.launch_instance(
- ami=self.ami,
- instance_type=self.instance_type,
- key_name=self.key_name,
- security_groups=self.security_groups,
- tags=self.tags,
- wait_time_secs=300,
+ ami=self.ami, instance_type=self.instance_type, key_name=self.key_name,
+ security_groups=self.security_groups, tags=self.tags, wait_time_secs=300,
show_progress=True)
self.assertEqual("running", ret.state["Name"], ret)
self.assertIsNotNone(ret.public_ip_address, ret)
@@ -135,17 +103,12 @@ class AwsEc2LaunchStatus(AwsEc2TestCase):
class AwsEc2ControlStatus(AwsEc2TestCase):
def runTest(self):
code, ret = self.aws_ec2.launch_instance(
- ami=self.ami,
- instance_type=self.instance_type,
- key_name=self.key_name,
- security_groups=self.security_groups,
- tags=self.tags)
+ ami=self.ami, instance_type=self.instance_type, key_name=self.key_name,
+ security_groups=self.security_groups, tags=self.tags)
self.assertEqual(0, code, ret)
self.launched_instances.append(ret.instance_id)
- code, ret = self.aws_ec2.control_instance(
- mode="status",
- image_id=ret.instance_id)
+ code, ret = self.aws_ec2.control_instance(mode="status", image_id=ret.instance_id)
self.assertEqual(0, code, ret)
self.assertEqual(self.ami, ret.image_id, ret)
self.assertEqual(self.instance_type, ret.instance_type, ret)
@@ -160,9 +123,7 @@ class AwsEc2ControlStatus(AwsEc2TestCase):
self.assertIsNotNone(ret.state["Name"], ret)
self.assertIsNotNone(ret.tags, ret)
- self.assertRaises(ValueError,
- self.aws_ec2.control_instance,
- mode="bad_mode",
+ self.assertRaises(ValueError, self.aws_ec2.control_instance, mode="bad_mode",
image_id=ret.instance_id)
code, ret = self.aws_ec2.control_instance(mode="status", image_id="bad_id")
@@ -173,11 +134,8 @@ class AwsEc2ControlStatus(AwsEc2TestCase):
class AwsEc2ControlStart(AwsEc2TestCase):
def runTest(self):
code, ret = self.aws_ec2.launch_instance(
- ami=self.ami,
- instance_type=self.instance_type,
- key_name=self.key_name,
- security_groups=self.security_groups,
- tags=self.tags)
+ ami=self.ami, instance_type=self.instance_type, key_name=self.key_name,
+ security_groups=self.security_groups, tags=self.tags)
self.assertEqual(0, code, ret)
self.launched_instances.append(ret.instance_id)
@@ -189,11 +147,8 @@ class AwsEc2ControlStart(AwsEc2TestCase):
class AwsEc2ControlStartReboot(AwsEc2TestCase):
def runTest(self):
code, ret = self.aws_ec2.launch_instance(
- ami=self.ami,
- instance_type=self.instance_type,
- key_name=self.key_name,
- security_groups=self.security_groups,
- tags=self.tags)
+ ami=self.ami, instance_type=self.instance_type, key_name=self.key_name,
+ security_groups=self.security_groups, tags=self.tags)
self.assertEqual(0, code, ret)
self.launched_instances.append(ret.instance_id)
@@ -209,12 +164,8 @@ class AwsEc2ControlStartReboot(AwsEc2TestCase):
class AwsEc2ControlStop(AwsEc2TestCase):
def runTest(self):
code, ret = self.aws_ec2.launch_instance(
- ami=self.ami,
- instance_type=self.instance_type,
- key_name=self.key_name,
- security_groups=self.security_groups,
- tags=self.tags,
- wait_time_secs=60)
+ ami=self.ami, instance_type=self.instance_type, key_name=self.key_name,
+ security_groups=self.security_groups, tags=self.tags, wait_time_secs=60)
self.assertEqual(0, code, ret)
self.launched_instances.append(ret.instance_id)
@@ -234,11 +185,8 @@ class AwsEc2ControlStop(AwsEc2TestCase):
class AwsEc2ControlTerminate(AwsEc2TestCase):
def runTest(self):
code, ret = self.aws_ec2.launch_instance(
- ami=self.ami,
- instance_type=self.instance_type,
- key_name=self.key_name,
- security_groups=self.security_groups,
- tags=self.tags)
+ ami=self.ami, instance_type=self.instance_type, key_name=self.key_name,
+ security_groups=self.security_groups, tags=self.tags)
self.assertEqual(0, code, ret)
self.launched_instances.append(ret.instance_id)
@@ -252,12 +200,9 @@ class AwsEc2ControlTerminate(AwsEc2TestCase):
class AwsEc2TagInstance(AwsEc2TestCase):
def runTest(self):
- code, ret = self.aws_ec2.launch_instance(
- ami=self.ami,
- instance_type=self.instance_type,
- key_name=self.key_name,
- security_groups=self.security_groups,
- tags=[])
+ code, ret = self.aws_ec2.launch_instance(ami=self.ami, instance_type=self.instance_type,
+ key_name=self.key_name,
+ security_groups=self.security_groups, tags=[])
self.assertEqual(0, code, ret)
self.launched_instances.append(ret.instance_id)
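
The rewraps in the hunks above are the output of running yapf over the test files: keyword arguments are packed onto continuation lines and wrapped near the 100-column mark. As a minimal sketch, assuming yapf is installed and that the repository's .style.yapf is the style source (the file path below is only an illustrative target, not something prescribed by this diff), the same in-place reformatting can be reproduced through yapf's library API:

    # Sketch: reformat one buildscripts file in place with yapf's Python API.
    # Assumes yapf is installed; style values are read from .style.yapf.
    from yapf.yapflib.yapf_api import FormatFile

    # in_place=True rewrites the file; FormatFile returns (source, encoding, changed).
    _, _, changed = FormatFile("buildscripts/tests/test_aws_ec2.py",
                               style_config=".style.yapf", in_place=True)
    print("reformatted" if changed else "already formatted")
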
diff --git a/buildscripts/tests/test_fetch_test_lifecycle.py b/buildscripts/tests/test_fetch_test_lifecycle.py
index 8c581fd1b7c..25fac72766b 100644
--- a/buildscripts/tests/test_fetch_test_lifecycle.py
+++ b/buildscripts/tests/test_fetch_test_lifecycle.py
@@ -8,53 +8,42 @@ import buildscripts.fetch_test_lifecycle as fetch
class TestFetchTestLifecycle(unittest.TestCase):
def test_get_metadata_revision(self):
- metadata_repo = MockMetadataRepository([("metadata_revision_05", "mongo_revision_06"),
- ("metadata_revision_04", "mongo_revision_06"),
- ("metadata_revision_03", "mongo_revision_02"),
- ("metadata_revision_02", "mongo_revision_02"),
- ("metadata_revision_01", None)])
-
- mongo_repo = MockMongoRepository(["mongo_revision_07",
- "mongo_revision_06",
- "mongo_revision_05",
- "mongo_revision_04",
- "mongo_revision_03",
- "mongo_revision_02",
- "mongo_revision_01"])
-
- self._check_metadata_revision(metadata_repo, mongo_repo,
- "mongo_revision_07",
+ metadata_repo = MockMetadataRepository(
+ [("metadata_revision_05", "mongo_revision_06"),
+ ("metadata_revision_04", "mongo_revision_06"), ("metadata_revision_03",
+ "mongo_revision_02"),
+ ("metadata_revision_02", "mongo_revision_02"), ("metadata_revision_01", None)])
+
+ mongo_repo = MockMongoRepository([
+ "mongo_revision_07", "mongo_revision_06", "mongo_revision_05", "mongo_revision_04",
+ "mongo_revision_03", "mongo_revision_02", "mongo_revision_01"
+ ])
+
+ self._check_metadata_revision(metadata_repo, mongo_repo, "mongo_revision_07",
"metadata_revision_05")
- self._check_metadata_revision(metadata_repo, mongo_repo,
- "mongo_revision_06",
+ self._check_metadata_revision(metadata_repo, mongo_repo, "mongo_revision_06",
"metadata_revision_05")
- self._check_metadata_revision(metadata_repo, mongo_repo,
- "mongo_revision_05",
+ self._check_metadata_revision(metadata_repo, mongo_repo, "mongo_revision_05",
"metadata_revision_03")
- self._check_metadata_revision(metadata_repo, mongo_repo,
- "mongo_revision_04",
+ self._check_metadata_revision(metadata_repo, mongo_repo, "mongo_revision_04",
"metadata_revision_03")
- self._check_metadata_revision(metadata_repo, mongo_repo,
- "mongo_revision_03",
+ self._check_metadata_revision(metadata_repo, mongo_repo, "mongo_revision_03",
"metadata_revision_03")
- self._check_metadata_revision(metadata_repo, mongo_repo,
- "mongo_revision_02",
+ self._check_metadata_revision(metadata_repo, mongo_repo, "mongo_revision_02",
"metadata_revision_03")
- self._check_metadata_revision(metadata_repo, mongo_repo,
- "mongo_revision_01",
- None)
+ self._check_metadata_revision(metadata_repo, mongo_repo, "mongo_revision_01", None)
def _check_metadata_revision(self, metadata_repo, mongo_repo, mongo_revision,
expected_metadata_revision):
- metadata_revision = fetch._get_metadata_revision(metadata_repo, mongo_repo, "project",
- mongo_revision)
- self.assertEqual(expected_metadata_revision, metadata_revision)
+ metadata_revision = fetch._get_metadata_revision(metadata_repo, mongo_repo, "project",
+ mongo_revision)
+ self.assertEqual(expected_metadata_revision, metadata_revision)
class MockMongoRepository(object):
@@ -62,8 +51,8 @@ class MockMongoRepository(object):
self.revisions = revisions
def is_ancestor(self, parent, child):
- return (parent in self.revisions and child in self.revisions and
- self.revisions.index(parent) >= self.revisions.index(child))
+ return (parent in self.revisions and child in self.revisions
+ and self.revisions.index(parent) >= self.revisions.index(child))
class MockMetadataRepository(object):
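
One stylistic change visible just above is that the wrapped boolean expression in is_ancestor now breaks before the `and` instead of after it; in yapf this placement is governed by the split_before_logical_operator knob. A minimal sketch, assuming yapf is installed, that rewraps a similarly shaped expression so the operator placement can be inspected (the 60-column limit is an arbitrary value chosen to force a split, not the project's setting; the snippet string is only formatted, never executed):

    # Sketch: observe where yapf places a logical operator when a line must split.
    from yapf.yapflib.yapf_api import FormatCode

    SRC = ("ok = (parent in revisions and child in revisions and "
           "revisions.index(parent) >= revisions.index(child))\n")
    # FormatCode returns (formatted_source, changed).
    formatted, _ = FormatCode(SRC, style_config="{based_on_style: pep8, column_limit: 60}")
    print(formatted)
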
diff --git a/buildscripts/tests/test_git.py b/buildscripts/tests/test_git.py
index a9db15ee43c..6a49e894581 100644
--- a/buildscripts/tests/test_git.py
+++ b/buildscripts/tests/test_git.py
@@ -9,7 +9,6 @@ import buildscripts.git as _git
class TestRepository(unittest.TestCase):
-
def setUp(self):
self.subprocess = MockSubprocess()
_git.subprocess = self.subprocess
diff --git a/buildscripts/tests/test_remote_operations.py b/buildscripts/tests/test_remote_operations.py
index b468c45ebee..0218cb28ece 100755
--- a/buildscripts/tests/test_remote_operations.py
+++ b/buildscripts/tests/test_remote_operations.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-
"""Unit test for buildscripts/remote_operations.py.
Note - Tests require sshd to be enabled on localhost with passwordless login

@@ -22,8 +21,8 @@ class RemoteOperationsTestCase(unittest.TestCase):
self.temp_remote_dir = tempfile.mkdtemp()
self.rop = rop.RemoteOperations(user_host="localhost")
self.rop_use_shell = rop.RemoteOperations(user_host="localhost", use_shell=True)
- self.rop_sh_shell_binary = rop.RemoteOperations(
- user_host="localhost", shell_binary="/bin/sh")
+ self.rop_sh_shell_binary = rop.RemoteOperations(user_host="localhost",
+ shell_binary="/bin/sh")
self.rop_ssh_opts = rop.RemoteOperations(
user_host="localhost",
ssh_connection_options="-v -o ConnectTimeout=10 -o ConnectionAttempts=10")
@@ -63,8 +62,8 @@ class RemoteOperationConnection(RemoteOperationsTestCase):
# Valid host with invalid ssh options
ssh_connection_options = "-o invalid"
- remote_op = rop.RemoteOperations(
- user_host="localhost", ssh_connection_options=ssh_connection_options)
+ remote_op = rop.RemoteOperations(user_host="localhost",
+ ssh_connection_options=ssh_connection_options)
ret, buff = remote_op.access_info()
self.assertFalse(remote_op.access_established())
self.assertNotEqual(0, ret)
@@ -79,8 +78,8 @@ class RemoteOperationConnection(RemoteOperationsTestCase):
# Valid host with valid ssh options
ssh_connection_options = "-v -o ConnectTimeout=10 -o ConnectionAttempts=10"
- remote_op = rop.RemoteOperations(
- user_host="localhost", ssh_connection_options=ssh_connection_options)
+ remote_op = rop.RemoteOperations(user_host="localhost",
+ ssh_connection_options=ssh_connection_options)
ret, buff = remote_op.access_info()
self.assertTrue(remote_op.access_established())
self.assertEqual(0, ret)
@@ -95,10 +94,9 @@ class RemoteOperationConnection(RemoteOperationsTestCase):
ssh_connection_options = "-v -o ConnectTimeout=10 -o ConnectionAttempts=10"
ssh_options = "-t"
- remote_op = rop.RemoteOperations(
- user_host="localhost",
- ssh_connection_options=ssh_connection_options,
- ssh_options=ssh_options)
+ remote_op = rop.RemoteOperations(user_host="localhost",
+ ssh_connection_options=ssh_connection_options,
+ ssh_options=ssh_options)
ret, buff = remote_op.access_info()
self.assertTrue(remote_op.access_established())
self.assertEqual(0, ret)
@@ -176,17 +174,13 @@ class RemoteOperationShell(RemoteOperationsTestCase):
self.assertIsNotNone(buff)
# Command with directory and pipe
- ret, buff = self.rop.shell(
- "touch {dir}/{file}; ls {dir} | grep {file}".format(
- file=time.time(),
- dir="/tmp"))
+ ret, buff = self.rop.shell("touch {dir}/{file}; ls {dir} | grep {file}".format(
+ file=time.time(), dir="/tmp"))
self.assertEqual(0, ret)
self.assertIsNotNone(buff)
- ret, buff = self.rop_use_shell.shell(
- "touch {dir}/{file}; ls {dir} | grep {file}".format(
- file=time.time(),
- dir="/tmp"))
+ ret, buff = self.rop_use_shell.shell("touch {dir}/{file}; ls {dir} | grep {file}".format(
+ file=time.time(), dir="/tmp"))
self.assertEqual(0, ret)
self.assertIsNotNone(buff)
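
The same machinery can also check formatting without rewriting files, which is how a lint-style gate would typically consume yapf. A minimal sketch, assuming yapf is installed; the file list and style path are illustrative:

    # Sketch: report files whose formatting differs from the configured style.
    from yapf.yapflib.yapf_api import FormatFile

    FILES = ["buildscripts/tests/test_remote_operations.py"]  # illustrative list
    for path in FILES:
        # print_diff=True returns a unified diff instead of rewriting the file.
        diff, _, changed = FormatFile(path, style_config=".style.yapf", print_diff=True)
        if changed:
            print(diff)
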
diff --git a/buildscripts/tests/test_test_failures.py b/buildscripts/tests/test_test_failures.py
index 0a2c570897b..84bfce21317 100644
--- a/buildscripts/tests/test_test_failures.py
+++ b/buildscripts/tests/test_test_failures.py
@@ -15,14 +15,10 @@ class TestReportEntry(unittest.TestCase):
Tests for the test_failures.ReportEntry class.
"""
- ENTRY = test_failures.ReportEntry(test="jstests/core/all.js",
- task="jsCore_WT",
- variant="linux-64",
- distro="rhel62",
- start_date=datetime.date(2017, 6, 3),
- end_date=datetime.date(2017, 6, 3),
- num_pass=0,
- num_fail=0)
+ ENTRY = test_failures.ReportEntry(test="jstests/core/all.js", task="jsCore_WT",
+ variant="linux-64", distro="rhel62", start_date=datetime.date(
+ 2017, 6, 3), end_date=datetime.date(2017, 6, 3),
+ num_pass=0, num_fail=0)
def test_fail_rate(self):
"""
@@ -110,25 +106,21 @@ class TestReportEntry(unittest.TestCase):
and num_fail attributes are accumulated correctly.
"""
- entry1 = self.ENTRY._replace(start_date=datetime.date(2017, 6, 1),
- end_date=datetime.date(2017, 6, 1),
- num_pass=1,
- num_fail=0)
+ entry1 = self.ENTRY._replace(
+ start_date=datetime.date(2017, 6, 1), end_date=datetime.date(2017, 6, 1), num_pass=1,
+ num_fail=0)
- entry2 = self.ENTRY._replace(start_date=datetime.date(2017, 6, 2),
- end_date=datetime.date(2017, 6, 2),
- num_pass=0,
- num_fail=3)
+ entry2 = self.ENTRY._replace(
+ start_date=datetime.date(2017, 6, 2), end_date=datetime.date(2017, 6, 2), num_pass=0,
+ num_fail=3)
- entry3 = self.ENTRY._replace(start_date=datetime.date(2017, 6, 3),
- end_date=datetime.date(2017, 6, 3),
- num_pass=0,
- num_fail=0)
+ entry3 = self.ENTRY._replace(
+ start_date=datetime.date(2017, 6, 3), end_date=datetime.date(2017, 6, 3), num_pass=0,
+ num_fail=0)
- entry4 = self.ENTRY._replace(start_date=datetime.date(2017, 6, 4),
- end_date=datetime.date(2017, 6, 4),
- num_pass=2,
- num_fail=2)
+ entry4 = self.ENTRY._replace(
+ start_date=datetime.date(2017, 6, 4), end_date=datetime.date(2017, 6, 4), num_pass=2,
+ num_fail=2)
entry_1234 = test_failures.ReportEntry.sum([entry1, entry2, entry3, entry4])
entry_1432 = test_failures.ReportEntry.sum([entry1, entry4, entry3, entry2])
@@ -160,25 +152,17 @@ class TestReportEntry(unittest.TestCase):
attributes are accumulated correctly.
"""
- entry1 = self.ENTRY._replace(test="jstests/core/all.js",
- task="jsCore_WT",
- variant="linux-64",
- distro="rhel62")
+ entry1 = self.ENTRY._replace(test="jstests/core/all.js", task="jsCore_WT",
+ variant="linux-64", distro="rhel62")
- entry2 = self.ENTRY._replace(test="jstests/core/all.js",
- task="jsCore_WT",
- variant="linux-64",
- distro="rhel55")
+ entry2 = self.ENTRY._replace(test="jstests/core/all.js", task="jsCore_WT",
+ variant="linux-64", distro="rhel55")
- entry3 = self.ENTRY._replace(test="jstests/core/all2.js",
- task="jsCore_WT",
- variant="linux-64-debug",
- distro="rhel62")
+ entry3 = self.ENTRY._replace(test="jstests/core/all2.js", task="jsCore_WT",
+ variant="linux-64-debug", distro="rhel62")
- entry4 = self.ENTRY._replace(test="jstests/core/all.js",
- task="jsCore",
- variant="linux-64-debug",
- distro="rhel62")
+ entry4 = self.ENTRY._replace(test="jstests/core/all.js", task="jsCore",
+ variant="linux-64-debug", distro="rhel62")
entry_12 = test_failures.ReportEntry.sum([entry1, entry2])
self.assertEqual("jstests/core/all.js", entry_12.test)
@@ -210,50 +194,31 @@ class TestReportSummarization(unittest.TestCase):
Tests for test_failures.Report.summarize_by().
"""
- ENTRY = test_failures.ReportEntry(test="jstests/core/all.js",
- task="jsCore_WT",
- variant="linux-64",
- distro="rhel62",
- start_date=datetime.date(2017, 6, 3),
- end_date=datetime.date(2017, 6, 3),
- num_pass=0,
- num_fail=0)
+ ENTRY = test_failures.ReportEntry(test="jstests/core/all.js", task="jsCore_WT",
+ variant="linux-64", distro="rhel62", start_date=datetime.date(
+ 2017, 6, 3), end_date=datetime.date(2017, 6, 3),
+ num_pass=0, num_fail=0)
ENTRIES = [
- ENTRY._replace(start_date=datetime.date(2017, 6, 3),
- end_date=datetime.date(2017, 6, 3),
- num_pass=1,
- num_fail=0),
- ENTRY._replace(task="jsCore",
- start_date=datetime.date(2017, 6, 5),
- end_date=datetime.date(2017, 6, 5),
- num_pass=0,
- num_fail=1),
- ENTRY._replace(start_date=datetime.date(2017, 6, 10),
- end_date=datetime.date(2017, 6, 10),
- num_pass=1,
- num_fail=0),
+ ENTRY._replace(
+ start_date=datetime.date(2017, 6, 3), end_date=datetime.date(2017, 6, 3), num_pass=1,
+ num_fail=0),
+ ENTRY._replace(task="jsCore", start_date=datetime.date(2017, 6, 5), end_date=datetime.date(
+ 2017, 6, 5), num_pass=0, num_fail=1),
+ ENTRY._replace(
+ start_date=datetime.date(2017, 6, 10), end_date=datetime.date(2017, 6, 10), num_pass=1,
+ num_fail=0),
# The following entry is intentionally not in timestamp order to verify that the
# 'time_period' parameter becomes part of the sort in summarize_by().
- ENTRY._replace(start_date=datetime.date(2017, 6, 9),
- end_date=datetime.date(2017, 6, 9),
- num_pass=1,
- num_fail=0),
- ENTRY._replace(distro="rhel55",
- start_date=datetime.date(2017, 6, 10),
- end_date=datetime.date(2017, 6, 10),
- num_pass=0,
- num_fail=1),
- ENTRY._replace(test="jstests/core/all2.js",
- start_date=datetime.date(2017, 6, 10),
- end_date=datetime.date(2017, 6, 10),
- num_pass=1,
- num_fail=0),
- ENTRY._replace(variant="linux-64-debug",
- start_date=datetime.date(2017, 6, 17),
- end_date=datetime.date(2017, 6, 17),
- num_pass=0,
- num_fail=1),
+ ENTRY._replace(
+ start_date=datetime.date(2017, 6, 9), end_date=datetime.date(2017, 6, 9), num_pass=1,
+ num_fail=0),
+ ENTRY._replace(distro="rhel55", start_date=datetime.date(2017, 6, 10),
+ end_date=datetime.date(2017, 6, 10), num_pass=0, num_fail=1),
+ ENTRY._replace(test="jstests/core/all2.js", start_date=datetime.date(2017, 6, 10),
+ end_date=datetime.date(2017, 6, 10), num_pass=1, num_fail=0),
+ ENTRY._replace(variant="linux-64-debug", start_date=datetime.date(2017, 6, 17),
+ end_date=datetime.date(2017, 6, 17), num_pass=0, num_fail=1),
]
def test_group_all_by_test_task_variant_distro(self):
@@ -265,40 +230,45 @@ class TestReportSummarization(unittest.TestCase):
report = test_failures.Report(self.ENTRIES)
summed_entries = report.summarize_by(test_failures.Report.TEST_TASK_VARIANT_DISTRO)
self.assertEqual(5, len(summed_entries))
- self.assertEqual(summed_entries[0], self.ENTRY._replace(
- task="jsCore",
- start_date=datetime.date(2017, 6, 5),
- end_date=datetime.date(2017, 6, 5),
- num_pass=0,
- num_fail=1,
- ))
- self.assertEqual(summed_entries[1], self.ENTRY._replace(
- distro="rhel55",
- start_date=datetime.date(2017, 6, 10),
- end_date=datetime.date(2017, 6, 10),
- num_pass=0,
- num_fail=1,
- ))
- self.assertEqual(summed_entries[2], self.ENTRY._replace(
- start_date=datetime.date(2017, 6, 3),
- end_date=datetime.date(2017, 6, 10),
- num_pass=3,
- num_fail=0,
- ))
- self.assertEqual(summed_entries[3], self.ENTRY._replace(
- variant="linux-64-debug",
- start_date=datetime.date(2017, 6, 17),
- end_date=datetime.date(2017, 6, 17),
- num_pass=0,
- num_fail=1,
- ))
- self.assertEqual(summed_entries[4], self.ENTRY._replace(
- test="jstests/core/all2.js",
- start_date=datetime.date(2017, 6, 10),
- end_date=datetime.date(2017, 6, 10),
- num_pass=1,
- num_fail=0,
- ))
+ self.assertEqual(summed_entries[0],
+ self.ENTRY._replace(
+ task="jsCore",
+ start_date=datetime.date(2017, 6, 5),
+ end_date=datetime.date(2017, 6, 5),
+ num_pass=0,
+ num_fail=1,
+ ))
+ self.assertEqual(summed_entries[1],
+ self.ENTRY._replace(
+ distro="rhel55",
+ start_date=datetime.date(2017, 6, 10),
+ end_date=datetime.date(2017, 6, 10),
+ num_pass=0,
+ num_fail=1,
+ ))
+ self.assertEqual(summed_entries[2],
+ self.ENTRY._replace(
+ start_date=datetime.date(2017, 6, 3),
+ end_date=datetime.date(2017, 6, 10),
+ num_pass=3,
+ num_fail=0,
+ ))
+ self.assertEqual(summed_entries[3],
+ self.ENTRY._replace(
+ variant="linux-64-debug",
+ start_date=datetime.date(2017, 6, 17),
+ end_date=datetime.date(2017, 6, 17),
+ num_pass=0,
+ num_fail=1,
+ ))
+ self.assertEqual(summed_entries[4],
+ self.ENTRY._replace(
+ test="jstests/core/all2.js",
+ start_date=datetime.date(2017, 6, 10),
+ end_date=datetime.date(2017, 6, 10),
+ num_pass=1,
+ num_fail=0,
+ ))
def test_group_all_by_test_task_variant(self):
"""
@@ -309,34 +279,38 @@ class TestReportSummarization(unittest.TestCase):
report = test_failures.Report(self.ENTRIES)
summed_entries = report.summarize_by(test_failures.Report.TEST_TASK_VARIANT)
self.assertEqual(4, len(summed_entries))
- self.assertEqual(summed_entries[0], self.ENTRY._replace(
- task="jsCore",
- start_date=datetime.date(2017, 6, 5),
- end_date=datetime.date(2017, 6, 5),
- num_pass=0,
- num_fail=1,
- ))
- self.assertEqual(summed_entries[1], self.ENTRY._replace(
- distro=test_failures.Wildcard("distros"),
- start_date=datetime.date(2017, 6, 3),
- end_date=datetime.date(2017, 6, 10),
- num_pass=3,
- num_fail=1,
- ))
- self.assertEqual(summed_entries[2], self.ENTRY._replace(
- variant="linux-64-debug",
- start_date=datetime.date(2017, 6, 17),
- end_date=datetime.date(2017, 6, 17),
- num_pass=0,
- num_fail=1,
- ))
- self.assertEqual(summed_entries[3], self.ENTRY._replace(
- test="jstests/core/all2.js",
- start_date=datetime.date(2017, 6, 10),
- end_date=datetime.date(2017, 6, 10),
- num_pass=1,
- num_fail=0,
- ))
+ self.assertEqual(summed_entries[0],
+ self.ENTRY._replace(
+ task="jsCore",
+ start_date=datetime.date(2017, 6, 5),
+ end_date=datetime.date(2017, 6, 5),
+ num_pass=0,
+ num_fail=1,
+ ))
+ self.assertEqual(summed_entries[1],
+ self.ENTRY._replace(
+ distro=test_failures.Wildcard("distros"),
+ start_date=datetime.date(2017, 6, 3),
+ end_date=datetime.date(2017, 6, 10),
+ num_pass=3,
+ num_fail=1,
+ ))
+ self.assertEqual(summed_entries[2],
+ self.ENTRY._replace(
+ variant="linux-64-debug",
+ start_date=datetime.date(2017, 6, 17),
+ end_date=datetime.date(2017, 6, 17),
+ num_pass=0,
+ num_fail=1,
+ ))
+ self.assertEqual(summed_entries[3],
+ self.ENTRY._replace(
+ test="jstests/core/all2.js",
+ start_date=datetime.date(2017, 6, 10),
+ end_date=datetime.date(2017, 6, 10),
+ num_pass=1,
+ num_fail=0,
+ ))
def test_group_all_by_test_task(self):
"""
@@ -346,28 +320,31 @@ class TestReportSummarization(unittest.TestCase):
report = test_failures.Report(self.ENTRIES)
summed_entries = report.summarize_by(test_failures.Report.TEST_TASK)
self.assertEqual(3, len(summed_entries))
- self.assertEqual(summed_entries[0], self.ENTRY._replace(
- task="jsCore",
- start_date=datetime.date(2017, 6, 5),
- end_date=datetime.date(2017, 6, 5),
- num_pass=0,
- num_fail=1,
- ))
- self.assertEqual(summed_entries[1], self.ENTRY._replace(
- variant=test_failures.Wildcard("variants"),
- distro=test_failures.Wildcard("distros"),
- start_date=datetime.date(2017, 6, 3),
- end_date=datetime.date(2017, 6, 17),
- num_pass=3,
- num_fail=2,
- ))
- self.assertEqual(summed_entries[2], self.ENTRY._replace(
- test="jstests/core/all2.js",
- start_date=datetime.date(2017, 6, 10),
- end_date=datetime.date(2017, 6, 10),
- num_pass=1,
- num_fail=0,
- ))
+ self.assertEqual(summed_entries[0],
+ self.ENTRY._replace(
+ task="jsCore",
+ start_date=datetime.date(2017, 6, 5),
+ end_date=datetime.date(2017, 6, 5),
+ num_pass=0,
+ num_fail=1,
+ ))
+ self.assertEqual(summed_entries[1],
+ self.ENTRY._replace(
+ variant=test_failures.Wildcard("variants"),
+ distro=test_failures.Wildcard("distros"),
+ start_date=datetime.date(2017, 6, 3),
+ end_date=datetime.date(2017, 6, 17),
+ num_pass=3,
+ num_fail=2,
+ ))
+ self.assertEqual(summed_entries[2],
+ self.ENTRY._replace(
+ test="jstests/core/all2.js",
+ start_date=datetime.date(2017, 6, 10),
+ end_date=datetime.date(2017, 6, 10),
+ num_pass=1,
+ num_fail=0,
+ ))
def test_group_all_by_test(self):
"""
@@ -377,22 +354,24 @@ class TestReportSummarization(unittest.TestCase):
report = test_failures.Report(self.ENTRIES)
summed_entries = report.summarize_by(test_failures.Report.TEST)
self.assertEqual(2, len(summed_entries))
- self.assertEqual(summed_entries[0], self.ENTRY._replace(
- task=test_failures.Wildcard("tasks"),
- variant=test_failures.Wildcard("variants"),
- distro=test_failures.Wildcard("distros"),
- start_date=datetime.date(2017, 6, 3),
- end_date=datetime.date(2017, 6, 17),
- num_pass=3,
- num_fail=3,
- ))
- self.assertEqual(summed_entries[1], self.ENTRY._replace(
- test="jstests/core/all2.js",
- start_date=datetime.date(2017, 6, 10),
- end_date=datetime.date(2017, 6, 10),
- num_pass=1,
- num_fail=0,
- ))
+ self.assertEqual(summed_entries[0],
+ self.ENTRY._replace(
+ task=test_failures.Wildcard("tasks"),
+ variant=test_failures.Wildcard("variants"),
+ distro=test_failures.Wildcard("distros"),
+ start_date=datetime.date(2017, 6, 3),
+ end_date=datetime.date(2017, 6, 17),
+ num_pass=3,
+ num_fail=3,
+ ))
+ self.assertEqual(summed_entries[1],
+ self.ENTRY._replace(
+ test="jstests/core/all2.js",
+ start_date=datetime.date(2017, 6, 10),
+ end_date=datetime.date(2017, 6, 10),
+ num_pass=1,
+ num_fail=0,
+ ))
def test_group_all_by_variant_task(self):
"""
@@ -402,28 +381,31 @@ class TestReportSummarization(unittest.TestCase):
report = test_failures.Report(self.ENTRIES)
summed_entries = report.summarize_by(["variant", "task"])
self.assertEqual(3, len(summed_entries))
- self.assertEqual(summed_entries[0], self.ENTRY._replace(
- task="jsCore",
- start_date=datetime.date(2017, 6, 5),
- end_date=datetime.date(2017, 6, 5),
- num_pass=0,
- num_fail=1,
- ))
- self.assertEqual(summed_entries[1], self.ENTRY._replace(
- test=test_failures.Wildcard("tests"),
- distro=test_failures.Wildcard("distros"),
- start_date=datetime.date(2017, 6, 3),
- end_date=datetime.date(2017, 6, 10),
- num_pass=4,
- num_fail=1,
- ))
- self.assertEqual(summed_entries[2], self.ENTRY._replace(
- variant="linux-64-debug",
- start_date=datetime.date(2017, 6, 17),
- end_date=datetime.date(2017, 6, 17),
- num_pass=0,
- num_fail=1,
- ))
+ self.assertEqual(summed_entries[0],
+ self.ENTRY._replace(
+ task="jsCore",
+ start_date=datetime.date(2017, 6, 5),
+ end_date=datetime.date(2017, 6, 5),
+ num_pass=0,
+ num_fail=1,
+ ))
+ self.assertEqual(summed_entries[1],
+ self.ENTRY._replace(
+ test=test_failures.Wildcard("tests"),
+ distro=test_failures.Wildcard("distros"),
+ start_date=datetime.date(2017, 6, 3),
+ end_date=datetime.date(2017, 6, 10),
+ num_pass=4,
+ num_fail=1,
+ ))
+ self.assertEqual(summed_entries[2],
+ self.ENTRY._replace(
+ variant="linux-64-debug",
+ start_date=datetime.date(2017, 6, 17),
+ end_date=datetime.date(2017, 6, 17),
+ num_pass=0,
+ num_fail=1,
+ ))
def test_group_weekly_by_test_starting_on_sunday(self):
"""
@@ -437,34 +419,38 @@ class TestReportSummarization(unittest.TestCase):
start_day_of_week=test_failures.Report.SUNDAY)
self.assertEqual(4, len(summed_entries))
- self.assertEqual(summed_entries[0], self.ENTRY._replace(
- start_date=datetime.date(2017, 6, 3),
- end_date=datetime.date(2017, 6, 3),
- num_pass=1,
- num_fail=0,
- ))
- self.assertEqual(summed_entries[1], self.ENTRY._replace(
- task=test_failures.Wildcard("tasks"),
- distro=test_failures.Wildcard("distros"),
- start_date=datetime.date(2017, 6, 4),
- end_date=datetime.date(2017, 6, 10),
- num_pass=2,
- num_fail=2,
- ))
- self.assertEqual(summed_entries[2], self.ENTRY._replace(
- variant="linux-64-debug",
- start_date=datetime.date(2017, 6, 11),
- end_date=datetime.date(2017, 6, 17),
- num_pass=0,
- num_fail=1,
- ))
- self.assertEqual(summed_entries[3], self.ENTRY._replace(
- test="jstests/core/all2.js",
- start_date=datetime.date(2017, 6, 4),
- end_date=datetime.date(2017, 6, 10),
- num_pass=1,
- num_fail=0,
- ))
+ self.assertEqual(summed_entries[0],
+ self.ENTRY._replace(
+ start_date=datetime.date(2017, 6, 3),
+ end_date=datetime.date(2017, 6, 3),
+ num_pass=1,
+ num_fail=0,
+ ))
+ self.assertEqual(summed_entries[1],
+ self.ENTRY._replace(
+ task=test_failures.Wildcard("tasks"),
+ distro=test_failures.Wildcard("distros"),
+ start_date=datetime.date(2017, 6, 4),
+ end_date=datetime.date(2017, 6, 10),
+ num_pass=2,
+ num_fail=2,
+ ))
+ self.assertEqual(summed_entries[2],
+ self.ENTRY._replace(
+ variant="linux-64-debug",
+ start_date=datetime.date(2017, 6, 11),
+ end_date=datetime.date(2017, 6, 17),
+ num_pass=0,
+ num_fail=1,
+ ))
+ self.assertEqual(summed_entries[3],
+ self.ENTRY._replace(
+ test="jstests/core/all2.js",
+ start_date=datetime.date(2017, 6, 4),
+ end_date=datetime.date(2017, 6, 10),
+ num_pass=1,
+ num_fail=0,
+ ))
def test_group_weekly_by_test_starting_on_monday(self):
"""
@@ -478,34 +464,38 @@ class TestReportSummarization(unittest.TestCase):
start_day_of_week=test_failures.Report.MONDAY)
self.assertEqual(4, len(summed_entries))
- self.assertEqual(summed_entries[0], self.ENTRY._replace(
- start_date=datetime.date(2017, 6, 3),
- end_date=datetime.date(2017, 6, 4),
- num_pass=1,
- num_fail=0,
- ))
- self.assertEqual(summed_entries[1], self.ENTRY._replace(
- task=test_failures.Wildcard("tasks"),
- distro=test_failures.Wildcard("distros"),
- start_date=datetime.date(2017, 6, 5),
- end_date=datetime.date(2017, 6, 11),
- num_pass=2,
- num_fail=2,
- ))
- self.assertEqual(summed_entries[2], self.ENTRY._replace(
- variant="linux-64-debug",
- start_date=datetime.date(2017, 6, 12),
- end_date=datetime.date(2017, 6, 17),
- num_pass=0,
- num_fail=1,
- ))
- self.assertEqual(summed_entries[3], self.ENTRY._replace(
- test="jstests/core/all2.js",
- start_date=datetime.date(2017, 6, 5),
- end_date=datetime.date(2017, 6, 11),
- num_pass=1,
- num_fail=0,
- ))
+ self.assertEqual(summed_entries[0],
+ self.ENTRY._replace(
+ start_date=datetime.date(2017, 6, 3),
+ end_date=datetime.date(2017, 6, 4),
+ num_pass=1,
+ num_fail=0,
+ ))
+ self.assertEqual(summed_entries[1],
+ self.ENTRY._replace(
+ task=test_failures.Wildcard("tasks"),
+ distro=test_failures.Wildcard("distros"),
+ start_date=datetime.date(2017, 6, 5),
+ end_date=datetime.date(2017, 6, 11),
+ num_pass=2,
+ num_fail=2,
+ ))
+ self.assertEqual(summed_entries[2],
+ self.ENTRY._replace(
+ variant="linux-64-debug",
+ start_date=datetime.date(2017, 6, 12),
+ end_date=datetime.date(2017, 6, 17),
+ num_pass=0,
+ num_fail=1,
+ ))
+ self.assertEqual(summed_entries[3],
+ self.ENTRY._replace(
+ test="jstests/core/all2.js",
+ start_date=datetime.date(2017, 6, 5),
+ end_date=datetime.date(2017, 6, 11),
+ num_pass=1,
+ num_fail=0,
+ ))
def test_group_weekly_by_test_starting_on_date(self):
"""
@@ -522,34 +512,38 @@ class TestReportSummarization(unittest.TestCase):
start_day_of_week=date)
self.assertEqual(4, len(summed_entries))
- self.assertEqual(summed_entries[0], self.ENTRY._replace(
- task=test_failures.Wildcard("tasks"),
- start_date=datetime.date(2017, 6, 3),
- end_date=datetime.date(2017, 6, 6),
- num_pass=1,
- num_fail=1,
- ))
- self.assertEqual(summed_entries[1], self.ENTRY._replace(
- distro=test_failures.Wildcard("distros"),
- start_date=datetime.date(2017, 6, 7),
- end_date=datetime.date(2017, 6, 13),
- num_pass=2,
- num_fail=1,
- ))
- self.assertEqual(summed_entries[2], self.ENTRY._replace(
- variant="linux-64-debug",
- start_date=datetime.date(2017, 6, 14),
- end_date=datetime.date(2017, 6, 17),
- num_pass=0,
- num_fail=1,
- ))
- self.assertEqual(summed_entries[3], self.ENTRY._replace(
- test="jstests/core/all2.js",
- start_date=datetime.date(2017, 6, 7),
- end_date=datetime.date(2017, 6, 13),
- num_pass=1,
- num_fail=0,
- ))
+ self.assertEqual(summed_entries[0],
+ self.ENTRY._replace(
+ task=test_failures.Wildcard("tasks"),
+ start_date=datetime.date(2017, 6, 3),
+ end_date=datetime.date(2017, 6, 6),
+ num_pass=1,
+ num_fail=1,
+ ))
+ self.assertEqual(summed_entries[1],
+ self.ENTRY._replace(
+ distro=test_failures.Wildcard("distros"),
+ start_date=datetime.date(2017, 6, 7),
+ end_date=datetime.date(2017, 6, 13),
+ num_pass=2,
+ num_fail=1,
+ ))
+ self.assertEqual(summed_entries[2],
+ self.ENTRY._replace(
+ variant="linux-64-debug",
+ start_date=datetime.date(2017, 6, 14),
+ end_date=datetime.date(2017, 6, 17),
+ num_pass=0,
+ num_fail=1,
+ ))
+ self.assertEqual(summed_entries[3],
+ self.ENTRY._replace(
+ test="jstests/core/all2.js",
+ start_date=datetime.date(2017, 6, 7),
+ end_date=datetime.date(2017, 6, 13),
+ num_pass=1,
+ num_fail=0,
+ ))
def test_group_daily_by_test(self):
"""
@@ -561,46 +555,52 @@ class TestReportSummarization(unittest.TestCase):
time_period=test_failures.Report.DAILY)
self.assertEqual(6, len(summed_entries))
- self.assertEqual(summed_entries[0], self.ENTRY._replace(
- start_date=datetime.date(2017, 6, 3),
- end_date=datetime.date(2017, 6, 3),
- num_pass=1,
- num_fail=0,
- ))
- self.assertEqual(summed_entries[1], self.ENTRY._replace(
- task="jsCore",
- start_date=datetime.date(2017, 6, 5),
- end_date=datetime.date(2017, 6, 5),
- num_pass=0,
- num_fail=1,
- ))
- self.assertEqual(summed_entries[2], self.ENTRY._replace(
- start_date=datetime.date(2017, 6, 9),
- end_date=datetime.date(2017, 6, 9),
- num_pass=1,
- num_fail=0,
- ))
- self.assertEqual(summed_entries[3], self.ENTRY._replace(
- distro=test_failures.Wildcard("distros"),
- start_date=datetime.date(2017, 6, 10),
- end_date=datetime.date(2017, 6, 10),
- num_pass=1,
- num_fail=1,
- ))
- self.assertEqual(summed_entries[4], self.ENTRY._replace(
- variant="linux-64-debug",
- start_date=datetime.date(2017, 6, 17),
- end_date=datetime.date(2017, 6, 17),
- num_pass=0,
- num_fail=1,
- ))
- self.assertEqual(summed_entries[5], self.ENTRY._replace(
- test="jstests/core/all2.js",
- start_date=datetime.date(2017, 6, 10),
- end_date=datetime.date(2017, 6, 10),
- num_pass=1,
- num_fail=0,
- ))
+ self.assertEqual(summed_entries[0],
+ self.ENTRY._replace(
+ start_date=datetime.date(2017, 6, 3),
+ end_date=datetime.date(2017, 6, 3),
+ num_pass=1,
+ num_fail=0,
+ ))
+ self.assertEqual(summed_entries[1],
+ self.ENTRY._replace(
+ task="jsCore",
+ start_date=datetime.date(2017, 6, 5),
+ end_date=datetime.date(2017, 6, 5),
+ num_pass=0,
+ num_fail=1,
+ ))
+ self.assertEqual(summed_entries[2],
+ self.ENTRY._replace(
+ start_date=datetime.date(2017, 6, 9),
+ end_date=datetime.date(2017, 6, 9),
+ num_pass=1,
+ num_fail=0,
+ ))
+ self.assertEqual(summed_entries[3],
+ self.ENTRY._replace(
+ distro=test_failures.Wildcard("distros"),
+ start_date=datetime.date(2017, 6, 10),
+ end_date=datetime.date(2017, 6, 10),
+ num_pass=1,
+ num_fail=1,
+ ))
+ self.assertEqual(summed_entries[4],
+ self.ENTRY._replace(
+ variant="linux-64-debug",
+ start_date=datetime.date(2017, 6, 17),
+ end_date=datetime.date(2017, 6, 17),
+ num_pass=0,
+ num_fail=1,
+ ))
+ self.assertEqual(summed_entries[5],
+ self.ENTRY._replace(
+ test="jstests/core/all2.js",
+ start_date=datetime.date(2017, 6, 10),
+ end_date=datetime.date(2017, 6, 10),
+ num_pass=1,
+ num_fail=0,
+ ))
def test_group_4days_by_test(self):
"""
@@ -612,34 +612,38 @@ class TestReportSummarization(unittest.TestCase):
time_period=datetime.timedelta(days=4))
self.assertEqual(4, len(summed_entries))
- self.assertEqual(summed_entries[0], self.ENTRY._replace(
- task=test_failures.Wildcard("tasks"),
- start_date=datetime.date(2017, 6, 3),
- end_date=datetime.date(2017, 6, 6),
- num_pass=1,
- num_fail=1,
- ))
- self.assertEqual(summed_entries[1], self.ENTRY._replace(
- distro=test_failures.Wildcard("distros"),
- start_date=datetime.date(2017, 6, 7),
- end_date=datetime.date(2017, 6, 10),
- num_pass=2,
- num_fail=1,
- ))
- self.assertEqual(summed_entries[2], self.ENTRY._replace(
- variant="linux-64-debug",
- start_date=datetime.date(2017, 6, 15),
- end_date=datetime.date(2017, 6, 17),
- num_pass=0,
- num_fail=1,
- ))
- self.assertEqual(summed_entries[3], self.ENTRY._replace(
- test="jstests/core/all2.js",
- start_date=datetime.date(2017, 6, 7),
- end_date=datetime.date(2017, 6, 10),
- num_pass=1,
- num_fail=0,
- ))
+ self.assertEqual(summed_entries[0],
+ self.ENTRY._replace(
+ task=test_failures.Wildcard("tasks"),
+ start_date=datetime.date(2017, 6, 3),
+ end_date=datetime.date(2017, 6, 6),
+ num_pass=1,
+ num_fail=1,
+ ))
+ self.assertEqual(summed_entries[1],
+ self.ENTRY._replace(
+ distro=test_failures.Wildcard("distros"),
+ start_date=datetime.date(2017, 6, 7),
+ end_date=datetime.date(2017, 6, 10),
+ num_pass=2,
+ num_fail=1,
+ ))
+ self.assertEqual(summed_entries[2],
+ self.ENTRY._replace(
+ variant="linux-64-debug",
+ start_date=datetime.date(2017, 6, 15),
+ end_date=datetime.date(2017, 6, 17),
+ num_pass=0,
+ num_fail=1,
+ ))
+ self.assertEqual(summed_entries[3],
+ self.ENTRY._replace(
+ test="jstests/core/all2.js",
+ start_date=datetime.date(2017, 6, 7),
+ end_date=datetime.date(2017, 6, 10),
+ num_pass=1,
+ num_fail=0,
+ ))
def test_group_9days_by_test(self):
"""
@@ -652,25 +656,28 @@ class TestReportSummarization(unittest.TestCase):
time_period=datetime.timedelta(days=9))
self.assertEqual(3, len(summed_entries))
- self.assertEqual(summed_entries[0], self.ENTRY._replace(
- task=test_failures.Wildcard("tasks"),
- distro=test_failures.Wildcard("distros"),
- start_date=datetime.date(2017, 6, 3),
- end_date=datetime.date(2017, 6, 11),
- num_pass=3,
- num_fail=2,
- ))
- self.assertEqual(summed_entries[1], self.ENTRY._replace(
- variant="linux-64-debug",
- start_date=datetime.date(2017, 6, 12),
- end_date=datetime.date(2017, 6, 17),
- num_pass=0,
- num_fail=1,
- ))
- self.assertEqual(summed_entries[2], self.ENTRY._replace(
- test="jstests/core/all2.js",
- start_date=datetime.date(2017, 6, 3),
- end_date=datetime.date(2017, 6, 11),
- num_pass=1,
- num_fail=0,
- ))
+ self.assertEqual(summed_entries[0],
+ self.ENTRY._replace(
+ task=test_failures.Wildcard("tasks"),
+ distro=test_failures.Wildcard("distros"),
+ start_date=datetime.date(2017, 6, 3),
+ end_date=datetime.date(2017, 6, 11),
+ num_pass=3,
+ num_fail=2,
+ ))
+ self.assertEqual(summed_entries[1],
+ self.ENTRY._replace(
+ variant="linux-64-debug",
+ start_date=datetime.date(2017, 6, 12),
+ end_date=datetime.date(2017, 6, 17),
+ num_pass=0,
+ num_fail=1,
+ ))
+ self.assertEqual(summed_entries[2],
+ self.ENTRY._replace(
+ test="jstests/core/all2.js",
+ start_date=datetime.date(2017, 6, 3),
+ end_date=datetime.date(2017, 6, 11),
+ num_pass=1,
+ num_fail=0,
+ ))
diff --git a/buildscripts/tests/test_update_test_lifecycle.py b/buildscripts/tests/test_update_test_lifecycle.py
index feca9e577ee..64ffc6b8b7a 100644
--- a/buildscripts/tests/test_update_test_lifecycle.py
+++ b/buildscripts/tests/test_update_test_lifecycle.py
@@ -20,13 +20,12 @@ class TestValidateConfig(unittest.TestCase):
"""
CONFIG = update_test_lifecycle.Config(
- test_fail_rates=update_test_lifecycle.Rates(acceptable=0, unacceptable=1),
- task_fail_rates=update_test_lifecycle.Rates(acceptable=0, unacceptable=1),
- variant_fail_rates=update_test_lifecycle.Rates(acceptable=0, unacceptable=1),
- distro_fail_rates=update_test_lifecycle.Rates(acceptable=0, unacceptable=1),
- reliable_min_runs=2,
- reliable_time_period=datetime.timedelta(days=1),
- unreliable_min_runs=2,
+ test_fail_rates=update_test_lifecycle.Rates(
+ acceptable=0, unacceptable=1), task_fail_rates=update_test_lifecycle.Rates(
+ acceptable=0, unacceptable=1), variant_fail_rates=update_test_lifecycle.Rates(
+ acceptable=0, unacceptable=1), distro_fail_rates=update_test_lifecycle.Rates(
+ acceptable=0, unacceptable=1), reliable_min_runs=2,
+ reliable_time_period=datetime.timedelta(days=1), unreliable_min_runs=2,
unreliable_time_period=datetime.timedelta(days=1))
def test_acceptable_test_fail_rate(self):
@@ -180,8 +179,8 @@ class TestValidateConfig(unittest.TestCase):
with self.assertRaises(ValueError):
config = self.CONFIG._replace(
- variant_fail_rates=self.CONFIG.variant_fail_rates._replace(acceptable=0.9,
- unacceptable=0.1))
+ variant_fail_rates=self.CONFIG.variant_fail_rates._replace(
+ acceptable=0.9, unacceptable=0.1))
update_test_lifecycle.validate_config(config)
def test_acceptable_distro_fail_rate(self):
@@ -232,8 +231,8 @@ class TestValidateConfig(unittest.TestCase):
with self.assertRaises(ValueError):
config = self.CONFIG._replace(
- distro_fail_rates=self.CONFIG.distro_fail_rates._replace(acceptable=0.9,
- unacceptable=0.1))
+ distro_fail_rates=self.CONFIG.distro_fail_rates._replace(
+ acceptable=0.9, unacceptable=0.1))
update_test_lifecycle.validate_config(config)
def test_reliable_min_runs(self):
@@ -328,23 +327,18 @@ class TestUpdateTags(unittest.TestCase):
"""
CONFIG = update_test_lifecycle.Config(
- test_fail_rates=update_test_lifecycle.Rates(acceptable=0, unacceptable=1),
- task_fail_rates=update_test_lifecycle.Rates(acceptable=0, unacceptable=1),
- variant_fail_rates=update_test_lifecycle.Rates(acceptable=0, unacceptable=1),
- distro_fail_rates=update_test_lifecycle.Rates(acceptable=0, unacceptable=1),
- reliable_min_runs=2,
- reliable_time_period=datetime.timedelta(days=1),
- unreliable_min_runs=2,
+ test_fail_rates=update_test_lifecycle.Rates(
+ acceptable=0, unacceptable=1), task_fail_rates=update_test_lifecycle.Rates(
+ acceptable=0, unacceptable=1), variant_fail_rates=update_test_lifecycle.Rates(
+ acceptable=0, unacceptable=1), distro_fail_rates=update_test_lifecycle.Rates(
+ acceptable=0, unacceptable=1), reliable_min_runs=2,
+ reliable_time_period=datetime.timedelta(days=1), unreliable_min_runs=2,
unreliable_time_period=datetime.timedelta(days=1))
- ENTRY = test_failures.ReportEntry(test="jstests/core/all.js",
- task="jsCore_WT",
- variant="linux-64",
- distro="rhel62",
- start_date=datetime.date(2017, 6, 3),
- end_date=datetime.date(2017, 6, 3),
- num_pass=0,
- num_fail=0)
+ ENTRY = test_failures.ReportEntry(test="jstests/core/all.js", task="jsCore_WT",
+ variant="linux-64", distro="rhel62", start_date=datetime.date(
+ 2017, 6, 3), end_date=datetime.date(2017, 6, 3),
+ num_pass=0, num_fail=0)
def assert_has_only_js_tests(self, lifecycle):
"""
@@ -395,9 +389,10 @@ class TestUpdateTags(unittest.TestCase):
config = self.CONFIG._replace(
test_fail_rates=self.CONFIG.test_fail_rates._replace(unacceptable=0.1))
- self.transition_from_reliable_to_unreliable(config, collections.OrderedDict([
- ("jstests/core/all.js", ["unreliable"]),
- ]))
+ self.transition_from_reliable_to_unreliable(config,
+ collections.OrderedDict([
+ ("jstests/core/all.js", ["unreliable"]),
+ ]))
def test_transition_task_from_reliable_to_unreliable(self):
"""
@@ -408,9 +403,11 @@ class TestUpdateTags(unittest.TestCase):
config = self.CONFIG._replace(
task_fail_rates=self.CONFIG.task_fail_rates._replace(unacceptable=0.1))
- self.transition_from_reliable_to_unreliable(config, collections.OrderedDict([
- ("jstests/core/all.js", ["unreliable|jsCore_WT"]),
- ]))
+ self.transition_from_reliable_to_unreliable(config,
+ collections.OrderedDict([
+ ("jstests/core/all.js",
+ ["unreliable|jsCore_WT"]),
+ ]))
def test_transition_variant_from_reliable_to_unreliable(self):
"""
@@ -421,9 +418,11 @@ class TestUpdateTags(unittest.TestCase):
config = self.CONFIG._replace(
variant_fail_rates=self.CONFIG.variant_fail_rates._replace(unacceptable=0.1))
- self.transition_from_reliable_to_unreliable(config, collections.OrderedDict([
- ("jstests/core/all.js", ["unreliable|jsCore_WT|linux-64"]),
- ]))
+ self.transition_from_reliable_to_unreliable(config,
+ collections.OrderedDict([
+ ("jstests/core/all.js",
+ ["unreliable|jsCore_WT|linux-64"]),
+ ]))
def test_transition_distro_from_reliable_to_unreliable(self):
"""
@@ -434,9 +433,11 @@ class TestUpdateTags(unittest.TestCase):
config = self.CONFIG._replace(
distro_fail_rates=self.CONFIG.distro_fail_rates._replace(unacceptable=0.1))
- self.transition_from_reliable_to_unreliable(config, collections.OrderedDict([
- ("jstests/core/all.js", ["unreliable|jsCore_WT|linux-64|rhel62"]),
- ]))
+ self.transition_from_reliable_to_unreliable(config,
+ collections.OrderedDict([
+ ("jstests/core/all.js",
+ ["unreliable|jsCore_WT|linux-64|rhel62"]),
+ ]))
def test_transition_from_reliable_to_unreliable(self):
"""
@@ -449,14 +450,15 @@ class TestUpdateTags(unittest.TestCase):
variant_fail_rates=self.CONFIG.variant_fail_rates._replace(unacceptable=0.1),
distro_fail_rates=self.CONFIG.distro_fail_rates._replace(unacceptable=0.1))
- self.transition_from_reliable_to_unreliable(config, collections.OrderedDict([
- ("jstests/core/all.js", [
- "unreliable",
- "unreliable|jsCore_WT",
- "unreliable|jsCore_WT|linux-64",
- "unreliable|jsCore_WT|linux-64|rhel62",
- ]),
- ]))
+ self.transition_from_reliable_to_unreliable(config,
+ collections.OrderedDict([
+ ("jstests/core/all.js", [
+ "unreliable",
+ "unreliable|jsCore_WT",
+ "unreliable|jsCore_WT|linux-64",
+ "unreliable|jsCore_WT|linux-64|rhel62",
+ ]),
+ ]))
def transition_from_unreliable_to_reliable(self, config, initial_tags):
"""
@@ -516,23 +518,19 @@ class TestUpdateTags(unittest.TestCase):
# The test did not run on the reliable period on linux-64.
report = test_failures.Report([
# Failing.
- self.ENTRY._replace(num_pass=0,
- num_fail=2),
+ self.ENTRY._replace(num_pass=0, num_fail=2),
# Passing on a different variant.
- self.ENTRY._replace(start_date=reliable_period_date,
- end_date=reliable_period_date,
- num_pass=3,
- num_fail=0,
- variant="linux-alt",
- distro="debian7"),
+ self.ENTRY._replace(start_date=reliable_period_date, end_date=reliable_period_date,
+ num_pass=3, num_fail=0, variant="linux-alt", distro="debian7"),
])
update_test_lifecycle.validate_config(config)
update_test_lifecycle.update_tags(summary_lifecycle, config, report, tests)
updated_tags = self.assert_has_only_js_tests(lifecycle)
# The tags for variant and distro have been removed.
- self.assertEqual(updated_tags, collections.OrderedDict([
- ("jstests/core/all.js", ["unreliable", "unreliable|jsCore_WT"])]))
+ self.assertEqual(updated_tags,
+ collections.OrderedDict([("jstests/core/all.js",
+ ["unreliable", "unreliable|jsCore_WT"])]))
def test_non_running_at_all_is_reliable(self):
"""
@@ -574,9 +572,10 @@ class TestUpdateTags(unittest.TestCase):
config = self.CONFIG._replace(
test_fail_rates=self.CONFIG.test_fail_rates._replace(acceptable=0.9))
- self.transition_from_unreliable_to_reliable(config, collections.OrderedDict([
- ("jstests/core/all.js", ["unreliable"]),
- ]))
+ self.transition_from_unreliable_to_reliable(config,
+ collections.OrderedDict([
+ ("jstests/core/all.js", ["unreliable"]),
+ ]))
def test_transition_task_from_unreliable_to_reliable(self):
"""
@@ -587,9 +586,11 @@ class TestUpdateTags(unittest.TestCase):
config = self.CONFIG._replace(
task_fail_rates=self.CONFIG.task_fail_rates._replace(acceptable=0.9))
- self.transition_from_unreliable_to_reliable(config, collections.OrderedDict([
- ("jstests/core/all.js", ["unreliable|jsCore_WT"]),
- ]))
+ self.transition_from_unreliable_to_reliable(config,
+ collections.OrderedDict([
+ ("jstests/core/all.js",
+ ["unreliable|jsCore_WT"]),
+ ]))
def test_transition_variant_from_unreliable_to_reliable(self):
"""
@@ -600,9 +601,11 @@ class TestUpdateTags(unittest.TestCase):
config = self.CONFIG._replace(
variant_fail_rates=self.CONFIG.variant_fail_rates._replace(acceptable=0.9))
- self.transition_from_unreliable_to_reliable(config, collections.OrderedDict([
- ("jstests/core/all.js", ["unreliable|jsCore_WT|linux-64"]),
- ]))
+ self.transition_from_unreliable_to_reliable(config,
+ collections.OrderedDict([
+ ("jstests/core/all.js",
+ ["unreliable|jsCore_WT|linux-64"]),
+ ]))
def test_transition_distro_from_unreliable_to_reliable(self):
"""
@@ -613,9 +616,11 @@ class TestUpdateTags(unittest.TestCase):
config = self.CONFIG._replace(
distro_fail_rates=self.CONFIG.distro_fail_rates._replace(acceptable=0.9))
- self.transition_from_unreliable_to_reliable(config, collections.OrderedDict([
- ("jstests/core/all.js", ["unreliable|jsCore_WT|linux-64|rhel62"]),
- ]))
+ self.transition_from_unreliable_to_reliable(config,
+ collections.OrderedDict([
+ ("jstests/core/all.js",
+ ["unreliable|jsCore_WT|linux-64|rhel62"]),
+ ]))
def test_transition_from_unreliable_to_reliable(self):
"""
@@ -629,14 +634,15 @@ class TestUpdateTags(unittest.TestCase):
variant_fail_rates=self.CONFIG.variant_fail_rates._replace(acceptable=0.9),
distro_fail_rates=self.CONFIG.distro_fail_rates._replace(acceptable=0.9))
- self.transition_from_unreliable_to_reliable(config, collections.OrderedDict([
- ("jstests/core/all.js", [
- "unreliable",
- "unreliable|jsCore_WT",
- "unreliable|jsCore_WT|linux-64",
- "unreliable|jsCore_WT|linux-64|rhel62",
- ]),
- ]))
+ self.transition_from_unreliable_to_reliable(config,
+ collections.OrderedDict([
+ ("jstests/core/all.js", [
+ "unreliable",
+ "unreliable|jsCore_WT",
+ "unreliable|jsCore_WT|linux-64",
+ "unreliable|jsCore_WT|linux-64|rhel62",
+ ]),
+ ]))
def test_remain_reliable(self):
"""
@@ -720,14 +726,15 @@ class TestUpdateTags(unittest.TestCase):
distro_fail_rates=self.CONFIG.distro_fail_rates._replace(acceptable=0.9),
reliable_min_runs=100)
- self.transition_from_unreliable_to_reliable(config, collections.OrderedDict([
- ("jstests/core/all.js", [
- "unreliable",
- "unreliable|jsCore_WT",
- "unreliable|jsCore_WT|linux-64",
- "unreliable|jsCore_WT|linux-64|rhel62",
- ]),
- ]))
+ self.transition_from_unreliable_to_reliable(config,
+ collections.OrderedDict([
+ ("jstests/core/all.js", [
+ "unreliable",
+ "unreliable|jsCore_WT",
+ "unreliable|jsCore_WT|linux-64",
+ "unreliable|jsCore_WT|linux-64|rhel62",
+ ]),
+ ]))
def test_obeys_reliable_time_period(self):
"""
@@ -748,14 +755,14 @@ class TestUpdateTags(unittest.TestCase):
tests = ["jstests/core/all.js"]
report = test_failures.Report([
- self.ENTRY._replace(start_date=(self.ENTRY.start_date - datetime.timedelta(days=1)),
- end_date=(self.ENTRY.end_date - datetime.timedelta(days=1)),
- num_pass=1,
- num_fail=0),
- self.ENTRY._replace(start_date=(self.ENTRY.start_date - datetime.timedelta(days=2)),
- end_date=(self.ENTRY.end_date - datetime.timedelta(days=2)),
- num_pass=1,
- num_fail=0),
+ self.ENTRY._replace(
+ start_date=(self.ENTRY.start_date - datetime.timedelta(days=1)),
+ end_date=(self.ENTRY.end_date - datetime.timedelta(days=1)), num_pass=1,
+ num_fail=0),
+ self.ENTRY._replace(
+ start_date=(self.ENTRY.start_date - datetime.timedelta(days=2)),
+ end_date=(self.ENTRY.end_date - datetime.timedelta(days=2)), num_pass=1,
+ num_fail=0),
self.ENTRY._replace(num_pass=0, num_fail=1),
self.ENTRY._replace(num_pass=0, num_fail=1),
self.ENTRY._replace(num_pass=0, num_fail=1, task="jsCore"),
@@ -766,14 +773,15 @@ class TestUpdateTags(unittest.TestCase):
update_test_lifecycle.validate_config(config)
update_test_lifecycle.update_tags(summary_lifecycle, config, report, tests)
updated_tags = self.assert_has_only_js_tests(lifecycle)
- self.assertEqual(updated_tags, collections.OrderedDict([
- ("jstests/core/all.js", [
- "unreliable",
- "unreliable|jsCore_WT",
- "unreliable|jsCore_WT|linux-64",
- "unreliable|jsCore_WT|linux-64|rhel62",
- ]),
- ]))
+ self.assertEqual(updated_tags,
+ collections.OrderedDict([
+ ("jstests/core/all.js", [
+ "unreliable",
+ "unreliable|jsCore_WT",
+ "unreliable|jsCore_WT|linux-64",
+ "unreliable|jsCore_WT|linux-64|rhel62",
+ ]),
+ ]))
def test_obeys_unreliable_min_runs(self):
"""
@@ -835,14 +843,14 @@ class TestUpdateTags(unittest.TestCase):
tests = ["jstests/core/all.js"]
report = test_failures.Report([
- self.ENTRY._replace(start_date=(self.ENTRY.start_date - datetime.timedelta(days=1)),
- end_date=(self.ENTRY.end_date - datetime.timedelta(days=1)),
- num_pass=0,
- num_fail=1),
- self.ENTRY._replace(start_date=(self.ENTRY.start_date - datetime.timedelta(days=2)),
- end_date=(self.ENTRY.end_date - datetime.timedelta(days=2)),
- num_pass=0,
- num_fail=1),
+ self.ENTRY._replace(
+ start_date=(self.ENTRY.start_date - datetime.timedelta(days=1)),
+ end_date=(self.ENTRY.end_date - datetime.timedelta(days=1)), num_pass=0,
+ num_fail=1),
+ self.ENTRY._replace(
+ start_date=(self.ENTRY.start_date - datetime.timedelta(days=2)),
+ end_date=(self.ENTRY.end_date - datetime.timedelta(days=2)), num_pass=0,
+ num_fail=1),
self.ENTRY._replace(num_pass=1, num_fail=0),
self.ENTRY._replace(num_pass=1, num_fail=0),
self.ENTRY._replace(num_pass=1, num_fail=0, task="jsCore"),
@@ -858,13 +866,11 @@ class TestUpdateTags(unittest.TestCase):
class TestCombinationHelpers(unittest.TestCase):
def test_from_entry(self):
- entry = test_failures._ReportEntry(
- "testA", "taskA", "variantA", "distroA",
- datetime.date.today(),
- datetime.date.today(), 0, 0)
+ entry = test_failures._ReportEntry("testA", "taskA", "variantA", "distroA",
+ datetime.date.today(), datetime.date.today(), 0, 0)
combination = update_test_lifecycle._test_combination_from_entry(
entry, test_failures.Report.TEST)
- self.assertEqual(combination, ("testA",))
+ self.assertEqual(combination, ("testA", ))
combination = update_test_lifecycle._test_combination_from_entry(
entry, test_failures.Report.TEST_TASK)
@@ -881,12 +887,10 @@ class TestCombinationHelpers(unittest.TestCase):
def test_make_from_tag(self):
test = "testA"
- combination = update_test_lifecycle._test_combination_from_tag(
- test, "unreliable")
- self.assertEqual(combination, ("testA",))
+ combination = update_test_lifecycle._test_combination_from_tag(test, "unreliable")
+ self.assertEqual(combination, ("testA", ))
- combination = update_test_lifecycle._test_combination_from_tag(
- test, "unreliable|taskA")
+ combination = update_test_lifecycle._test_combination_from_tag(test, "unreliable|taskA")
self.assertEqual(combination, ("testA", "taskA"))
combination = update_test_lifecycle._test_combination_from_tag(
@@ -901,57 +905,57 @@ class TestCombinationHelpers(unittest.TestCase):
class TestCleanUpTags(unittest.TestCase):
@classmethod
def setUpClass(cls):
- cls.evg = MockEvergreenConfig(["task1", "task2", "task3"],
- {"variant1": {"tasks": ["task1", "task2"],
- "distros": ["distro1"]},
- "variant2": {"tasks": ["task3"],
- "distros": ["distro2"]}})
+ cls.evg = MockEvergreenConfig(
+ ["task1", "task2", "task3"], {
+ "variant1": {"tasks": ["task1", "task2"], "distros": ["distro1"]},
+ "variant2": {"tasks": ["task3"], "distros": ["distro2"]}
+ })
def test_is_unreliable_tag_relevant(self):
self.assertTrue(update_test_lifecycle._is_tag_still_relevant(self.evg, "unreliable"))
def test_is_unknown_task_relevant(self):
- self.assertFalse(update_test_lifecycle._is_tag_still_relevant(
- self.evg, "unreliable|task_unknown"))
+ self.assertFalse(
+ update_test_lifecycle._is_tag_still_relevant(self.evg, "unreliable|task_unknown"))
def test_is_known_task_relevant(self):
- self.assertTrue(update_test_lifecycle._is_tag_still_relevant(
- self.evg, "unreliable|task1"))
- self.assertTrue(update_test_lifecycle._is_tag_still_relevant(
- self.evg, "unreliable|task2"))
- self.assertTrue(update_test_lifecycle._is_tag_still_relevant(
- self.evg, "unreliable|task3"))
+ self.assertTrue(update_test_lifecycle._is_tag_still_relevant(self.evg, "unreliable|task1"))
+ self.assertTrue(update_test_lifecycle._is_tag_still_relevant(self.evg, "unreliable|task2"))
+ self.assertTrue(update_test_lifecycle._is_tag_still_relevant(self.evg, "unreliable|task3"))
def test_is_unknown_variant_relevant(self):
- self.assertFalse(update_test_lifecycle._is_tag_still_relevant(
- self.evg, "unreliable|task1|variant3"
- ))
+ self.assertFalse(
+ update_test_lifecycle._is_tag_still_relevant(self.evg, "unreliable|task1|variant3"))
def test_is_unknown_task_variant_relevant(self):
- self.assertFalse(update_test_lifecycle._is_tag_still_relevant(
- self.evg, "unreliable|task3|variant1"))
- self.assertFalse(update_test_lifecycle._is_tag_still_relevant(
- self.evg, "unreliable|task1|variant2"))
+ self.assertFalse(
+ update_test_lifecycle._is_tag_still_relevant(self.evg, "unreliable|task3|variant1"))
+ self.assertFalse(
+ update_test_lifecycle._is_tag_still_relevant(self.evg, "unreliable|task1|variant2"))
def test_is_known_task_variant_relevant(self):
- self.assertTrue(update_test_lifecycle._is_tag_still_relevant(
- self.evg, "unreliable|task1|variant1"))
- self.assertTrue(update_test_lifecycle._is_tag_still_relevant(
- self.evg, "unreliable|task2|variant1"))
- self.assertTrue(update_test_lifecycle._is_tag_still_relevant(
- self.evg, "unreliable|task3|variant2"))
+ self.assertTrue(
+ update_test_lifecycle._is_tag_still_relevant(self.evg, "unreliable|task1|variant1"))
+ self.assertTrue(
+ update_test_lifecycle._is_tag_still_relevant(self.evg, "unreliable|task2|variant1"))
+ self.assertTrue(
+ update_test_lifecycle._is_tag_still_relevant(self.evg, "unreliable|task3|variant2"))
def test_is_unknown_task_variant_distro_relevant(self):
- self.assertFalse(update_test_lifecycle._is_tag_still_relevant(
- self.evg, "unreliable|task1|variant1|distro2"))
- self.assertFalse(update_test_lifecycle._is_tag_still_relevant(
- self.evg, "unreliable|task3|variant2|distro1"))
+ self.assertFalse(
+ update_test_lifecycle._is_tag_still_relevant(self.evg,
+ "unreliable|task1|variant1|distro2"))
+ self.assertFalse(
+ update_test_lifecycle._is_tag_still_relevant(self.evg,
+ "unreliable|task3|variant2|distro1"))
def test_is_known_task_variant_distro_relevant(self):
- self.assertTrue(update_test_lifecycle._is_tag_still_relevant(
- self.evg, "unreliable|task1|variant1|distro1"))
- self.assertTrue(update_test_lifecycle._is_tag_still_relevant(
- self.evg, "unreliable|task3|variant2|distro2"))
+ self.assertTrue(
+ update_test_lifecycle._is_tag_still_relevant(self.evg,
+ "unreliable|task1|variant1|distro1"))
+ self.assertTrue(
+ update_test_lifecycle._is_tag_still_relevant(self.evg,
+ "unreliable|task3|variant2|distro2"))
class MockEvergreenConfig(object):
@@ -973,8 +977,11 @@ class MockVariant(object):
class TestJiraIssueCreator(unittest.TestCase):
def test_description(self):
- data = {"js_test": {"testfile1": {"tag1": 0.1, "tag2": 0.2},
- "testfile2": {"tag1": 0.1, "tag3": 0.3}}}
+ data = {
+ "js_test": {
+ "testfile1": {"tag1": 0.1, "tag2": 0.2}, "testfile2": {"tag1": 0.1, "tag3": 0.3}
+ }
+ }
desc = update_test_lifecycle.JiraIssueCreator._make_updated_tags_description(data)
expected = ("- *js_test*\n"
"-- {{testfile1}}\n"
@@ -992,8 +999,7 @@ class TestJiraIssueCreator(unittest.TestCase):
self.assertEqual(expected, desc)
def test_clean_up_description(self):
- data = {"js_test": {"testfile1": ["tag1", "tag2"],
- "testfile2": []}}
+ data = {"js_test": {"testfile1": ["tag1", "tag2"], "testfile2": []}}
desc = update_test_lifecycle.JiraIssueCreator._make_tags_cleaned_up_description(data)
expected = ("- *js_test*\n"
"-- {{testfile1}}\n"
@@ -1017,8 +1023,9 @@ class TestJiraIssueCreator(unittest.TestCase):
self.assertTrue(desc == update_test_lifecycle.JiraIssueCreator._truncate_description(desc))
desc += "a"
- self.assertTrue(len(update_test_lifecycle.JiraIssueCreator._truncate_description(desc)) <=
- update_test_lifecycle.JiraIssueCreator._MAX_DESCRIPTION_SIZE)
+ self.assertTrue(
+ len(update_test_lifecycle.JiraIssueCreator._truncate_description(desc)) <=
+ update_test_lifecycle.JiraIssueCreator._MAX_DESCRIPTION_SIZE)
class TestTagsConfigWithChangelog(unittest.TestCase):
diff --git a/buildscripts/update_test_lifecycle.py b/buildscripts/update_test_lifecycle.py
index 887f0cc78af..922c4a5da63 100755
--- a/buildscripts/update_test_lifecycle.py
+++ b/buildscripts/update_test_lifecycle.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-
"""Test Failures
Update etc/test_lifecycle.yml to tag unreliable tests based on historic failure rates.
@@ -36,7 +35,6 @@ from buildscripts import test_failures as tf
from buildscripts.ciconfig import evergreen as ci_evergreen
from buildscripts.ciconfig import tags as ci_tags
-
LOGGER = logging.getLogger(__name__)
if sys.version_info[0] == 2:
@@ -44,10 +42,8 @@ if sys.version_info[0] == 2:
else:
_NUMBER_TYPES = (int, float)
-
Rates = collections.namedtuple("Rates", ["acceptable", "unacceptable"])
-
Config = collections.namedtuple("Config", [
"test_fail_rates",
"task_fail_rates",
@@ -59,21 +55,16 @@ Config = collections.namedtuple("Config", [
"unreliable_time_period",
])
-
DEFAULT_CONFIG = Config(
- test_fail_rates=Rates(acceptable=0.1, unacceptable=0.3),
- task_fail_rates=Rates(acceptable=0.1, unacceptable=0.3),
- variant_fail_rates=Rates(acceptable=0.2, unacceptable=0.4),
- distro_fail_rates=Rates(acceptable=0.2, unacceptable=0.4),
- reliable_min_runs=5,
- reliable_time_period=datetime.timedelta(weeks=1),
- unreliable_min_runs=20,
- unreliable_time_period=datetime.timedelta(weeks=4))
-
+ test_fail_rates=Rates(acceptable=0.1, unacceptable=0.3), task_fail_rates=Rates(
+ acceptable=0.1, unacceptable=0.3),
+ variant_fail_rates=Rates(acceptable=0.2, unacceptable=0.4), distro_fail_rates=Rates(
+ acceptable=0.2,
+ unacceptable=0.4), reliable_min_runs=5, reliable_time_period=datetime.timedelta(weeks=1),
+ unreliable_min_runs=20, unreliable_time_period=datetime.timedelta(weeks=4))
DEFAULT_PROJECT = "mongodb-mongo-master"
-
DEFAULT_NUM_THREADS = 12
@@ -133,8 +124,8 @@ def create_batch_groups(test_groups, batch_size):
class TestHistorySource(object):
-
"""A class used to parallelize requests to buildscripts.test_failures.TestHistory."""
+
def __init__(self, project, variants, distros, start_revision, end_revision,
thread_pool_size=DEFAULT_NUM_THREADS):
"""
@@ -161,18 +152,17 @@ class TestHistorySource(object):
The requests for each task will be parallelized using the internal thread pool.
"""
history_data = []
- jobs = [self._thread_pool.apply_async(self._get_task_history_data, (tests, task))
- for task in tasks]
+ jobs = [
+ self._thread_pool.apply_async(self._get_task_history_data, (tests, task))
+ for task in tasks
+ ]
for job in jobs:
history_data.extend(job.get())
return history_data
def _get_task_history_data(self, tests, task):
- test_history = tf.TestHistory(project=self._project,
- tests=tests,
- tasks=[task],
- variants=self._variants,
- distros=self._distros)
+ test_history = tf.TestHistory(project=self._project, tests=tests, tasks=[task],
+ variants=self._variants, distros=self._distros)
return test_history.get_history_by_revision(start_revision=self._start_revision,
end_revision=self._end_revision)
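The fan-out above relies on multiprocessing.pool.ThreadPool's apply_async()/get() idiom to query each task's history in parallel and then merge the results. A minimal standalone sketch of the same pattern, with an illustrative worker function and task names:

from multiprocessing.pool import ThreadPool

def fetch_history(task):
    # Stand-in for a per-task Evergreen query; returns a list of entries.
    return ["%s-entry" % task]

pool = ThreadPool(processes=4)
jobs = [pool.apply_async(fetch_history, (task,)) for task in ("compile", "jsCore")]
history = []
for job in jobs:
    history.extend(job.get())  # get() blocks until that job's result is ready
pool.close()
pool.join()
print(history)  # ['compile-entry', 'jsCore-entry']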
@@ -235,9 +225,8 @@ def check_days(name, days):
def unreliable_tag(task, variant, distro):
"""Returns the unreliable tag."""
- for (component_name, component_value) in (("task", task),
- ("variant", variant),
- ("distro", distro)):
+ for (component_name, component_value) in (("task", task), ("variant", variant), ("distro",
+ distro)):
if isinstance(component_value, (tf.Wildcard, tf.Missing)):
if component_name == "task":
return "unreliable"
@@ -255,17 +244,13 @@ def update_lifecycle(lifecycle_tags_file, report, method_test, add_tags, fail_ra
The test_method checks unreliable or reliable fail_rates.
"""
for summary in report:
- if method_test(summary.fail_rate,
- fail_rate,
- summary.num_pass + summary.num_fail,
- min_run):
+ if method_test(summary.fail_rate, fail_rate, summary.num_pass + summary.num_fail, min_run):
update_tag = unreliable_tag(summary.task, summary.variant, summary.distro)
if add_tags:
- lifecycle_tags_file.add_tag("js_test", summary.test,
- update_tag, summary.fail_rate)
+ lifecycle_tags_file.add_tag("js_test", summary.test, update_tag, summary.fail_rate)
else:
- lifecycle_tags_file.remove_tag("js_test", summary.test,
- update_tag, summary.fail_rate)
+ lifecycle_tags_file.remove_tag("js_test", summary.test, update_tag,
+ summary.fail_rate)
def compare_tags(tag_a, tag_b):
@@ -280,10 +265,9 @@ def validate_config(config):
Raises a TypeError or ValueError exception if 'config' isn't a valid model.
"""
- for (name, fail_rates) in (("test", config.test_fail_rates),
- ("task", config.task_fail_rates),
- ("variant", config.variant_fail_rates),
- ("distro", config.distro_fail_rates)):
+ for (name, fail_rates) in (("test", config.test_fail_rates), ("task", config.task_fail_rates),
+ ("variant", config.variant_fail_rates), ("distro",
+ config.distro_fail_rates)):
if not isinstance(fail_rates.acceptable, _NUMBER_TYPES):
raise TypeError("The acceptable {} failure rate must be a number, but got {}".format(
name, fail_rates.acceptable))
@@ -299,11 +283,11 @@ def validate_config(config):
elif fail_rates.acceptable > fail_rates.unacceptable:
raise ValueError(
("The acceptable {0} failure rate ({1}) must be no larger than unacceptable {0}"
- " failure rate ({2})").format(
- name, fail_rates.acceptable, fail_rates.unacceptable))
+ " failure rate ({2})").format(name, fail_rates.acceptable,
+ fail_rates.unacceptable))
- for (name, min_runs) in (("reliable", config.reliable_min_runs),
- ("unreliable", config.unreliable_min_runs)):
+ for (name, min_runs) in (("reliable", config.reliable_min_runs), ("unreliable",
+ config.unreliable_min_runs)):
if not isinstance(min_runs, _NUMBER_TYPES):
raise TypeError(("The minimum number of runs for considering a test {} must be a"
" number, but got {}").format(name, min_runs))
@@ -365,10 +349,9 @@ def update_tags(lifecycle_tags, config, report, tests):
# before assignment.
grouped_entries = None
for (i, (components, rates)) in enumerate(
- ((tf.Report.TEST_TASK_VARIANT_DISTRO, config.distro_fail_rates),
- (tf.Report.TEST_TASK_VARIANT, config.variant_fail_rates),
- (tf.Report.TEST_TASK, config.task_fail_rates),
- (tf.Report.TEST, config.test_fail_rates))):
+ ((tf.Report.TEST_TASK_VARIANT_DISTRO,
+ config.distro_fail_rates), (tf.Report.TEST_TASK_VARIANT, config.variant_fail_rates),
+ (tf.Report.TEST_TASK, config.task_fail_rates), (tf.Report.TEST, config.test_fail_rates))):
if i > 0:
report = tf.Report(grouped_entries)
@@ -379,14 +362,17 @@ def update_tags(lifecycle_tags, config, report, tests):
# Create the reliable report.
# Filter out any test executions from prior to 'config.reliable_time_period'.
- reliable_start_date = (report.end_date - config.reliable_time_period
- + datetime.timedelta(days=1))
- reliable_entries = [entry for entry in grouped_entries
- if entry.start_date >= reliable_start_date]
+ reliable_start_date = (
+ report.end_date - config.reliable_time_period + datetime.timedelta(days=1))
+ reliable_entries = [
+ entry for entry in grouped_entries if entry.start_date >= reliable_start_date
+ ]
if reliable_entries:
reliable_report = tf.Report(reliable_entries)
- reliable_combinations = {_test_combination_from_entry(entry, components)
- for entry in reliable_entries}
+ reliable_combinations = {
+ _test_combination_from_entry(entry, components)
+ for entry in reliable_entries
+ }
reliable_summaries = reliable_report.summarize_by(components)
else:
reliable_combinations = set()
@@ -396,12 +382,12 @@ def update_tags(lifecycle_tags, config, report, tests):
# Filter out any test executions from prior to 'config.unreliable_time_period'.
# Also filter out any test that is not present in the reliable_report in order
# to avoid tagging as unreliable tests that are no longer running.
- unreliable_start_date = (report.end_date - config.unreliable_time_period
- + datetime.timedelta(days=1))
+ unreliable_start_date = (
+ report.end_date - config.unreliable_time_period + datetime.timedelta(days=1))
unreliable_entries = [
entry for entry in grouped_entries
- if (entry.start_date >= unreliable_start_date and
- _test_combination_from_entry(entry, components) in reliable_combinations)
+ if (entry.start_date >= unreliable_start_date
+ and _test_combination_from_entry(entry, components) in reliable_combinations)
]
if unreliable_entries:
unreliable_report = tf.Report(unreliable_entries)
@@ -410,19 +396,11 @@ def update_tags(lifecycle_tags, config, report, tests):
unreliable_summaries = []
# Update the tags using the unreliable report.
- update_lifecycle(lifecycle_tags,
- unreliable_summaries,
- unreliable_test,
- True,
- rates.unacceptable,
- config.unreliable_min_runs)
+ update_lifecycle(lifecycle_tags, unreliable_summaries, unreliable_test, True,
+ rates.unacceptable, config.unreliable_min_runs)
# Update the tags using the reliable report.
- update_lifecycle(lifecycle_tags,
- reliable_summaries,
- reliable_test,
- False,
- rates.acceptable,
+ update_lifecycle(lifecycle_tags, reliable_summaries, reliable_test, False, rates.acceptable,
config.reliable_min_runs)
def should_be_removed(test, tag):
@@ -497,18 +475,12 @@ def _config_as_options(config):
"--taskFailRates {} {} "
"--variantFailRates {} {} "
"--distroFailRates {} {}").format(
- config.reliable_min_runs,
- config.reliable_time_period.days,
- config.unreliable_min_runs,
- config.unreliable_time_period.days,
- config.test_fail_rates.acceptable,
- config.test_fail_rates.unacceptable,
- config.task_fail_rates.acceptable,
- config.task_fail_rates.unacceptable,
- config.variant_fail_rates.acceptable,
- config.variant_fail_rates.unacceptable,
- config.distro_fail_rates.acceptable,
- config.distro_fail_rates.unacceptable)
+ config.reliable_min_runs, config.reliable_time_period.days,
+ config.unreliable_min_runs, config.unreliable_time_period.days,
+ config.test_fail_rates.acceptable, config.test_fail_rates.unacceptable,
+ config.task_fail_rates.acceptable, config.task_fail_rates.unacceptable,
+ config.variant_fail_rates.acceptable, config.variant_fail_rates.unacceptable,
+ config.distro_fail_rates.acceptable, config.distro_fail_rates.unacceptable)
class TagsConfigWithChangelog(object):
@@ -571,28 +543,17 @@ class JiraIssueCreator(object):
_PROJECT = "TIGBOT"
_MAX_DESCRIPTION_SIZE = 32767
- def __init__(self,
- server=None,
- username=None,
- password=None,
- access_token=None,
- access_token_secret=None,
- consumer_key=None,
- key_cert=None):
+ def __init__(self, server=None, username=None, password=None, access_token=None,
+ access_token_secret=None, consumer_key=None, key_cert=None):
self._client = jiraclient.JiraClient(
- server=server,
- username=username,
- password=password,
- access_token=access_token,
- access_token_secret=access_token_secret,
- consumer_key=consumer_key,
- key_cert=key_cert)
+ server=server, username=username, password=password, access_token=access_token,
+ access_token_secret=access_token_secret, consumer_key=consumer_key, key_cert=key_cert)
def create_issue(self, evg_project, mongo_revision, model_config, added, removed, cleaned_up):
"""Create a JIRA issue for the test lifecycle tag update."""
summary = self._get_jira_summary(evg_project)
- description = self._get_jira_description(evg_project, mongo_revision, model_config,
- added, removed, cleaned_up)
+ description = self._get_jira_description(evg_project, mongo_revision, model_config, added,
+ removed, cleaned_up)
issue_key = self._client.create_issue(self._PROJECT, summary, description, [self._LABEL])
return issue_key
@@ -643,8 +604,8 @@ class JiraIssueCreator(object):
"h5. Tags added\n{3}\n\n"
"h5. Tags removed\n{4}\n\n"
"h5. Tags cleaned up (no longer relevant)\n{5}\n").format(
- project_link, revision_link, mono(config_desc),
- added_desc, removed_desc, cleaned_up_desc)
+ project_link, revision_link, mono(config_desc), added_desc, removed_desc,
+ cleaned_up_desc)
return JiraIssueCreator._truncate_description(full_desc)
@@ -738,8 +699,8 @@ class LifecycleTagsFile(object):
@staticmethod
def _clone_repository(metadata_repo_url, branch):
directory_name = posixpath.splitext(posixpath.basename(metadata_repo_url))[0]
- LOGGER.info("Cloning the repository %s into the directory %s",
- metadata_repo_url, directory_name)
+ LOGGER.info("Cloning the repository %s into the directory %s", metadata_repo_url,
+ directory_name)
return git.Repository.clone(metadata_repo_url, directory_name, branch)
def is_modified(self):
@@ -749,9 +710,8 @@ class LifecycleTagsFile(object):
def _create_issue(self):
LOGGER.info("Creating a JIRA issue")
issue_key = self.jira_issue_creator.create_issue(
- self.project, self.mongo_revision, self.model_config,
- self.changelog_lifecycle.added, self.changelog_lifecycle.removed,
- self.changelog_lifecycle.cleaned_up)
+ self.project, self.mongo_revision, self.model_config, self.changelog_lifecycle.added,
+ self.changelog_lifecycle.removed, self.changelog_lifecycle.cleaned_up)
LOGGER.info("JIRA issue created: %s", issue_key)
return issue_key
@@ -766,8 +726,8 @@ class LifecycleTagsFile(object):
def _ready_for_commit(self, ref_branch, references):
# Check that the test lifecycle tags file has changed.
- diff = self.metadata_repo.git_diff(["--name-only", ref_branch,
- self.relative_lifecycle_file])
+ diff = self.metadata_repo.git_diff(
+ ["--name-only", ref_branch, self.relative_lifecycle_file])
if not diff:
LOGGER.info("The local lifecycle file is identical to the the one on branch '%s'",
ref_branch)
@@ -777,8 +737,8 @@ class LifecycleTagsFile(object):
if update_revision and not self.mongo_repo.is_ancestor(update_revision,
self.mongo_revision):
LOGGER.warning(("The existing lifecycle file is based on revision '%s' which is not a"
- " parent revision of the current revision '%s'"),
- update_revision, self.mongo_revision)
+ " parent revision of the current revision '%s'"), update_revision,
+ self.mongo_revision)
return False
return True
@@ -875,14 +835,9 @@ def make_lifecycle_tags_file(options, model_config):
jira_issue_creator = None
git_config = None
- lifecycle_tags_file = LifecycleTagsFile(
- options.project,
- options.tag_file,
- options.metadata_repo_url,
- options.references_file,
- jira_issue_creator,
- git_config,
- model_config)
+ lifecycle_tags_file = LifecycleTagsFile(options.project, options.tag_file,
+ options.metadata_repo_url, options.references_file,
+ jira_issue_creator, git_config, model_config)
return lifecycle_tags_file
@@ -893,25 +848,21 @@ def main():
Evergreen API.
"""
- parser = optparse.OptionParser(description=textwrap.dedent(main.__doc__),
- usage="Usage: %prog [options] [test1 test2 ...]")
+ parser = optparse.OptionParser(
+ description=textwrap.dedent(main.__doc__), usage="Usage: %prog [options] [test1 test2 ...]")
data_options = optparse.OptionGroup(
- parser,
- title="Data options",
+ parser, title="Data options",
description=("Options used to configure what historical test failure data to retrieve from"
" Evergreen."))
parser.add_option_group(data_options)
- data_options.add_option(
- "--project", dest="project",
- metavar="<project-name>",
- default=tf.TestHistory.DEFAULT_PROJECT,
- help="The Evergreen project to analyze. Defaults to '%default'.")
+ data_options.add_option("--project", dest="project", metavar="<project-name>",
+ default=tf.TestHistory.DEFAULT_PROJECT,
+ help="The Evergreen project to analyze. Defaults to '%default'.")
data_options.add_option(
- "--tasks", dest="tasks",
- metavar="<task1,task2,...>",
+ "--tasks", dest="tasks", metavar="<task1,task2,...>",
help=("The Evergreen tasks to analyze for tagging unreliable tests. If specified in"
" additional to having test positional arguments, then only tests that run under the"
" specified Evergreen tasks will be analyzed. If omitted, then the list of tasks"
@@ -919,27 +870,21 @@ def main():
" --evergreenProjectConfig file."))
data_options.add_option(
- "--variants", dest="variants",
- metavar="<variant1,variant2,...>",
- default="",
+ "--variants", dest="variants", metavar="<variant1,variant2,...>", default="",
help="The Evergreen build variants to analyze for tagging unreliable tests.")
- data_options.add_option(
- "--distros", dest="distros",
- metavar="<distro1,distro2,...>",
- default="",
- help="The Evergreen distros to analyze for tagging unreliable tests.")
+ data_options.add_option("--distros", dest="distros", metavar="<distro1,distro2,...>",
+ default="",
+ help="The Evergreen distros to analyze for tagging unreliable tests.")
data_options.add_option(
"--evergreenProjectConfig", dest="evergreen_project_config",
- metavar="<project-config-file>",
- default="etc/evergreen.yml",
+ metavar="<project-config-file>", default="etc/evergreen.yml",
help=("The Evergreen project configuration file used to get the list of tasks if --tasks is"
" omitted. Defaults to '%default'."))
model_options = optparse.OptionGroup(
- parser,
- title="Model options",
+ parser, title="Model options",
description=("Options used to configure whether (test,), (test, task),"
" (test, task, variant), and (test, task, variant, distro) combinations are"
" considered unreliable."))
@@ -947,16 +892,14 @@ def main():
model_options.add_option(
"--reliableTestMinRuns", type="int", dest="reliable_test_min_runs",
- metavar="<reliable-min-runs>",
- default=DEFAULT_CONFIG.reliable_min_runs,
+ metavar="<reliable-min-runs>", default=DEFAULT_CONFIG.reliable_min_runs,
help=("The minimum number of test executions required for a test's failure rate to"
" determine whether the test is considered reliable. If a test has fewer than"
" <reliable-min-runs> executions, then it cannot be considered unreliable."))
model_options.add_option(
"--unreliableTestMinRuns", type="int", dest="unreliable_test_min_runs",
- metavar="<unreliable-min-runs>",
- default=DEFAULT_CONFIG.unreliable_min_runs,
+ metavar="<unreliable-min-runs>", default=DEFAULT_CONFIG.unreliable_min_runs,
help=("The minimum number of test executions required for a test's failure rate to"
" determine whether the test is considered unreliable. If a test has fewer than"
" <unreliable-min-runs> executions, then it cannot be considered unreliable."))
@@ -1011,105 +954,84 @@ def main():
" unreliable. Defaults to %default."))
model_options.add_option(
- "--reliableDays", type="int", dest="reliable_days",
- metavar="<ndays>",
+ "--reliableDays", type="int", dest="reliable_days", metavar="<ndays>",
default=DEFAULT_CONFIG.reliable_time_period.days,
help=("The time period to analyze when determining if a test has become reliable. Defaults"
" to %default day(s)."))
model_options.add_option(
- "--unreliableDays", type="int", dest="unreliable_days",
- metavar="<ndays>",
+ "--unreliableDays", type="int", dest="unreliable_days", metavar="<ndays>",
default=DEFAULT_CONFIG.unreliable_time_period.days,
help=("The time period to analyze when determining if a test has become unreliable."
" Defaults to %default day(s)."))
- parser.add_option("--resmokeTagFile", dest="tag_file",
- metavar="<tagfile>",
+ parser.add_option("--resmokeTagFile", dest="tag_file", metavar="<tagfile>",
default="etc/test_lifecycle.yml",
help=("The resmoke.py tag file to update. If --metadataRepo is specified, it"
" is the relative path in the metadata repository, otherwise it can be"
" an absolute path or a relative path from the current directory."
" Defaults to '%default'."))
- parser.add_option("--metadataRepo", dest="metadata_repo_url",
- metavar="<metadata-repo-url>",
+ parser.add_option("--metadataRepo", dest="metadata_repo_url", metavar="<metadata-repo-url>",
default="git@github.com:mongodb/mongo-test-metadata.git",
help=("The repository that contains the lifecycle file. "
"It will be cloned in the current working directory. "
"Defaults to '%default'."))
- parser.add_option("--referencesFile", dest="references_file",
- metavar="<references-file>",
+ parser.add_option("--referencesFile", dest="references_file", metavar="<references-file>",
default="references.yml",
help=("The YAML file in the metadata repository that contains the revision "
"mappings. Defaults to '%default'."))
- parser.add_option("--requestBatchSize", type="int", dest="batch_size",
- metavar="<batch-size>",
+ parser.add_option("--requestBatchSize", type="int", dest="batch_size", metavar="<batch-size>",
default=100,
help=("The maximum number of tests to query the Evergreen API for in a single"
" request. A higher value for this option will reduce the number of"
" roundtrips between this client and Evergreen. Defaults to %default."))
parser.add_option("--requestThreads", type="int", dest="num_request_threads",
- metavar="<num-request-threads>",
- default=DEFAULT_NUM_THREADS,
+ metavar="<num-request-threads>", default=DEFAULT_NUM_THREADS,
help=("The maximum number of threads to use when querying the Evergreen API."
" Batches are processed sequentially but the test history is queried in"
" parallel for each task. Defaults to %default."))
commit_options = optparse.OptionGroup(
- parser,
- title="Commit options",
+ parser, title="Commit options",
description=("Options used to configure whether and how to commit the updated test"
" lifecycle tags."))
parser.add_option_group(commit_options)
- commit_options.add_option(
- "--commit", action="store_true", dest="commit",
- default=False,
- help="Indicates that the updated tag file should be committed.")
+ commit_options.add_option("--commit", action="store_true", dest="commit", default=False,
+ help="Indicates that the updated tag file should be committed.")
commit_options.add_option(
- "--jiraConfig", dest="jira_config",
- metavar="<jira-config>",
- default=None,
+ "--jiraConfig", dest="jira_config", metavar="<jira-config>", default=None,
help=("The YAML file containing the JIRA access configuration ('user', 'password',"
"'server')."))
commit_options.add_option(
- "--gitUserName", dest="git_user_name",
- metavar="<git-user-name>",
- default="Test Lifecycle",
+ "--gitUserName", dest="git_user_name", metavar="<git-user-name>", default="Test Lifecycle",
help=("The git user name that will be set before committing to the metadata repository."
" Defaults to '%default'."))
commit_options.add_option(
- "--gitUserEmail", dest="git_user_email",
- metavar="<git-user-email>",
+ "--gitUserEmail", dest="git_user_email", metavar="<git-user-email>",
default="buil+testlifecycle@mongodb.com",
help=("The git user email address that will be set before committing to the metadata"
" repository. Defaults to '%default'."))
logging_options = optparse.OptionGroup(
- parser,
- title="Logging options",
+ parser, title="Logging options",
description="Options used to configure the logging output of the script.")
parser.add_option_group(logging_options)
- logging_options.add_option(
- "--logLevel", dest="log_level",
- metavar="<log-level>",
- choices=["DEBUG", "INFO", "WARNING", "ERROR"],
- default="INFO",
- help=("The log level. Accepted values are: DEBUG, INFO, WARNING and ERROR."
- " Defaults to '%default'."))
+ logging_options.add_option("--logLevel", dest="log_level", metavar="<log-level>", choices=[
+ "DEBUG", "INFO", "WARNING", "ERROR"
+ ], default="INFO", help=("The log level. Accepted values are: DEBUG, INFO, WARNING and ERROR."
+ " Defaults to '%default'."))
logging_options.add_option(
- "--logFile", dest="log_file",
- metavar="<log-file>",
- default=None,
+ "--logFile", dest="log_file", metavar="<log-file>", default=None,
help="The destination file for the logs output. Defaults to the standard output.")
(options, tests) = parser.parse_args()
@@ -1120,8 +1042,8 @@ def main():
" isn't returned by the Evergreen API. This option will therefore be ignored."),
RuntimeWarning)
- logging.basicConfig(format="%(asctime)s %(levelname)s %(message)s",
- level=options.log_level, filename=options.log_file)
+ logging.basicConfig(format="%(asctime)s %(levelname)s %(message)s", level=options.log_level,
+ filename=options.log_file)
evg_conf = ci_evergreen.EvergreenProjectConfig(options.evergreen_project_config)
use_test_tasks_membership = False
@@ -1136,11 +1058,10 @@ def main():
distros = options.distros.split(",") if options.distros else []
config = Config(
- test_fail_rates=Rates(*options.test_fail_rates),
- task_fail_rates=Rates(*options.task_fail_rates),
- variant_fail_rates=Rates(*options.variant_fail_rates),
- distro_fail_rates=Rates(*options.distro_fail_rates),
- reliable_min_runs=options.reliable_test_min_runs,
+ test_fail_rates=Rates(*options.test_fail_rates), task_fail_rates=Rates(
+ *options.task_fail_rates), variant_fail_rates=Rates(
+ *options.variant_fail_rates), distro_fail_rates=Rates(
+ *options.distro_fail_rates), reliable_min_runs=options.reliable_test_min_runs,
reliable_time_period=datetime.timedelta(days=options.reliable_days),
unreliable_min_runs=options.unreliable_test_min_runs,
unreliable_time_period=datetime.timedelta(days=options.unreliable_days))
@@ -1163,9 +1084,8 @@ def main():
# For efficiency purposes, group the tests and process in batches of batch_size.
test_groups = create_batch_groups(create_test_groups(tests), options.batch_size)
- test_history_source = TestHistorySource(options.project, variants, distros,
- commit_prior, commit_last,
- options.num_request_threads)
+ test_history_source = TestHistorySource(options.project, variants, distros, commit_prior,
+ commit_last, options.num_request_threads)
LOGGER.info("Updating the tags")
nb_groups = len(test_groups)
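The rewrites throughout this file are mechanical re-wrapping of unchanged code to the configured style. A minimal sketch, assuming yapf is installed and picks up the repository's style configuration automatically, of how one might list files whose formatting still differs; the file list is illustrative:

import subprocess

# "yapf --diff" prints a unified diff for a file whose formatting differs from
# the configured style and prints nothing when the file already conforms.
for path in ("buildscripts/update_test_lifecycle.py", "buildscripts/utils.py"):
    proc = subprocess.Popen(["yapf", "--diff", path], stdout=subprocess.PIPE)
    diff, _ = proc.communicate()
    if diff:
        print("%s would be reformatted" % path)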
diff --git a/buildscripts/utils.py b/buildscripts/utils.py
index e62be1f8305..a710de822be 100644
--- a/buildscripts/utils.py
+++ b/buildscripts/utils.py
@@ -18,15 +18,10 @@ def getAllSourceFiles(arr=None, prefix="."):
return arr
for x in os.listdir(prefix):
- if (x.startswith(".")
- or x.startswith("pcre-")
- or x.startswith("32bit")
- or x.startswith("mongodb-")
- or x.startswith("debian")
- or x.startswith("mongo-cxx-driver")
- or x.startswith("sqlite")
- or "gotools" in x
- or x.find("mozjs") != -1):
+ if (x.startswith(".") or x.startswith("pcre-") or x.startswith("32bit")
+ or x.startswith("mongodb-") or x.startswith("debian")
+ or x.startswith("mongo-cxx-driver") or x.startswith("sqlite") or "gotools" in x
+ or x.find("mozjs") != -1):
continue
def isFollowableDir(prefix, full):
@@ -58,14 +53,14 @@ def getGitBranch():
if not version.startswith("ref: "):
return version
version = version.split("/")
- version = version[len(version)-1]
+ version = version[len(version) - 1]
return version
def getGitBranchString(prefix="", postfix=""):
t = re.compile("[/\\\]").split(os.getcwd())
- if len(t) > 2 and t[len(t)-1] == "mongo":
- par = t[len(t)-2]
+ if len(t) > 2 and t[len(t) - 1] == "mongo":
+ par = t[len(t) - 2]
m = re.compile(".*_([vV]\d+\.\d+)$").match(par)
if m is not None:
return prefix + m.group(1).lower() + postfix
@@ -94,12 +89,8 @@ def getGitVersion():
def getGitDescribe():
with open(os.devnull, "r+") as devnull:
- proc = subprocess.Popen(
- "git describe",
- stdout=subprocess.PIPE,
- stderr=devnull,
- stdin=devnull,
- shell=True)
+ proc = subprocess.Popen("git describe", stdout=subprocess.PIPE, stderr=devnull,
+ stdin=devnull, shell=True)
return proc.communicate()[0].strip()
@@ -139,12 +130,11 @@ def find_python(min_version=(2, 5)):
pass
version = re.compile(r"[Pp]ython ([\d\.]+)", re.MULTILINE)
- binaries = (
- "python27", "python2.7", "python26", "python2.6", "python25", "python2.5", "python")
+ binaries = ("python27", "python2.7", "python26", "python2.6", "python25", "python2.5", "python")
for binary in binaries:
try:
- out, err = subprocess.Popen(
- [binary, "-V"], stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
+ out, err = subprocess.Popen([binary, "-V"], stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE).communicate()
for stream in (out, err):
match = version.search(stream)
if match:
@@ -154,8 +144,8 @@ def find_python(min_version=(2, 5)):
except:
pass
- raise Exception(
- "could not find suitable Python (version >= %s)" % ".".join(str(v) for v in min_version))
+ raise Exception("could not find suitable Python (version >= %s)" % ".".join(
+ str(v) for v in min_version))
# unicode is a pain. some strings cannot be unicode()'d
@@ -167,4 +157,5 @@ def replace_with_repr(unicode_error):
offender = unicode_error.object[unicode_error.start:unicode_error.end]
return (unicode(repr(offender).strip("'").strip('"')), unicode_error.end)
+
codecs.register_error("repr", replace_with_repr)
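For context, a minimal Python 2 sketch (the interpreter this file targets) of how the "repr" error handler registered above might be exercised; it assumes the script runs from the repository root so that importing buildscripts.utils registers the handler as a side effect:

from buildscripts import utils  # noqa: F401 -- registers the "repr" handler on import

raw = "caf\xe9"                      # byte string that is not valid ASCII
print(raw.decode("ascii", "repr"))   # -> caf\xe9 (offending byte replaced by its repr)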