author     Mathew Robinson <chasinglogic@gmail.com>  2019-02-19 10:50:57 -0500
committer  Mathew Robinson <chasinglogic@gmail.com>  2019-04-08 14:08:49 -0400
commit     8dd6d4755734ed37c1b98dfdefce3ca6bc65f1f6 (patch)
tree       69e936c4953cbead2e3bae2690157c5fe75e709d
parent     c600aa9d7423eca8151daf626e2799d9a6c7b31c (diff)
download   mongo-8dd6d4755734ed37c1b98dfdefce3ca6bc65f1f6.tar.gz
SERVER-32295 Support Python 3
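This commit migrates the build scripts and SCons tooling from Python 2 to Python 3 (the minimum enforced version becomes 3.5). The hunks below repeat a handful of mechanical conversions: the print statement becomes the print() function, basestring/unicode collapse into str, iteritems()/itervalues() become items()/values(), xrange() becomes range(), dict views are wrapped in list() where a real list is needed, urllib2/urlparse move to urllib.request/urllib.parse, octal literals gain the 0o prefix, and the bytes returned by subprocess.check_output() are decoded before string handling. A minimal standalone sketch of these patterns (illustrative names only, not code from this tree; assumes a git binary on PATH):

    import subprocess
    from urllib.parse import urlparse  # Python 2 spelling: from urlparse import urlparse

    def matching_keys(val, mapping):
        # Python 3 has no basestring/unicode; str covers all text.
        if not isinstance(val, str):
            return list(mapping.keys())  # keys() is now a view; listify before reuse
        # iteritems() is gone; items() is the lazy iterator in Python 3.
        return [k for k, v in mapping.items() if v == val]

    print(matching_keys("x", {"a": "x", "b": "y"}))  # ['a'] -- print() is a function now

    # check_output() returns bytes under Python 3; decode before mixing with str.
    out = subprocess.check_output(["git", "--version"]).decode("utf-8")
    print(out.strip(), urlparse("https://example.com/api").netloc)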
-rw-r--r--  .pylintrc | 9
-rw-r--r--  SConstruct | 65
-rw-r--r--  buildscripts/aggregate_tracefiles.py | 2
-rwxr-xr-x  buildscripts/aws_ec2.py | 72
-rw-r--r--  buildscripts/burn_in_tests.py | 70
-rwxr-xr-x  buildscripts/bypass_compile_and_fetch_binaries.py | 22
-rw-r--r--  buildscripts/ciconfig/evergreen.py | 6
-rw-r--r--  buildscripts/ciconfig/tags.py | 19
-rwxr-xr-x  buildscripts/clang_format.py | 22
-rw-r--r--  buildscripts/client/evergreen.py | 2
-rwxr-xr-x  buildscripts/collect_resource_info.py | 25
-rwxr-xr-x  buildscripts/combine_reports.py | 15
-rwxr-xr-x  buildscripts/cpplint.py | 52
-rwxr-xr-x  buildscripts/errorcodes.py | 19
-rwxr-xr-x  buildscripts/eslint.py | 8
-rwxr-xr-x  buildscripts/evergreen_gen_fuzzer_tests.py | 6
-rwxr-xr-x  buildscripts/evergreen_generate_resmoke_tasks.py | 11
-rw-r--r--  buildscripts/evergreen_resmoke_job_count.py | 24
-rwxr-xr-x  buildscripts/evergreen_run_tests.py | 2
-rw-r--r--  buildscripts/evergreen_task_timeout.py | 2
-rwxr-xr-x  buildscripts/fetch_test_lifecycle.py | 63
-rw-r--r--  buildscripts/gdb/mongo.py | 18
-rw-r--r--  buildscripts/gdb/mongo_lock.py | 18
-rw-r--r--  buildscripts/gdb/mongo_printers.py | 23
-rwxr-xr-x  buildscripts/generate-pip-constraints.sh | 12
-rwxr-xr-x  buildscripts/generate_compile_expansions.py | 2
-rwxr-xr-x  buildscripts/generate_compile_expansions_shared_cache.py | 2
-rw-r--r--  buildscripts/git.py | 34
-rwxr-xr-x  buildscripts/hang_analyzer.py | 65
-rw-r--r--  buildscripts/idl/idl/ast.py | 132
-rw-r--r--  buildscripts/idl/idl/binder.py | 28
-rw-r--r--  buildscripts/idl/idl/bson.py | 4
-rw-r--r--  buildscripts/idl/idl/common.py | 24
-rw-r--r--  buildscripts/idl/idl/compiler.py | 24
-rw-r--r--  buildscripts/idl/idl/cpp_types.py | 134
-rw-r--r--  buildscripts/idl/idl/enum_types.py | 53
-rw-r--r--  buildscripts/idl/idl/errors.py | 241
-rw-r--r--  buildscripts/idl/idl/generator.py | 337
-rw-r--r--  buildscripts/idl/idl/parser.py | 57
-rw-r--r--  buildscripts/idl/idl/struct_types.py | 42
-rw-r--r--  buildscripts/idl/idl/syntax.py | 187
-rw-r--r--  buildscripts/idl/idl/writer.py | 30
-rw-r--r--  buildscripts/idl/idlc.py | 2
-rw-r--r--  buildscripts/idl/run_tests.py | 1
-rw-r--r--  buildscripts/idl/tests/test_binder.py | 155
-rw-r--r--  buildscripts/idl/tests/test_generator.py | 8
-rw-r--r--  buildscripts/idl/tests/test_import.py | 6
-rw-r--r--  buildscripts/idl/tests/test_parser.py | 10
-rw-r--r--  buildscripts/idl/tests/testcase.py | 33
-rw-r--r--  buildscripts/jiraclient.py | 2
-rw-r--r--  buildscripts/lint.py | 5
-rw-r--r--  buildscripts/linter/base.py | 6
-rw-r--r--  buildscripts/linter/git.py | 4
-rw-r--r--  buildscripts/linter/mypy.py | 9
-rw-r--r--  buildscripts/linter/parallel.py | 8
-rw-r--r--  buildscripts/linter/pydocstyle.py | 2
-rw-r--r--  buildscripts/linter/pylint.py | 4
-rw-r--r--  buildscripts/linter/runner.py | 41
-rw-r--r--  buildscripts/linter/yapf.py | 4
-rwxr-xr-x  buildscripts/make_archive.py | 14
-rw-r--r--  buildscripts/make_vcxproj.py | 13
-rw-r--r--  buildscripts/mobile/adb_monitor.py | 9
-rw-r--r--  buildscripts/mobile/benchrun_embedded_setup_android.py | 28
-rw-r--r--  buildscripts/moduleconfig.py | 1
-rwxr-xr-x  buildscripts/mongosymb.py | 8
-rw-r--r--  buildscripts/msitrim.py | 2
-rwxr-xr-x  buildscripts/packager.py | 53
-rwxr-xr-x  buildscripts/packager_enterprise.py | 24
-rw-r--r--  buildscripts/promote_silent_failures.py | 12
-rwxr-xr-x  buildscripts/pylinters.py | 5
-rwxr-xr-x  buildscripts/remote_operations.py | 156
-rwxr-xr-x  buildscripts/resmoke.py | 5
-rw-r--r--  buildscripts/resmokeconfig/__init__.py | 1
-rw-r--r--  buildscripts/resmokeconfig/loggers/__init__.py | 4
-rw-r--r--  buildscripts/resmokeconfig/suites/__init__.py | 4
-rw-r--r--  buildscripts/resmokelib/__init__.py | 2
-rw-r--r--  buildscripts/resmokelib/config.py | 31
-rw-r--r--  buildscripts/resmokelib/core/__init__.py | 1
-rw-r--r--  buildscripts/resmokelib/core/jasper_process.py | 2
-rw-r--r--  buildscripts/resmokelib/core/network.py | 2
-rw-r--r--  buildscripts/resmokelib/core/pipe.py | 2
-rw-r--r--  buildscripts/resmokelib/core/process.py | 27
-rw-r--r--  buildscripts/resmokelib/core/programs.py | 2
-rw-r--r--  buildscripts/resmokelib/logging/__init__.py | 2
-rw-r--r--  buildscripts/resmokelib/logging/buildlogger.py | 32
-rw-r--r--  buildscripts/resmokelib/logging/flush.py | 7
-rw-r--r--  buildscripts/resmokelib/logging/formatters.py | 2
-rw-r--r--  buildscripts/resmokelib/logging/handlers.py | 12
-rw-r--r--  buildscripts/resmokelib/logging/loggers.py | 6
-rw-r--r--  buildscripts/resmokelib/parser.py | 364
-rw-r--r--  buildscripts/resmokelib/reportfile.py | 2
-rw-r--r--  buildscripts/resmokelib/selector.py | 8
-rw-r--r--  buildscripts/resmokelib/sighandler.py | 2
-rw-r--r--  buildscripts/resmokelib/suitesconfig.py | 8
-rw-r--r--  buildscripts/resmokelib/testing/__init__.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/executor.py | 14
-rw-r--r--  buildscripts/resmokelib/testing/fixtures/__init__.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/fixtures/external.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/fixtures/interface.py | 6
-rw-r--r--  buildscripts/resmokelib/testing/fixtures/replicaset.py | 28
-rw-r--r--  buildscripts/resmokelib/testing/fixtures/shardedcluster.py | 4
-rw-r--r--  buildscripts/resmokelib/testing/fixtures/standalone.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/fixtures/yesfixture.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/hook_test_archival.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/hooks/__init__.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/hooks/cleanup.py | 7
-rw-r--r--  buildscripts/resmokelib/testing/hooks/cleanup_concurrency_workloads.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/hooks/collect_embedded_resources.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/hooks/combine_benchmark_results.py | 7
-rw-r--r--  buildscripts/resmokelib/testing/hooks/combine_benchrun_embedded_results.py | 7
-rw-r--r--  buildscripts/resmokelib/testing/hooks/dbhash.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/hooks/dbhash_background.py | 13
-rw-r--r--  buildscripts/resmokelib/testing/hooks/drop_sharded_collections.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/hooks/initialsync.py | 33
-rw-r--r--  buildscripts/resmokelib/testing/hooks/interface.py | 6
-rw-r--r--  buildscripts/resmokelib/testing/hooks/jsfile.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/hooks/oplog.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/hooks/periodic_kill_secondaries.py | 14
-rw-r--r--  buildscripts/resmokelib/testing/hooks/stepdown.py | 7
-rw-r--r--  buildscripts/resmokelib/testing/hooks/validate.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/hooks/wait_for_replication.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/job.py | 6
-rw-r--r--  buildscripts/resmokelib/testing/report.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/suite.py | 8
-rw-r--r--  buildscripts/resmokelib/testing/summary.py | 4
-rw-r--r--  buildscripts/resmokelib/testing/testcases/__init__.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/testcases/benchmark_test.py | 4
-rw-r--r--  buildscripts/resmokelib/testing/testcases/benchrun_embedded_test.py | 10
-rw-r--r--  buildscripts/resmokelib/testing/testcases/cpp_integration_test.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/testcases/cpp_unittest.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/testcases/dbtest.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/testcases/fsm_workload_test.py | 4
-rw-r--r--  buildscripts/resmokelib/testing/testcases/gennylib_test.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/testcases/gennytest.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/testcases/interface.py | 10
-rw-r--r--  buildscripts/resmokelib/testing/testcases/json_schema_test.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/testcases/jsrunnerfile.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/testcases/jstest.py | 4
-rw-r--r--  buildscripts/resmokelib/testing/testcases/mongos_test.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/testcases/mql_model_haskell_test.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/testcases/mql_model_mongod_test.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/testcases/multi_stmt_txn_test.py | 2
-rw-r--r--  buildscripts/resmokelib/testing/testcases/sleeptest.py | 2
-rw-r--r--  buildscripts/resmokelib/utils/__init__.py | 21
-rw-r--r--  buildscripts/resmokelib/utils/archival.py | 40
-rw-r--r--  buildscripts/resmokelib/utils/autoloader.py | 2
-rw-r--r--  buildscripts/resmokelib/utils/globstar.py | 4
-rw-r--r--  buildscripts/resmokelib/utils/jscomment.py | 15
-rw-r--r--  buildscripts/resmokelib/utils/queue.py | 8
-rw-r--r--  buildscripts/resmokelib/utils/registry.py | 10
-rw-r--r--  buildscripts/resmokelib/utils/scheduler.py | 2
-rwxr-xr-x  buildscripts/scons.py | 2
-rw-r--r--  buildscripts/scons_cache_prune.py | 12
-rwxr-xr-x  buildscripts/setup_multiversion_mongodb.py | 60
-rw-r--r--  buildscripts/tests/ciconfig/test_evergreen.py | 15
-rw-r--r--  buildscripts/tests/ciconfig/test_tags.py | 5
-rw-r--r--  buildscripts/tests/client/test_evergreen.py | 2
-rw-r--r--  buildscripts/tests/metrics/test_burn_in_tests.py | 70
-rw-r--r--  buildscripts/tests/mobile/test_adb_monitor.py | 4
-rw-r--r--  buildscripts/tests/resmokelib/logging/test_buildlogger.py | 6
-rw-r--r--  buildscripts/tests/resmokelib/test_selector.py | 28
-rwxr-xr-x  buildscripts/tests/resmokelib/testing/hooks/test_combine_benchmark_results.py | 20
-rwxr-xr-x  buildscripts/tests/resmokelib/testing/hooks/test_combine_benchrun_embedded_results.py | 12
-rw-r--r--  buildscripts/tests/resmokelib/testing/test_job.py | 1
-rw-r--r--  buildscripts/tests/resmokelib/utils/test_archival.py | 11
-rw-r--r--  buildscripts/tests/resmokelib/utils/test_rmtree.py | 31
-rwxr-xr-x  buildscripts/tests/test_aws_ec2.py | 8
-rw-r--r--  buildscripts/tests/test_burn_in_tests.py | 7
-rw-r--r--  buildscripts/tests/test_evergreen_gen_fuzzer_tests.py | 2
-rw-r--r--  buildscripts/tests/test_evergreen_generate_resmoke_tasks.py | 10
-rw-r--r--  buildscripts/tests/test_evergreen_resmoke_job_count.py | 2
-rw-r--r--  buildscripts/tests/test_evergreen_task_tags.py | 2
-rw-r--r--  buildscripts/tests/test_evergreen_task_timeout.py | 2
-rw-r--r--  buildscripts/tests/test_fetch_test_lifecycle.py | 11
-rw-r--r--  buildscripts/tests/test_git.py | 8
-rwxr-xr-x  buildscripts/tests/test_remote_operations.py | 2
-rw-r--r--  buildscripts/tests/test_update_test_lifecycle.py | 146
-rw-r--r--  buildscripts/tests/util/test_read_config.py | 2
-rw-r--r--  buildscripts/tests/util/test_taskname.py | 2
-rw-r--r--  buildscripts/tests/util/test_testname.py | 2
-rw-r--r--  buildscripts/tests/util/test_time.py | 2
-rwxr-xr-x  buildscripts/update_test_lifecycle.py | 20
-rw-r--r--  buildscripts/util/runcommand.py | 20
-rw-r--r--  buildscripts/utils.py | 13
-rw-r--r--  buildscripts/validate_mongocryptd.py | 5
-rwxr-xr-x  buildscripts/yaml_key_value.py | 2
-rw-r--r--  docs/building.md | 20
-rw-r--r--  etc/evergreen.yml | 111
-rw-r--r--  etc/pip/components/compile.req | 3
-rw-r--r--  etc/pip/components/core.req | 4
-rw-r--r--  etc/pip/components/lint.req | 4
-rw-r--r--  etc/pip/components/platform.req | 4
-rw-r--r--  etc/pip/components/resmoke.req | 4
-rw-r--r--  etc/pip/constraints.txt | 58
-rw-r--r--  etc/scons/android_toolchain.vars | 10
-rw-r--r--  etc/scons/mongodbtoolchain_stable_clang.vars | 3
-rw-r--r--  etc/scons/mongodbtoolchain_stable_gcc.vars | 3
-rw-r--r--  etc/scons/mongodbtoolchain_testing_clang.vars | 3
-rw-r--r--  etc/scons/mongodbtoolchain_testing_gcc.vars | 3
-rw-r--r--  etc/scons/mongodbtoolchain_v3_clang.vars | 10
-rw-r--r--  etc/scons/mongodbtoolchain_v3_gcc.vars | 10
-rw-r--r--  etc/scons/xcode_ios.vars | 6
-rw-r--r--  etc/scons/xcode_ios_sim.vars | 6
-rw-r--r--  etc/scons/xcode_macosx.vars | 6
-rw-r--r--  etc/scons/xcode_tvos.vars | 6
-rw-r--r--  etc/scons/xcode_tvos_sim.vars | 6
-rw-r--r--  etc/scons/xcode_watchos.vars | 6
-rw-r--r--  etc/scons/xcode_watchos_sim.vars | 6
-rw-r--r--  jstests/free_mon/libs/mock_http_common.py | 1
-rw-r--r--  jstests/free_mon/libs/mock_http_control.py | 1
-rw-r--r--  jstests/free_mon/libs/mock_http_server.py | 34
-rw-r--r--  jstests/noPassthrough/libs/configExpand/reflect.py | 5
-rw-r--r--  jstests/noPassthrough/libs/configExpand/rest_server.py | 3
-rw-r--r--  jstests/ssl/tls_enumerator.py | 10
-rw-r--r--  mypy.ini | 3
-rwxr-xr-x  pytests/powertest.py | 399
-rw-r--r--  site_scons/libdeps.py | 92
-rw-r--r--  site_scons/mongo/__init__.py | 2
-rw-r--r--  site_scons/mongo/generators.py | 75
-rw-r--r--  site_scons/mongo/platform.py | 20
-rw-r--r--  site_scons/mongo/toolchain.py | 23
-rw-r--r--  site_scons/site_tools/abilink.py | 12
-rw-r--r--  site_scons/site_tools/auto_install_binaries.py | 21
-rw-r--r--  site_scons/site_tools/compilation_db.py | 49
-rw-r--r--  site_scons/site_tools/dagger/__init__.py | 17
-rw-r--r--  site_scons/site_tools/dagger/dagger.py | 16
-rw-r--r--  site_scons/site_tools/dagger/graph.py | 69
-rw-r--r--  site_scons/site_tools/dagger/graph_consts.py | 5
-rw-r--r--  site_scons/site_tools/dagger/graph_test.py | 62
-rw-r--r--  site_scons/site_tools/distsrc.py | 25
-rw-r--r--  site_scons/site_tools/git_decider.py | 5
-rw-r--r--  site_scons/site_tools/gziptool.py | 3
-rw-r--r--  site_scons/site_tools/icecream.py | 26
-rwxr-xr-x  site_scons/site_tools/idl_tool.py | 20
-rw-r--r--  site_scons/site_tools/incremental_link.py | 3
-rw-r--r--  site_scons/site_tools/jsheader.py | 9
-rw-r--r--  site_scons/site_tools/jstoh.py | 4
-rw-r--r--  site_scons/site_tools/libtool.py | 2
-rw-r--r--  site_scons/site_tools/mongo_benchmark.py | 7
-rw-r--r--  site_scons/site_tools/mongo_integrationtest.py | 7
-rw-r--r--  site_scons/site_tools/mongo_unittest.py | 7
-rw-r--r--  site_scons/site_tools/separate_debug.py | 35
-rw-r--r--  site_scons/site_tools/split_dwarf.py | 5
-rw-r--r--  site_scons/site_tools/thin_archive.py | 18
-rw-r--r--  site_scons/site_tools/xcode.py | 2
-rw-r--r--  src/mongo/SConscript | 8
-rw-r--r--  src/mongo/base/generate_error_codes.py | 38
-rwxr-xr-x  src/mongo/db/auth/generate_action_types.py | 12
-rw-r--r--  src/mongo/db/fts/generate_stop_words.py | 9
-rw-r--r--  src/mongo/db/fts/unicode/gen_casefold_map.py | 15
-rw-r--r--  src/mongo/db/fts/unicode/gen_delimiter_list.py | 23
-rw-r--r--  src/mongo/db/fts/unicode/gen_diacritic_list.py | 10
-rw-r--r--  src/mongo/db/fts/unicode/gen_diacritic_map.py | 18
-rw-r--r--  src/mongo/db/fts/unicode/gen_helper.py | 3
-rwxr-xr-x  src/mongo/installer/compass/install_compass.in | 21
-rw-r--r--  src/mongo/installer/msi/SConscript | 8
-rwxr-xr-x  src/mongo/util/generate_icu_init_cpp.py | 6
257 files changed, 2651 insertions, 2948 deletions
diff --git a/.pylintrc b/.pylintrc
index c1b73f1829d..cb1a322b41f 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -22,8 +22,15 @@ variable-rgx=[a-z_][a-z0-9_]{1,50}$
# R1705 - no-else-return - sometimes an unnecessary else helps readability
# W0511 - fixme - ignore TODOs in comments
# W0611 - unused-import - typing module is needed for mypy
+# R0205 - useless-object-inheritance - See PM-1380
+# W0402 - deprecated-module - See PM-1380
+# W1505 - deprecated-method - See PM-1380
+# W0107 - unnecessary-pass - See PM-1380
+# R1720 - no-else-raise - See PM-1380
+# W0122 - exec-used - See PM-1380
+# R0801 - duplicate-code - See PM-1380
-disable=bad-continuation,fixme,import-error,line-too-long,no-member,locally-disabled,no-else-return,redefined-variable-type,too-few-public-methods,unused-import
+disable=bad-continuation,fixme,import-error,line-too-long,no-member,locally-disabled,no-else-return,redefined-variable-type,too-few-public-methods,unused-import,useless-object-inheritance,deprecated-module,unnecessary-pass,duplicate-code,no-else-raise,deprecated-method,exec-used
[IMPORTS]
known-third-party=boto3,botocore,psutil,yaml,xmlrunner
diff --git a/SConstruct b/SConstruct
index 26a5678664e..ab163523055 100644
--- a/SConstruct
+++ b/SConstruct
@@ -27,7 +27,7 @@ import mongo.platform as mongo_platform
import mongo.toolchain as mongo_toolchain
import mongo.generators as mongo_generators
-EnsurePythonVersion(2, 7)
+EnsurePythonVersion(3, 5)
EnsureSConsVersion(3, 0, 4)
from buildscripts import utils
@@ -466,7 +466,7 @@ win_version_min_choices = {
}
add_option('win-version-min',
- choices=win_version_min_choices.keys(),
+ choices=list(win_version_min_choices.keys()),
default=None,
help='minimum Windows version to support',
type='choice',
@@ -581,7 +581,7 @@ try:
except IOError as e:
# If the file error wasn't because the file is missing, error out
if e.errno != errno.ENOENT:
- print("Error opening version.json: {0}".format(e.strerror))
+ print(("Error opening version.json: {0}".format(e.strerror)))
Exit(1)
version_data = {
@@ -590,14 +590,14 @@ except IOError as e:
}
except ValueError as e:
- print("Error decoding version.json: {0}".format(e))
+ print(("Error decoding version.json: {0}".format(e)))
Exit(1)
# Setup the command-line variables
def variable_shlex_converter(val):
# If the argument is something other than a string, propogate
# it literally.
- if not isinstance(val, basestring):
+ if not isinstance(val, str):
return val
parse_mode = get_option('variable-parse-mode')
if parse_mode == 'auto':
@@ -661,7 +661,7 @@ def variable_distsrc_converter(val):
variables_files = variable_shlex_converter(get_option('variables-files'))
for file in variables_files:
- print("Using variable customization file %s" % file)
+ print(("Using variable customization file %s" % file))
env_vars = Variables(
files=variables_files,
@@ -670,7 +670,7 @@ env_vars = Variables(
sconsflags = os.environ.get('SCONSFLAGS', None)
if sconsflags:
- print("Using SCONSFLAGS environment variable arguments: %s" % sconsflags)
+ print(("Using SCONSFLAGS environment variable arguments: %s" % sconsflags))
env_vars.Add('ABIDW',
help="Configures the path to the 'abidw' (a libabigail) utility")
@@ -800,7 +800,7 @@ env_vars.Add('MONGO_DISTNAME',
def validate_mongo_version(key, val, env):
regex = r'^(\d+)\.(\d+)\.(\d+)-?((?:(rc)(\d+))?.*)?'
if not re.match(regex, val):
- print("Invalid MONGO_VERSION '{}', or could not derive from version.json or git metadata. Please add a conforming MONGO_VERSION=x.y.z[-extra] as an argument to SCons".format(val))
+ print(("Invalid MONGO_VERSION '{}', or could not derive from version.json or git metadata. Please add a conforming MONGO_VERSION=x.y.z[-extra] as an argument to SCons".format(val)))
Exit(1)
env_vars.Add('MONGO_VERSION',
@@ -935,12 +935,12 @@ if installDir[0] not in ['$', '#']:
Exit(1)
sconsDataDir = Dir(buildDir).Dir('scons')
-SConsignFile(str(sconsDataDir.File('sconsign')))
+SConsignFile(str(sconsDataDir.File('sconsign.py3')))
def printLocalInfo():
import sys, SCons
- print( "scons version: " + SCons.__version__ )
- print( "python version: " + " ".join( [ `i` for i in sys.version_info ] ) )
+ print(( "scons version: " + SCons.__version__ ))
+ print(( "python version: " + " ".join( [ repr(i) for i in sys.version_info ] ) ))
printLocalInfo()
@@ -1020,12 +1020,12 @@ env.AddMethod(mongo_platform.env_os_is_wrapper, 'TargetOSIs')
env.AddMethod(mongo_platform.env_get_os_name_wrapper, 'GetTargetOSName')
def fatal_error(env, msg, *args):
- print(msg.format(*args))
+ print((msg.format(*args)))
Exit(1)
def conf_error(env, msg, *args):
- print(msg.format(*args))
- print("See {0} for details".format(env.File('$CONFIGURELOG').abspath))
+ print((msg.format(*args)))
+ print(("See {0} for details".format(env.File('$CONFIGURELOG').abspath)))
Exit(1)
env.AddMethod(fatal_error, 'FatalError')
@@ -1044,12 +1044,12 @@ else:
env.AddMethod(lambda env: env['VERBOSE'], 'Verbose')
if has_option('variables-help'):
- print(env_vars.GenerateHelpText(env))
+ print((env_vars.GenerateHelpText(env)))
Exit(0)
unknown_vars = env_vars.UnknownVariables()
if unknown_vars:
- env.FatalError("Unknown variables specified: {0}", ", ".join(unknown_vars.keys()))
+ env.FatalError("Unknown variables specified: {0}", ", ".join(list(unknown_vars.keys())))
def set_config_header_define(env, varname, varval = 1):
env['CONFIG_HEADER_DEFINES'][varname] = varval
@@ -1135,7 +1135,7 @@ def CheckForProcessor(context, which_arch):
context.Result(ret)
return ret;
- for k in processor_macros.keys():
+ for k in list(processor_macros.keys()):
ret = run_compile_check(k)
if ret:
context.Result('Detected a %s processor' % k)
@@ -1258,7 +1258,7 @@ else:
env['TARGET_ARCH'] = detected_processor
if env['TARGET_OS'] not in os_macros:
- print("No special config for [{0}] which probably means it won't work".format(env['TARGET_OS']))
+ print(("No special config for [{0}] which probably means it won't work".format(env['TARGET_OS'])))
elif not detectConf.CheckForOS(env['TARGET_OS']):
env.ConfError("TARGET_OS ({0}) is not supported by compiler", env['TARGET_OS'])
@@ -1393,8 +1393,8 @@ if link_model.startswith("dynamic"):
if env.TargetOSIs('darwin'):
if link_model.startswith('dynamic'):
- print("WARNING: Building MongoDB server with dynamic linking " +
- "on macOS is not supported. Static linking is recommended.")
+ print(("WARNING: Building MongoDB server with dynamic linking " +
+ "on macOS is not supported. Static linking is recommended."))
if link_model == "dynamic-strict":
# Darwin is strict by default
@@ -2151,7 +2151,7 @@ def doConfigure(myenv):
# form -Wno-xxx (but not -Wno-error=xxx), we also add -Wxxx to the flags. GCC does
# warn on unknown -Wxxx style flags, so this lets us probe for availablity of
# -Wno-xxx.
- for kw in test_mutation.keys():
+ for kw in list(test_mutation.keys()):
test_flags = test_mutation[kw]
for test_flag in test_flags:
if test_flag.startswith("-Wno-") and not test_flag.startswith("-Wno-error="):
@@ -2165,7 +2165,7 @@ def doConfigure(myenv):
# to make them real errors.
cloned.Append(CCFLAGS=['-Werror'])
conf = Configure(cloned, help=False, custom_tests = {
- 'CheckFlag' : lambda(ctx) : CheckFlagTest(ctx, tool, extension, flag)
+ 'CheckFlag' : lambda ctx : CheckFlagTest(ctx, tool, extension, flag)
})
available = conf.CheckFlag()
conf.Finish()
@@ -2645,7 +2645,7 @@ def doConfigure(myenv):
# Select those unique black files that are associated with the
# currently enabled sanitizers, but filter out those that are
# zero length.
- blackfiles = {v for (k, v) in blackfiles_map.iteritems() if k in sanitizer_list}
+ blackfiles = {v for (k, v) in blackfiles_map.items() if k in sanitizer_list}
blackfiles = [f for f in blackfiles if os.stat(f.path).st_size != 0]
# Filter out any blacklist options that the toolchain doesn't support.
@@ -2676,7 +2676,7 @@ def doConfigure(myenv):
llvm_symbolizer = get_option('llvm-symbolizer')
if os.path.isabs(llvm_symbolizer):
if not myenv.File(llvm_symbolizer).exists():
- print("WARNING: Specified symbolizer '%s' not found" % llvm_symbolizer)
+ print(("WARNING: Specified symbolizer '%s' not found" % llvm_symbolizer))
llvm_symbolizer = None
else:
llvm_symbolizer = myenv.WhereIs(llvm_symbolizer)
@@ -2997,7 +2997,7 @@ def doConfigure(myenv):
# TODO: If we could programmatically extract the paths from the info output
# we could give a better message here, but brew info's machine readable output
# doesn't seem to include the whole 'caveats' section.
- message = subprocess.check_output([brew, "info", "openssl"])
+ message = subprocess.check_output([brew, "info", "openssl"]).decode('utf-8')
advice = textwrap.dedent(
"""\
NOTE: HomeBrew installed to {0} appears to have OpenSSL installed.
@@ -3159,7 +3159,7 @@ def doConfigure(myenv):
# Either crypto engine is native,
# or it's OpenSSL and has been checked to be working.
conf.env.SetConfigHeaderDefine("MONGO_CONFIG_SSL")
- print("Using SSL Provider: {0}".format(ssl_provider))
+ print(("Using SSL Provider: {0}".format(ssl_provider)))
else:
ssl_provider = "none"
@@ -3182,7 +3182,7 @@ def doConfigure(myenv):
files = ['ssleay32.dll', 'libeay32.dll']
for extra_file in files:
if not addOpenSslLibraryToDistArchive(extra_file):
- print("WARNING: Cannot find SSL library '%s'" % extra_file)
+ print(("WARNING: Cannot find SSL library '%s'" % extra_file))
def checkHTTPLib(required=False):
# WinHTTP available on Windows
@@ -3558,7 +3558,7 @@ def doConfigure(myenv):
outputIndex = next((idx for idx in [0,1] if conf.CheckAltivecVbpermqOutput(idx)), None)
if outputIndex is not None:
- conf.env.SetConfigHeaderDefine("MONGO_CONFIG_ALTIVEC_VEC_VBPERMQ_OUTPUT_INDEX", outputIndex)
+ conf.env.SetConfigHeaderDefine("MONGO_CONFIG_ALTIVEC_VEC_VBPERMQ_OUTPUT_INDEX", outputIndex)
else:
myenv.ConfError("Running on ppc64le, but can't find a correct vec_vbpermq output index. Compiler or platform not supported")
@@ -3658,9 +3658,12 @@ def doLint( env , target , source ):
import buildscripts.pylinters
buildscripts.pylinters.lint_all(None, {}, [])
- import buildscripts.lint
- if not buildscripts.lint.run_lint( [ "src/mongo/" ] ):
- raise Exception( "lint errors" )
+ env.Command(
+ target="#run_lint",
+ source=["buildscripts/lint.py", "src/mongo"],
+ action="$PYTHON $SOURCES[0] $SOURCES[1]",
+ )
+
env.Alias( "lint" , [] , [ doLint ] )
env.AlwaysBuild( "lint" )
diff --git a/buildscripts/aggregate_tracefiles.py b/buildscripts/aggregate_tracefiles.py
index ef2c2d02615..065ec608425 100644
--- a/buildscripts/aggregate_tracefiles.py
+++ b/buildscripts/aggregate_tracefiles.py
@@ -20,7 +20,7 @@ def aggregate(inputs, output):
args += ['-o', output]
- print ' '.join(args)
+ print(' '.join(args))
return subprocess.call(args)
diff --git a/buildscripts/aws_ec2.py b/buildscripts/aws_ec2.py
index bf2677074b7..251b1b7a1b1 100755
--- a/buildscripts/aws_ec2.py
+++ b/buildscripts/aws_ec2.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python
"""AWS EC2 instance launcher and controller."""
-from __future__ import print_function
+
import base64
import collections
@@ -88,12 +88,13 @@ class AwsEc2(object):
if reached_state:
print(" Instance {}!".format(instance.state["Name"]), file=sys.stdout)
else:
- print(" Instance in state '{}', failed to reach state '{}'{}!".format(
- instance.state["Name"], state, client_error), file=sys.stdout)
+ print(
+ " Instance in state '{}', failed to reach state '{}'{}!".format(
+ instance.state["Name"], state, client_error), file=sys.stdout)
sys.stdout.flush()
return 0 if reached_state else 1
- def control_instance( #pylint: disable=too-many-arguments,too-many-branches
+ def control_instance( #pylint: disable=too-many-arguments,too-many-branches,too-many-locals
self, mode, image_id, wait_time_secs=0, show_progress=False, console_output_file=None,
console_screenshot_file=None):
"""Control an AMI instance. Returns 0 & status information, if successful."""
@@ -238,27 +239,29 @@ def main(): # pylint: disable=too-many-locals,too-many-statements
create_options = optparse.OptionGroup(parser, "Create options")
status_options = optparse.OptionGroup(parser, "Status options")
- parser.add_option("--mode", dest="mode", choices=_MODES, default="status",
- help=("Operations to perform on an EC2 instance, choose one of"
- " '{}', defaults to '%default'.".format(", ".join(_MODES))))
+ parser.add_option(
+ "--mode", dest="mode", choices=_MODES, default="status",
+ help=("Operations to perform on an EC2 instance, choose one of"
+ " '{}', defaults to '%default'.".format(", ".join(_MODES))))
control_options.add_option("--imageId", dest="image_id", default=None,
help="EC2 image_id to perform operation on [REQUIRED for control].")
- control_options.add_option("--waitTimeSecs", dest="wait_time_secs", type=int, default=5 * 60,
- help=("Time to wait for EC2 instance to reach it's new state,"
- " defaults to '%default'."))
+ control_options.add_option(
+ "--waitTimeSecs", dest="wait_time_secs", type=int, default=5 * 60,
+ help=("Time to wait for EC2 instance to reach it's new state,"
+ " defaults to '%default'."))
create_options.add_option("--ami", dest="ami", default=None,
help="EC2 AMI to launch [REQUIRED for create].")
- create_options.add_option("--blockDevice", dest="block_devices",
- metavar="DEVICE-NAME DEVICE-SIZE-GB", action="append", default=[],
- nargs=2,
- help=("EBS device name and volume size in GiB."
- " More than one device can be attached, by specifying"
- " this option more than once."
- " The device will be deleted on termination of the instance."))
+ create_options.add_option(
+ "--blockDevice", dest="block_devices", metavar="DEVICE-NAME DEVICE-SIZE-GB",
+ action="append", default=[], nargs=2,
+ help=("EBS device name and volume size in GiB."
+ " More than one device can be attached, by specifying"
+ " this option more than once."
+ " The device will be deleted on termination of the instance."))
create_options.add_option("--instanceType", dest="instance_type", default="t1.micro",
help="EC2 instance type to launch, defaults to '%default'.")
@@ -266,15 +269,15 @@ def main(): # pylint: disable=too-many-locals,too-many-statements
create_options.add_option("--keyName", dest="key_name", default=None,
help="EC2 key name [REQUIRED for create].")
- create_options.add_option("--securityGroupIds", dest="security_group_ids", action="append",
- default=[],
- help=("EC2 security group ids. More than one security group id can be"
- " added, by specifying this option more than once."))
+ create_options.add_option(
+ "--securityGroupIds", dest="security_group_ids", action="append", default=[],
+ help=("EC2 security group ids. More than one security group id can be"
+ " added, by specifying this option more than once."))
- create_options.add_option("--securityGroup", dest="security_groups", action="append",
- default=[],
- help=("EC2 security group. More than one security group can be added,"
- " by specifying this option more than once."))
+ create_options.add_option(
+ "--securityGroup", dest="security_groups", action="append", default=[],
+ help=("EC2 security group. More than one security group can be added,"
+ " by specifying this option more than once."))
create_options.add_option("--subnetId", dest="subnet_id", default=None,
help="EC2 subnet id to use in VPC.")
@@ -296,14 +299,15 @@ def main(): # pylint: disable=too-many-locals,too-many-statements
status_options.add_option("--yamlFile", dest="yaml_file", default=None,
help="Save the status into the specified YAML file.")
- status_options.add_option("--consoleOutputFile", dest="console_output_file", default=None,
- help="Save the console output into the specified file, if"
- " available.")
+ status_options.add_option(
+ "--consoleOutputFile", dest="console_output_file", default=None,
+ help="Save the console output into the specified file, if"
+ " available.")
- status_options.add_option("--consoleScreenshotFile", dest="console_screenshot_file",
- default=None,
- help="Save the console screenshot (JPG format) into the specified"
- " file, if available.")
+ status_options.add_option(
+ "--consoleScreenshotFile", dest="console_screenshot_file", default=None,
+ help="Save the console screenshot (JPG format) into the specified"
+ " file, if available.")
parser.add_option_group(control_options)
parser.add_option_group(create_options)
@@ -331,8 +335,8 @@ def main(): # pylint: disable=too-many-locals,too-many-statements
# The 'expire-on' key is a UTC time.
expire_dt = datetime.datetime.utcnow() + datetime.timedelta(hours=options.tag_expire_hours)
tags = [{"Key": "expire-on", "Value": expire_dt.strftime("%Y-%m-%d %H:%M:%S")},
- {"Key": "Name",
- "Value": options.tag_name}, {"Key": "owner", "Value": options.tag_owner}]
+ {"Key": "Name", "Value": options.tag_name},
+ {"Key": "owner", "Value": options.tag_owner}]
my_kwargs = {}
if options.extra_args is not None:
diff --git a/buildscripts/burn_in_tests.py b/buildscripts/burn_in_tests.py
index d39d3c0eef9..d3bf34df2f8 100644
--- a/buildscripts/burn_in_tests.py
+++ b/buildscripts/burn_in_tests.py
@@ -1,8 +1,6 @@
#!/usr/bin/env python
"""Command line utility for determining what jstests have been added or modified."""
-from __future__ import absolute_import
-from __future__ import print_function
import collections
import copy
@@ -13,7 +11,7 @@ import subprocess
import re
import shlex
import sys
-import urlparse
+import urllib.parse
import requests
import yaml
@@ -56,13 +54,15 @@ def parse_command_line():
parser = optparse.OptionParser(usage="Usage: %prog [options] [resmoke command]")
- parser.add_option("--maxRevisions", dest="max_revisions", type=int, default=25,
- help=("Maximum number of revisions to check for changes. Default is"
- " %default."))
+ parser.add_option(
+ "--maxRevisions", dest="max_revisions", type=int, default=25,
+ help=("Maximum number of revisions to check for changes. Default is"
+ " %default."))
- parser.add_option("--branch", dest="branch", default="master",
- help=("The name of the branch the working branch was based on. Default is"
- " '%default'."))
+ parser.add_option(
+ "--branch", dest="branch", default="master",
+ help=("The name of the branch the working branch was based on. Default is"
+ " '%default'."))
parser.add_option("--baseCommit", dest="base_commit", default=None,
help="The base commit to compare to for determining changes.")
@@ -79,14 +79,15 @@ def parse_command_line():
help=("The distro the tasks will execute on. Can only be specified"
" with --generateTasksFile."))
- parser.add_option("--checkEvergreen", dest="check_evergreen", default=False,
- action="store_true",
- help=("Checks Evergreen for the last commit that was scheduled."
- " This way all the tests that haven't been burned in will be run."))
+ parser.add_option(
+ "--checkEvergreen", dest="check_evergreen", default=False, action="store_true",
+ help=("Checks Evergreen for the last commit that was scheduled."
+ " This way all the tests that haven't been burned in will be run."))
- parser.add_option("--generateTasksFile", dest="generate_tasks_file", default=None,
- help=("Write an Evergreen generate.tasks JSON file. If this option is"
- " specified then no tests will be executed."))
+ parser.add_option(
+ "--generateTasksFile", dest="generate_tasks_file", default=None,
+ help=("Write an Evergreen generate.tasks JSON file. If this option is"
+ " specified then no tests will be executed."))
parser.add_option("--noExec", dest="no_exec", default=False, action="store_true",
help="Do not run resmoke loop on new tests.")
@@ -100,21 +101,25 @@ def parse_command_line():
parser.add_option("--testListOutfile", dest="test_list_outfile", default=None,
help="Write a JSON file with test executor information.")
- parser.add_option("--repeatTests", dest="repeat_tests_num", default=None, type=int,
- help="The number of times to repeat each test. If --repeatTestsSecs is not"
- " specified then this will be set to {}.".format(REPEAT_SUITES))
+ parser.add_option(
+ "--repeatTests", dest="repeat_tests_num", default=None, type=int,
+ help="The number of times to repeat each test. If --repeatTestsSecs is not"
+ " specified then this will be set to {}.".format(REPEAT_SUITES))
- parser.add_option("--repeatTestsMin", dest="repeat_tests_min", default=None, type=int,
- help="The minimum number of times to repeat each test when --repeatTestsSecs"
- " is specified.")
+ parser.add_option(
+ "--repeatTestsMin", dest="repeat_tests_min", default=None, type=int,
+ help="The minimum number of times to repeat each test when --repeatTestsSecs"
+ " is specified.")
- parser.add_option("--repeatTestsMax", dest="repeat_tests_max", default=None, type=int,
- help="The maximum number of times to repeat each test when --repeatTestsSecs"
- " is specified.")
+ parser.add_option(
+ "--repeatTestsMax", dest="repeat_tests_max", default=None, type=int,
+ help="The maximum number of times to repeat each test when --repeatTestsSecs"
+ " is specified.")
- parser.add_option("--repeatTestsSecs", dest="repeat_tests_secs", default=None, type=float,
- help="Time, in seconds, to repeat each test. Note that this option is"
- " mutually exclusive with with --repeatTests.")
+ parser.add_option(
+ "--repeatTestsSecs", dest="repeat_tests_secs", default=None, type=float,
+ help="Time, in seconds, to repeat each test. Note that this option is"
+ " mutually exclusive with with --repeatTests.")
# This disables argument parsing on the first unrecognized parameter. This allows us to pass
# a complete resmoke.py command line without accidentally parsing its options.
@@ -169,7 +174,7 @@ def find_last_activated_task(revisions, variant, branch_name):
evg_cfg = evergreen_client.read_evg_config()
if evg_cfg is not None and "api_server_host" in evg_cfg:
api_server = "{url.scheme}://{url.netloc}".format(
- url=urlparse.urlparse(evg_cfg["api_server_host"]))
+ url=urllib.parse.urlparse(evg_cfg["api_server_host"]))
else:
api_server = API_SERVER_DEFAULT
@@ -210,6 +215,7 @@ def find_changed_tests( # pylint: disable=too-many-locals
if base_commit is None:
base_commit = repo.get_merge_base([branch_name + "@{upstream}", "HEAD"])
+
if check_evergreen:
# We're going to check up to 200 commits in Evergreen for the last scheduled one.
# The current commit will be activated in Evergreen; we use --skip to start at the
@@ -268,8 +274,8 @@ def find_excludes(selector_file):
try:
js_test = yml["selector"]["js_test"]
except KeyError:
- raise Exception(
- "The selector file " + selector_file + " is missing the 'selector.js_test' key")
+ raise Exception("The selector file " + selector_file +
+ " is missing the 'selector.js_test' key")
return (resmokelib.utils.default_if_none(js_test.get("exclude_suites"), []),
resmokelib.utils.default_if_none(js_test.get("exclude_tasks"), []),
@@ -352,7 +358,7 @@ def create_task_list( #pylint: disable=too-many-locals
evg_buildvariant = evergreen_conf.get_variant(buildvariant)
if not evg_buildvariant:
- print("Buildvariant '{}' not found".format(buildvariant))
+ print("Buildvariant '{}' not found in {}".format(buildvariant, evergreen_conf.path))
sys.exit(1)
# Find all the buildvariant tasks.
diff --git a/buildscripts/bypass_compile_and_fetch_binaries.py b/buildscripts/bypass_compile_and_fetch_binaries.py
index a3fd00738b5..addf335e162 100755
--- a/buildscripts/bypass_compile_and_fetch_binaries.py
+++ b/buildscripts/bypass_compile_and_fetch_binaries.py
@@ -1,8 +1,8 @@
#!/usr/bin/env python
"""Bypass compile and fetch binaries."""
-from __future__ import absolute_import
-from __future__ import print_function
+
+
import argparse
import json
@@ -11,10 +11,10 @@ import re
import sys
import tarfile
-import urllib
+import urllib.request, urllib.parse, urllib.error
# pylint: disable=ungrouped-imports
try:
- from urlparse import urlparse
+ from urllib.parse import urlparse
except ImportError:
from urllib.parse import urlparse # type: ignore
# pylint: enable=ungrouped-imports
@@ -259,8 +259,8 @@ def main(): # pylint: disable=too-many-locals,too-many-statements
print("Retrieving archive {}".format(filename))
# This is the artifacts.tgz as referenced in evergreen.yml.
try:
- urllib.urlretrieve(artifact["url"], filename)
- except urllib.ContentTooShortError:
+ urllib.request.urlretrieve(artifact["url"], filename)
+ except urllib.error.ContentTooShortError:
print("The artifact {} could not be completely downloaded. Default"
" compile bypass to false.".format(filename))
return
@@ -287,8 +287,8 @@ def main(): # pylint: disable=too-many-locals,too-many-statements
print("Retrieving mongo source {}".format(filename))
# This is the distsrc.[tgz|zip] as referenced in evergreen.yml.
try:
- urllib.urlretrieve(artifact["url"], filename)
- except urllib.ContentTooShortError:
+ urllib.request.urlretrieve(artifact["url"], filename)
+ except urllib.error.ContentTooShortError:
print("The artifact {} could not be completely downloaded. Default"
" compile bypass to false.".format(filename))
return
@@ -310,9 +310,9 @@ def main(): # pylint: disable=too-many-locals,too-many-statements
# SERVER-21492 related issue where without running scons the jstests/libs/key1
# and key2 files are not chmod to 0600. Need to change permissions here since we
# bypass SCons.
- os.chmod("jstests/libs/key1", 0600)
- os.chmod("jstests/libs/key2", 0600)
- os.chmod("jstests/libs/keyForRollover", 0600)
+ os.chmod("jstests/libs/key1", 0o600)
+ os.chmod("jstests/libs/key2", 0o600)
+ os.chmod("jstests/libs/keyForRollover", 0o600)
# This is the artifacts.json file.
write_out_artifacts(args.jsonArtifact, artifacts)
diff --git a/buildscripts/ciconfig/evergreen.py b/buildscripts/ciconfig/evergreen.py
index 3e3107a4647..857f5a1779e 100644
--- a/buildscripts/ciconfig/evergreen.py
+++ b/buildscripts/ciconfig/evergreen.py
@@ -61,7 +61,7 @@ class EvergreenProjectConfig(object): # pylint: disable=too-many-instance-attri
@property
def task_names(self):
"""Get the list of task names."""
- return self._tasks_by_name.keys()
+ return list(self._tasks_by_name.keys())
def get_task(self, task_name):
"""Return the task with the given name as a Task instance."""
@@ -70,7 +70,7 @@ class EvergreenProjectConfig(object): # pylint: disable=too-many-instance-attri
@property
def task_group_names(self):
"""Get the list of task_group names."""
- return self._task_groups_by_name.keys()
+ return list(self._task_groups_by_name.keys())
def get_task_group(self, task_group_name):
"""Return the task_group with the given name as a Task instance."""
@@ -92,7 +92,7 @@ class EvergreenProjectConfig(object): # pylint: disable=too-many-instance-attri
@property
def variant_names(self):
"""Get the list of build variant names."""
- return self._variants_by_name.keys()
+ return list(self._variants_by_name.keys())
def get_variant(self, variant_name):
"""Return the variant with the given name as a Variant instance."""
diff --git a/buildscripts/ciconfig/tags.py b/buildscripts/ciconfig/tags.py
index 1627c117a98..268c6707214 100644
--- a/buildscripts/ciconfig/tags.py
+++ b/buildscripts/ciconfig/tags.py
@@ -1,7 +1,7 @@
"""Module to access and modify tag configuration files used by resmoke."""
-from __future__ import absolute_import
-from __future__ import print_function
+
+
import collections
import copy
@@ -9,10 +9,12 @@ import textwrap
import yaml
+from functools import cmp_to_key
+
# Setup to preserve order in yaml.dump, see https://stackoverflow.com/a/8661021
def _represent_dict_order(self, data):
- return self.represent_mapping("tag:yaml.org,2002:map", data.items())
+ return self.represent_mapping("tag:yaml.org,2002:map", list(data.items()))
yaml.add_representer(collections.OrderedDict, _represent_dict_order)
@@ -57,11 +59,11 @@ class TagsConfig(object):
def get_test_kinds(self):
"""List the test kinds."""
- return self._conf.keys()
+ return list(self._conf.keys())
def get_test_patterns(self, test_kind):
"""List the test patterns under 'test_kind'."""
- return getdefault(self._conf, test_kind, {}).keys()
+ return list(getdefault(self._conf, test_kind, {}).keys())
def get_tags(self, test_kind, test_pattern):
"""List the tags under 'test_kind' and 'test_pattern'."""
@@ -74,7 +76,7 @@ class TagsConfig(object):
tags = setdefault(patterns, test_pattern, [])
if tag not in tags:
tags.append(tag)
- tags.sort(cmp=self._cmp_func)
+ tags.sort(key=cmp_to_key(self._cmp_func) if self._cmp_func else None)
return True
return False
@@ -110,8 +112,9 @@ class TagsConfig(object):
"""
with open(filename, "w") as fstream:
if preamble:
- print(textwrap.fill(preamble, width=100, initial_indent="# ",
- subsequent_indent="# "), file=fstream)
+ print(
+ textwrap.fill(preamble, width=100, initial_indent="# ", subsequent_indent="# "),
+ file=fstream)
# We use yaml.safe_dump() in order avoid having strings being written to the file as
# "!!python/unicode ..." and instead have them written as plain 'str' instances.
diff --git a/buildscripts/clang_format.py b/buildscripts/clang_format.py
index 477b4750d99..b1fee3b0239 100755
--- a/buildscripts/clang_format.py
+++ b/buildscripts/clang_format.py
@@ -7,7 +7,7 @@
4. Has support for checking which files are to be checked.
5. Supports validating and updating a set of files to the right coding style.
"""
-from __future__ import print_function, absolute_import
+
import difflib
import glob
@@ -20,7 +20,7 @@ import sys
import tarfile
import tempfile
import threading
-import urllib2
+import urllib.request, urllib.error, urllib.parse
from distutils import spawn # pylint: disable=no-name-in-module
from optparse import OptionParser
from multiprocessing import cpu_count
@@ -52,14 +52,14 @@ CLANG_FORMAT_HTTP_LINUX_CACHE = "https://s3.amazonaws.com/boxes.10gen.com/build/
CLANG_FORMAT_HTTP_DARWIN_CACHE = "https://s3.amazonaws.com/boxes.10gen.com/build/clang%2Bllvm-3.8.0-x86_64-apple-darwin.tar.xz"
# Path in the tarball to the clang-format binary
-CLANG_FORMAT_SOURCE_TAR_BASE = string.Template(
- "clang+llvm-$version-$tar_path/bin/" + CLANG_FORMAT_PROGNAME)
+CLANG_FORMAT_SOURCE_TAR_BASE = string.Template("clang+llvm-$version-$tar_path/bin/" +
+ CLANG_FORMAT_PROGNAME)
##############################################################################
def callo(args):
"""Call a program, and capture its output."""
- return subprocess.check_output(args)
+ return subprocess.check_output(args).decode('utf-8')
def get_tar_path(version, tar_path):
@@ -96,11 +96,11 @@ def get_clang_format_from_cache_and_extract(url, tarball_ext):
num_tries = 5
for attempt in range(num_tries):
try:
- resp = urllib2.urlopen(url)
+ resp = urllib.request.urlopen(url)
with open(temp_tar_file, 'wb') as fh:
fh.write(resp.read())
break
- except urllib2.URLError:
+ except urllib.error.URLError:
if attempt == num_tries - 1:
raise
continue
@@ -229,7 +229,7 @@ class ClangFormat(object):
def _lint(self, file_name, print_diff):
"""Check the specified file has the correct format."""
with open(file_name, 'rb') as original_text:
- original_file = original_text.read()
+ original_file = original_text.read().decode('utf-8')
# Get formatted file as clang-format would format the file
formatted_file = callo([self.path, "--style=file", file_name])
@@ -381,9 +381,9 @@ def reformat_branch( # pylint: disable=too-many-branches,too-many-locals,too-ma
"Commit After Reformat '%s' is not a valid commit in this repo" % commit_after_reformat)
if not repo.is_ancestor(commit_prior_to_reformat, commit_after_reformat):
- raise ValueError(("Commit Prior to Reformat '%s' is not a valid ancestor of Commit After" +
- " Reformat '%s' in this repo") % (commit_prior_to_reformat,
- commit_after_reformat))
+ raise ValueError(
+ ("Commit Prior to Reformat '%s' is not a valid ancestor of Commit After" +
+ " Reformat '%s' in this repo") % (commit_prior_to_reformat, commit_after_reformat))
# Validate the user is on a local branch that has the right merge base
if repo.is_detached():
diff --git a/buildscripts/client/evergreen.py b/buildscripts/client/evergreen.py
index fe45d3bee04..be59b61ecd6 100644
--- a/buildscripts/client/evergreen.py
+++ b/buildscripts/client/evergreen.py
@@ -4,7 +4,7 @@ import os
import time
try:
- from urlparse import urlparse
+ from urllib.parse import urlparse
except ImportError:
from urllib.parse import urlparse # type: ignore
diff --git a/buildscripts/collect_resource_info.py b/buildscripts/collect_resource_info.py
index 43fb4930ab6..697179bcc8f 100755
--- a/buildscripts/collect_resource_info.py
+++ b/buildscripts/collect_resource_info.py
@@ -1,8 +1,8 @@
#!/usr/bin/env python
"""Collect system resource information on processes running in Evergreen on a given interval."""
-from __future__ import absolute_import
-from __future__ import print_function
+
+
from datetime import datetime
import optparse
@@ -24,13 +24,15 @@ def main():
"""Main."""
usage = "usage: %prog [options]"
parser = optparse.OptionParser(description=__doc__, usage=usage)
- parser.add_option("-i", "--interval", dest="interval", default=5, type="int",
- help="Collect system resource information every <interval> seconds. "
- "Default is every 5 seconds.")
- parser.add_option("-o", "--output-file", dest="outfile", default="-",
- help="If '-', then the file is written to stdout."
- " Any other value is treated as the output file name. By default,"
- " output is written to stdout.")
+ parser.add_option(
+ "-i", "--interval", dest="interval", default=5, type="int",
+ help="Collect system resource information every <interval> seconds. "
+ "Default is every 5 seconds.")
+ parser.add_option(
+ "-o", "--output-file", dest="outfile", default="-",
+ help="If '-', then the file is written to stdout."
+ " Any other value is treated as the output file name. By default,"
+ " output is written to stdout.")
(options, _) = parser.parse_args()
@@ -40,8 +42,9 @@ def main():
# Requires the Evergreen agent to be running on port 2285.
response = requests.get("http://localhost:2285/status")
if response.status_code != requests.codes.ok:
- print("Received a {} HTTP response: {}".format(response.status_code,
- response.text), file=sys.stderr)
+ print(
+ "Received a {} HTTP response: {}".format(response.status_code,
+ response.text), file=sys.stderr)
time.sleep(options.interval)
continue
diff --git a/buildscripts/combine_reports.py b/buildscripts/combine_reports.py
index 45fc0bcf38b..8245b3afb8f 100755
--- a/buildscripts/combine_reports.py
+++ b/buildscripts/combine_reports.py
@@ -1,8 +1,8 @@
#!/usr/bin/env python
"""Combine JSON report files used in Evergreen."""
-from __future__ import absolute_import
-from __future__ import print_function
+
+
import errno
import json
@@ -45,7 +45,7 @@ def check_error(input_count, output_count):
if (not input_count) and (not output_count):
raise ValueError("None of the input file(s) or output file exists")
- elif input_count and output_count:
+ if input_count and output_count:
raise ValueError("Both input file and output files exist")
@@ -53,10 +53,11 @@ def main():
"""Execute Main program."""
usage = "usage: %prog [options] report1.json report2.json ..."
parser = OptionParser(description=__doc__, usage=usage)
- parser.add_option("-o", "--output-file", dest="outfile", default="-",
- help=("If '-', then the combined report file is written to stdout."
- " Any other value is treated as the output file name. By default,"
- " output is written to stdout."))
+ parser.add_option(
+ "-o", "--output-file", dest="outfile", default="-",
+ help=("If '-', then the combined report file is written to stdout."
+ " Any other value is treated as the output file name. By default,"
+ " output is written to stdout."))
parser.add_option("-x", "--no-report-exit", dest="report_exit", default=True,
action="store_false",
help="Do not exit with a non-zero code if any test in the report fails.")
diff --git a/buildscripts/cpplint.py b/buildscripts/cpplint.py
index 75d0362a851..d567a417e45 100755
--- a/buildscripts/cpplint.py
+++ b/buildscripts/cpplint.py
@@ -456,7 +456,7 @@ _ALT_TOKEN_REPLACEMENT = {
# False positives include C-style multi-line comments and multi-line strings
# but those have always been troublesome for cpplint.
_ALT_TOKEN_REPLACEMENT_PATTERN = re.compile(
- r'[ =()](' + ('|'.join(_ALT_TOKEN_REPLACEMENT.keys())) + r')(?=[ (]|$)')
+ r'[ =()](' + ('|'.join(list(_ALT_TOKEN_REPLACEMENT.keys()))) + r')(?=[ (]|$)')
# These constants define types of headers for use with
@@ -836,7 +836,7 @@ class _CppLintState(object):
def PrintErrorCounts(self):
"""Print a summary of errors by category, and the total."""
- for category, count in self.errors_by_category.iteritems():
+ for category, count in self.errors_by_category.items():
sys.stderr.write('Category \'%s\' errors found: %d\n' %
(category, count))
sys.stderr.write('Total errors found: %d\n' % self.error_count)
@@ -1389,7 +1389,7 @@ def FindEndOfExpressionInLine(line, startpos, stack):
On finding an unclosed expression: (-1, None)
Otherwise: (-1, new stack at end of this line)
"""
- for i in xrange(startpos, len(line)):
+ for i in range(startpos, len(line)):
char = line[i]
if char in '([{':
# Found start of parenthesized expression, push to expression stack
@@ -1682,7 +1682,7 @@ def CheckForCopyright(filename, lines, error):
# We'll say it should occur by line 10. Don't forget there's a
# dummy line at the front.
- for line in xrange(1, min(len(lines), 11)):
+ for line in range(1, min(len(lines), 11)):
if re.search(r'Copyright', lines[line], re.I):
CheckForServerSidePublicLicense(line, filename, lines, error)
break
@@ -1741,10 +1741,10 @@ def CheckForServerSidePublicLicense(copyright_offset, filename, lines, error):
# We expect the first line of the license header to follow shortly after the
# "Copyright" message.
- for line in xrange(copyright_offset, min(len(lines), copyright_offset + 3)):
+ for line in range(copyright_offset, min(len(lines), copyright_offset + 3)):
if re.search(r'This program is free software', lines[line]):
license_header_start_line = line
- for i in xrange(len(license_header)):
+ for i in range(len(license_header)):
line = i + license_header_start_line
if line >= len(lines) or lines[line] != license_header[i]:
error(filename, 0, 'legal/license', 5,
@@ -1904,7 +1904,7 @@ def CheckForBadCharacters(filename, lines, error):
error: The function to call with any errors found.
"""
for linenum, line in enumerate(lines):
- if u'\ufffd' in line:
+ if '\ufffd' in line:
error(filename, linenum, 'readability/utf8', 5,
'Line contains invalid UTF-8 (or Unicode replacement character).')
if '\0' in line:
@@ -2950,7 +2950,7 @@ def CheckForFunctionLengths(filename, clean_lines, linenum,
if starting_func:
body_found = False
- for start_linenum in xrange(linenum, clean_lines.NumLines()):
+ for start_linenum in range(linenum, clean_lines.NumLines()):
start_line = lines[start_linenum]
joined_line += ' ' + start_line.lstrip()
if Search(r'(;|})', start_line): # Declarations and trivial functions
@@ -3427,7 +3427,7 @@ def CheckBracesSpacing(filename, clean_lines, linenum, error):
trailing_text = ''
if endpos > -1:
trailing_text = endline[endpos:]
- for offset in xrange(endlinenum + 1,
+ for offset in range(endlinenum + 1,
min(endlinenum + 3, clean_lines.NumLines() - 1)):
trailing_text += clean_lines.elided[offset]
if not Match(r'^[\s}]*[{.;,)<>\]:]', trailing_text):
@@ -3596,7 +3596,7 @@ def IsRValueType(clean_lines, nesting_state, linenum, column):
# Look for the previous 'for(' in the previous lines.
before_text = match_symbol.group(1)
- for i in xrange(start - 1, max(start - 6, 0), -1):
+ for i in range(start - 1, max(start - 6, 0), -1):
before_text = clean_lines.elided[i] + before_text
if Search(r'for\s*\([^{};]*$', before_text):
# This is the condition inside a for-loop
@@ -3723,12 +3723,12 @@ def IsRValueAllowed(clean_lines, linenum):
True if line is within the region where RValue references are allowed.
"""
# Allow region marked by PUSH/POP macros
- for i in xrange(linenum, 0, -1):
+ for i in range(linenum, 0, -1):
line = clean_lines.elided[i]
if Match(r'GOOGLE_ALLOW_RVALUE_REFERENCES_(?:PUSH|POP)', line):
if not line.endswith('PUSH'):
return False
- for j in xrange(linenum, clean_lines.NumLines(), 1):
+ for j in range(linenum, clean_lines.NumLines(), 1):
line = clean_lines.elided[j]
if Match(r'GOOGLE_ALLOW_RVALUE_REFERENCES_(?:PUSH|POP)', line):
return line.endswith('POP')
@@ -4208,7 +4208,7 @@ def CheckCheck(filename, clean_lines, linenum, error):
expression = lines[linenum][start_pos + 1:end_pos - 1]
else:
expression = lines[linenum][start_pos + 1:]
- for i in xrange(linenum + 1, end_line):
+ for i in range(linenum + 1, end_line):
expression += lines[i]
expression += last_line[0:end_pos - 1]
@@ -4336,7 +4336,7 @@ def GetLineWidth(line):
The width of the line in column positions, accounting for Unicode
combining characters and wide characters.
"""
- if isinstance(line, unicode):
+ if isinstance(line, str):
width = 0
for uc in unicodedata.normalize('NFC', line):
if unicodedata.east_asian_width(uc) in ('W', 'F'):
@@ -4689,7 +4689,7 @@ def _GetTextInside(text, start_pattern):
# Give opening punctuations to get the matching close-punctuations.
matching_punctuation = {'(': ')', '{': '}', '[': ']'}
- closing_punctuation = set(matching_punctuation.itervalues())
+ closing_punctuation = set(matching_punctuation.values())
# Find the position to start extracting text.
match = re.search(start_pattern, text, re.M)
@@ -5015,7 +5015,7 @@ def IsDerivedFunction(clean_lines, linenum):
virt-specifier.
"""
# Scan back a few lines for start of current function
- for i in xrange(linenum, max(-1, linenum - 10), -1):
+ for i in range(linenum, max(-1, linenum - 10), -1):
match = Match(r'^([^()]*\w+)\(', clean_lines.elided[i])
if match:
# Look for "override" after the matching closing parenthesis
@@ -5036,7 +5036,7 @@ def IsInitializerList(clean_lines, linenum):
True if current line appears to be inside constructor initializer
list, False otherwise.
"""
- for i in xrange(linenum, 1, -1):
+ for i in range(linenum, 1, -1):
line = clean_lines.elided[i]
if i == linenum:
remove_function_body = Match(r'^(.*)\{\s*$', line)
@@ -5132,7 +5132,7 @@ def CheckForNonConstReference(filename, clean_lines, linenum,
# Found the matching < on an earlier line, collect all
# pieces up to current line.
line = ''
- for i in xrange(startline, linenum + 1):
+ for i in range(startline, linenum + 1):
line += clean_lines.elided[i].strip()
# Check for non-const references in function parameters. A single '&' may
@@ -5156,7 +5156,7 @@ def CheckForNonConstReference(filename, clean_lines, linenum,
# appear inside the second set of parentheses on the current line as
# opposed to the first set.
if linenum > 0:
- for i in xrange(linenum - 1, max(0, linenum - 10), -1):
+ for i in range(linenum - 1, max(0, linenum - 10), -1):
previous_line = clean_lines.elided[i]
if not Search(r'[),]\s*$', previous_line):
break
@@ -5187,7 +5187,7 @@ def CheckForNonConstReference(filename, clean_lines, linenum,
# Don't see a whitelisted function on this line. Actually we
# didn't see any function name on this line, so this is likely a
# multi-line parameter list. Try a bit harder to catch this case.
- for i in xrange(2):
+ for i in range(2):
if (linenum > i and
Search(whitelisted_functions, clean_lines.elided[linenum - i - 1])):
return
@@ -5349,7 +5349,7 @@ def CheckCStyleCast(filename, clean_lines, linenum, cast_type, pattern, error):
# Try expanding current context to see if we are one level of
# parentheses inside a macro.
if linenum > 0:
- for i in xrange(linenum - 1, max(0, linenum - 5), -1):
+ for i in range(linenum - 1, max(0, linenum - 5), -1):
context = clean_lines.elided[i] + context
if Match(r'.*\b[_A-Z][_A-Z0-9]*\s*\((?:\([^()]*\)|[^()])*$', context):
return False
@@ -5606,7 +5606,7 @@ def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error,
required = {} # A map of header name to linenumber and the template entity.
# Example of required: { '<functional>': (1219, 'less<>') }
- for linenum in xrange(clean_lines.NumLines()):
+ for linenum in range(clean_lines.NumLines()):
line = clean_lines.elided[linenum]
if not line or line[0] == '#':
continue
@@ -5655,7 +5655,7 @@ def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error,
# include_dict is modified during iteration, so we iterate over a copy of
# the keys.
- header_keys = include_dict.keys()
+ header_keys = list(include_dict.keys())
for header in header_keys:
(same_module, common_path) = FilesBelongToSameModule(abs_filename, header)
fullpath = common_path + header
@@ -5750,7 +5750,7 @@ def CheckRedundantVirtual(filename, clean_lines, linenum, error):
end_col = -1
end_line = -1
start_col = len(virtual.group(1))
- for start_line in xrange(linenum, min(linenum + 3, clean_lines.NumLines())):
+ for start_line in range(linenum, min(linenum + 3, clean_lines.NumLines())):
line = clean_lines.elided[start_line][start_col:]
parameter_list = Match(r'^([^(]*)\(', line)
if parameter_list:
@@ -5765,7 +5765,7 @@ def CheckRedundantVirtual(filename, clean_lines, linenum, error):
# Look for "override" or "final" after the parameter list
# (possibly on the next few lines).
- for i in xrange(end_line, min(end_line + 3, clean_lines.NumLines())):
+ for i in range(end_line, min(end_line + 3, clean_lines.NumLines())):
line = clean_lines.elided[i][end_col:]
match = Search(r'\b(override|final)\b', line)
if match:
@@ -5992,7 +5992,7 @@ def ProcessFileData(filename, file_extension, lines, error,
RemoveMultiLineComments(filename, lines, error)
clean_lines = CleansedLines(lines)
- for line in xrange(clean_lines.NumLines()):
+ for line in range(clean_lines.NumLines()):
ProcessLine(filename, file_extension, clean_lines, line,
include_state, function_state, nesting_state, error,
extra_check_functions)
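
The cpplint.py changes are mechanical: Python 3 removed xrange, and range now returns a lazy sequence with the same iteration semantics, so every xrange call becomes range. A minimal sketch of the equivalence:

    # Python 2: xrange was lazy while range built a full list.
    # Python 3: range is lazy, making it a drop-in replacement for xrange.
    for i in range(10, 0, -1):     # counts 10, 9, ..., 1
        pass
    first_five = list(range(5))    # wrap in list() only when a real list is needed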
diff --git a/buildscripts/errorcodes.py b/buildscripts/errorcodes.py
index 1060e6ed9f4..6faeffca589 100755
--- a/buildscripts/errorcodes.py
+++ b/buildscripts/errorcodes.py
@@ -5,14 +5,15 @@ Parses .cpp files for assertions and verifies assertion codes are distinct.
Optionally replaces zero codes in source code with new distinct values.
"""
-from __future__ import absolute_import
-from __future__ import print_function
+
+
import bisect
import os.path
import sys
from collections import defaultdict, namedtuple
from optparse import OptionParser
+from functools import reduce
# Get relative imports to work when the package is not installed on the PYTHONPATH.
if __name__ == "__main__" and __package__ is None:
@@ -44,21 +45,21 @@ list_files = False # pylint: disable=invalid-name
def parse_source_files(callback):
"""Walk MongoDB sourcefiles and invoke a callback for each AssertLocation found."""
- quick = ["assert", "Exception", "ErrorCodes::Error"]
+ quick = [b"assert", b"Exception", b"ErrorCodes::Error"]
patterns = [
- re.compile(r"(?:u|m(?:sg)?)asser(?:t|ted)(?:NoTrace)?\s*\(\s*(\d+)", re.MULTILINE),
- re.compile(r"(?:DB|Assertion)Exception\s*[({]\s*(\d+)", re.MULTILINE),
- re.compile(r"fassert(?:Failed)?(?:WithStatus)?(?:NoTrace)?(?:StatusOK)?\s*\(\s*(\d+)",
+ re.compile(rb"(?:u|m(?:sg)?)asser(?:t|ted)(?:NoTrace)?\s*\(\s*(\d+)", re.MULTILINE),
+ re.compile(rb"(?:DB|Assertion)Exception\s*[({]\s*(\d+)", re.MULTILINE),
+ re.compile(rb"fassert(?:Failed)?(?:WithStatus)?(?:NoTrace)?(?:StatusOK)?\s*\(\s*(\d+)",
re.MULTILINE),
- re.compile(r"ErrorCodes::Error\s*[({]\s*(\d+)", re.MULTILINE)
+ re.compile(rb"ErrorCodes::Error\s*[({]\s*(\d+)", re.MULTILINE)
]
for source_file in utils.get_all_source_files(prefix='src/mongo/'):
if list_files:
print('scanning file: ' + source_file)
- with open(source_file) as fh:
+ with open(source_file, 'rb') as fh:
text = fh.read()
if not any([zz in text for zz in quick]):
@@ -168,7 +169,7 @@ def read_error_codes():
print("EXCESSIVE SKIPPING OF ERROR CODES:")
print(" %s:%d:%d:%s" % (loc.sourceFile, line, col, loc.lines))
- for code, locations in dups.items():
+ for code, locations in list(dups.items()):
print("DUPLICATE IDS: %s" % code)
for loc in locations:
line, col = get_line_and_column_for_position(loc)
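
errorcodes.py now opens source files in 'rb' mode, so the quick-scan strings and regex patterns must be bytes as well: Python 3 refuses to match str patterns against bytes input. An rb"..." prefix keeps raw-string escaping (for \s, \d, and friends) while producing a bytes pattern. A minimal sketch, with a hypothetical file name:

    import re

    # Bytes pattern for bytes input; rb"..." keeps backslashes literal.
    ASSERT_RE = re.compile(rb"uassert\s*\(\s*(\d+)", re.MULTILINE)

    with open("example.cpp", "rb") as fh:    # hypothetical source file
        text = fh.read()                     # bytes, not str
    codes = [int(m) for m in ASSERT_RE.findall(text)]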
diff --git a/buildscripts/eslint.py b/buildscripts/eslint.py
index bb365311640..aa962331c39 100755
--- a/buildscripts/eslint.py
+++ b/buildscripts/eslint.py
@@ -10,7 +10,7 @@ There is also a -d mode that assumes you only want to run one copy of ESLint per
parameter supplied. This lets ESLint search for candidate files to lint.
"""
-from __future__ import print_function
+
import os
import shutil
@@ -20,7 +20,7 @@ import sys
import tarfile
import tempfile
import threading
-import urllib
+import urllib.request, urllib.parse, urllib.error
from distutils import spawn # pylint: disable=no-name-in-module
from optparse import OptionParser
@@ -57,7 +57,7 @@ ESLINT_SOURCE_TAR_BASE = string.Template(ESLINT_PROGNAME + "-$platform-$arch")
def callo(args):
"""Call a program, and capture its output."""
- return subprocess.check_output(args)
+ return subprocess.check_output(args).decode('utf-8')
def extract_eslint(tar_path, target_file):
@@ -84,7 +84,7 @@ def get_eslint_from_cache(dest_file, platform, arch):
# Download the file
print("Downloading ESLint %s from %s, saving to %s" % (ESLINT_VERSION, url, temp_tar_file))
- urllib.urlretrieve(url, temp_tar_file)
+ urllib.request.urlretrieve(url, temp_tar_file)
eslint_distfile = ESLINT_SOURCE_TAR_BASE.substitute(platform=platform, arch=arch)
extract_eslint(temp_tar_file, eslint_distfile)
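
Python 3 split the monolithic urllib module into urllib.request, urllib.parse, and urllib.error, which is why both the import and the urlretrieve call change. A minimal sketch, with a placeholder URL:

    import urllib.request

    url = "https://example.com/eslint.tar.gz"    # placeholder URL
    local_path, headers = urllib.request.urlretrieve(url, "eslint.tar.gz")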
diff --git a/buildscripts/evergreen_gen_fuzzer_tests.py b/buildscripts/evergreen_gen_fuzzer_tests.py
index 161802fa4e4..0937cff7659 100755
--- a/buildscripts/evergreen_gen_fuzzer_tests.py
+++ b/buildscripts/evergreen_gen_fuzzer_tests.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python
"""Generate fuzzer tests to run in evergreen in parallel."""
-from __future__ import absolute_import
+
import argparse
import math
@@ -15,8 +15,8 @@ from shrub.task import TaskDependency
from shrub.variant import DisplayTaskDefinition
from shrub.variant import TaskSpec
-import util.read_config as read_config
-import util.taskname as taskname
+import buildscripts.util.read_config as read_config
+import buildscripts.util.taskname as taskname
CONFIG_DIRECTORY = "generated_resmoke_config"
diff --git a/buildscripts/evergreen_generate_resmoke_tasks.py b/buildscripts/evergreen_generate_resmoke_tasks.py
index a4505ad8929..2e33ef665eb 100755
--- a/buildscripts/evergreen_generate_resmoke_tasks.py
+++ b/buildscripts/evergreen_generate_resmoke_tasks.py
@@ -6,7 +6,7 @@ Analyze the evergreen history for tests run under the given task and create new
to attempt to keep the task runtime under a specified amount.
"""
-from __future__ import absolute_import
+
import argparse
import datetime
@@ -463,7 +463,7 @@ class TestStats(object):
def get_tests_runtimes(self):
"""Return the list of (test_file, runtime_in_secs) tuples ordered by decreasing runtime."""
tests = []
- for test_file, runtime_info in self._runtime_by_test.items():
+ for test_file, runtime_info in list(self._runtime_by_test.items()):
duration = runtime_info["duration"]
test_name = testname.get_short_name_from_test_file(test_file)
hook_runtime_info = self._hook_runtime_by_test[test_name]
@@ -536,9 +536,10 @@ class Main(object):
help="Target execution time (in minutes).")
parser.add_argument("--max-sub-suites", dest="max_sub_suites", type=int,
help="Max number of suites to divide into.")
- parser.add_argument("--fallback-num-sub-suites", dest="fallback_num_sub_suites", type=int,
- help="The number of suites to divide into if the Evergreen test "
- "statistics are not available.")
+ parser.add_argument(
+ "--fallback-num-sub-suites", dest="fallback_num_sub_suites", type=int,
+ help="The number of suites to divide into if the Evergreen test "
+ "statistics are not available.")
parser.add_argument("--project", dest="project", help="The Evergreen project to analyse.")
parser.add_argument("--resmoke-args", dest="resmoke_args",
help="Arguments to pass to resmoke calls.")
diff --git a/buildscripts/evergreen_resmoke_job_count.py b/buildscripts/evergreen_resmoke_job_count.py
index 098c65130cc..bf6d99bba9d 100644
--- a/buildscripts/evergreen_resmoke_job_count.py
+++ b/buildscripts/evergreen_resmoke_job_count.py
@@ -1,9 +1,6 @@
#!/usr/bin/env python
"""Determine the number of resmoke jobs to run."""
-from __future__ import division
-from __future__ import print_function
-
import argparse
import platform
import re
@@ -76,15 +73,18 @@ def main():
parser.add_argument("--taskName", dest="task", required=True, help="Task being executed.")
parser.add_argument("--buildVariant", dest="variant", required=True,
help="Build variant task is being executed on.")
- parser.add_argument("--jobFactor", dest="jobs_factor", type=float, default=1.0,
- help=("Job factor to use as a mulitplier with the number of CPUs. Defaults"
- " to %(default)s."))
- parser.add_argument("--jobsMax", dest="jobs_max", type=int, default=0,
- help=("Maximum number of jobs to use. Specify 0 to indicate the number of"
- " jobs is determined by --jobFactor and the number of CPUs. Defaults"
- " to %(default)s."))
- parser.add_argument("--outFile", dest="outfile", help=("File to write configuration to. If"
- " unspecified no file is generated."))
+ parser.add_argument(
+ "--jobFactor", dest="jobs_factor", type=float, default=1.0,
+ help=("Job factor to use as a mulitplier with the number of CPUs. Defaults"
+ " to %(default)s."))
+ parser.add_argument(
+ "--jobsMax", dest="jobs_max", type=int, default=0,
+ help=("Maximum number of jobs to use. Specify 0 to indicate the number of"
+ " jobs is determined by --jobFactor and the number of CPUs. Defaults"
+ " to %(default)s."))
+ parser.add_argument(
+ "--outFile", dest="outfile", help=("File to write configuration to. If"
+ " unspecified no file is generated."))
options = parser.parse_args()
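
Dropping "from __future__ import division" is safe here because / is always true division in Python 3, which is what a float job factor times a CPU count needs. A small sketch with made-up numbers:

    cpus = 8
    job_factor = 0.5
    jobs = int(round(cpus * job_factor))    # 4
    assert 7 / 2 == 3.5                     # true division is the default
    assert 7 // 2 == 3                      # floor division when wanted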
diff --git a/buildscripts/evergreen_run_tests.py b/buildscripts/evergreen_run_tests.py
index ac6a88c2637..d42c4423a67 100755
--- a/buildscripts/evergreen_run_tests.py
+++ b/buildscripts/evergreen_run_tests.py
@@ -1,8 +1,6 @@
#!/usr/bin/env python
"""Command line utility for executing MongoDB tests in Evergreen."""
-from __future__ import absolute_import
-
import collections
import os.path
import sys
diff --git a/buildscripts/evergreen_task_timeout.py b/buildscripts/evergreen_task_timeout.py
index 0269e411b6d..f69760dbbb1 100644
--- a/buildscripts/evergreen_task_timeout.py
+++ b/buildscripts/evergreen_task_timeout.py
@@ -1,8 +1,6 @@
#!/usr/bin/env python
"""Determine the timeout value a task should use in evergreen."""
-from __future__ import absolute_import
-
import argparse
import sys
diff --git a/buildscripts/fetch_test_lifecycle.py b/buildscripts/fetch_test_lifecycle.py
index 6d3cfc8d5a7..4b8f65c74de 100755
--- a/buildscripts/fetch_test_lifecycle.py
+++ b/buildscripts/fetch_test_lifecycle.py
@@ -7,9 +7,6 @@ Usage:
python buildscripts/fetch_test_lifecycle.py evergreen-project revision
"""
-from __future__ import absolute_import
-from __future__ import print_function
-
import logging
import optparse
import os
@@ -138,38 +135,44 @@ def main():
parser = optparse.OptionParser(
description=textwrap.dedent(main.__doc__), usage="Usage: %prog [options] evergreen-project")
- parser.add_option("--revision", dest="revision", metavar="<revision>", default="HEAD",
- help=("The project revision for which to retrieve the test lifecycle tags"
- " file."))
-
- parser.add_option("--metadataRepo", dest="metadata_repo_url", metavar="<metadata-repo-url>",
- default="git@github.com:mongodb/mongo-test-metadata.git",
- help=("The URL to the metadata repository that contains the test lifecycle"
- " tags file."))
-
- parser.add_option("--lifecycleFile", dest="lifecycle_file", metavar="<lifecycle-file>",
- default="etc/test_lifecycle.yml",
- help=("The path to the test lifecycle tags file, relative to the root of the"
- " metadata repository. Defaults to '%default'."))
-
- parser.add_option("--referencesFile", dest="references_file", metavar="<references-file>",
- default="references.yml",
- help=("The path to the metadata references file, relative to the root of the"
- " metadata repository. Defaults to '%default'."))
-
- parser.add_option("--destinationFile", dest="destination_file", metavar="<destination-file>",
- default="etc/test_lifecycle.yml",
- help=("The path where the lifecycle file should be available when this script"
- " completes successfully. This path is absolute or relative to the"
- " current working directory. Defaults to '%default'."))
+ parser.add_option(
+ "--revision", dest="revision", metavar="<revision>", default="HEAD",
+ help=("The project revision for which to retrieve the test lifecycle tags"
+ " file."))
+
+ parser.add_option(
+ "--metadataRepo", dest="metadata_repo_url", metavar="<metadata-repo-url>",
+ default="git@github.com:mongodb/mongo-test-metadata.git",
+ help=("The URL to the metadata repository that contains the test lifecycle"
+ " tags file."))
+
+ parser.add_option(
+ "--lifecycleFile", dest="lifecycle_file", metavar="<lifecycle-file>",
+ default="etc/test_lifecycle.yml",
+ help=("The path to the test lifecycle tags file, relative to the root of the"
+ " metadata repository. Defaults to '%default'."))
+
+ parser.add_option(
+ "--referencesFile", dest="references_file", metavar="<references-file>",
+ default="references.yml",
+ help=("The path to the metadata references file, relative to the root of the"
+ " metadata repository. Defaults to '%default'."))
+
+ parser.add_option(
+ "--destinationFile", dest="destination_file", metavar="<destination-file>",
+ default="etc/test_lifecycle.yml",
+ help=("The path where the lifecycle file should be available when this script"
+ " completes successfully. This path is absolute or relative to the"
+ " current working directory. Defaults to '%default'."))
parser.add_option("--logLevel", dest="log_level", metavar="<log-level>",
choices=["DEBUG", "INFO", "WARNING", "ERROR"], default="INFO",
help="The log level: DEBUG, INFO, WARNING or ERROR. Defaults to '%default'.")
- parser.add_option("--logFile", dest="log_file", metavar="<log-file>", default=None,
- help=("The destination file for the logs. If not set the script will log to"
- " the standard output"))
+ parser.add_option(
+ "--logFile", dest="log_file", metavar="<log-file>", default=None,
+ help=("The destination file for the logs. If not set the script will log to"
+ " the standard output"))
options, args = parser.parse_args()
diff --git a/buildscripts/gdb/mongo.py b/buildscripts/gdb/mongo.py
index cced216b659..01da27907b5 100644
--- a/buildscripts/gdb/mongo.py
+++ b/buildscripts/gdb/mongo.py
@@ -1,5 +1,4 @@
"""GDB commands for MongoDB."""
-from __future__ import print_function
import os
import re
@@ -22,12 +21,6 @@ except Exception as e:
print("Failed to load the libstdc++ pretty printers: " + str(e))
# pylint: enable=invalid-name,wildcard-import
-if sys.version_info[0] >= 3:
- # GDB only permits converting a gdb.Value instance to its numerical address when using the
- # long() constructor in Python 2 and not when using the int() constructor. We define the
- # 'long' class as an alias for the 'int' class in Python 3 for compatibility.
- long = int # pylint: disable=redefined-builtin,invalid-name
-
def get_process_name():
"""Return the main binary we are attached to."""
@@ -104,7 +97,7 @@ def get_decorations(obj):
decorable_t = decorable.type.template_argument(0)
decinfo_t = gdb.lookup_type('mongo::DecorationRegistry<{}>::DecorationInfo'.format(
str(decorable_t).replace("class", "").strip()))
- count = long((long(finish) - long(start)) / decinfo_t.sizeof)
+ count = int((int(finish) - int(start)) / decinfo_t.sizeof)
for i in range(count):
descriptor = start[i]
@@ -480,7 +473,7 @@ class MongoDBUniqueStack(gdb.Command):
"""Return the first tid."""
return stack['threads'][0]['gdb_thread_num']
- for stack in sorted(stacks.values(), key=first_tid, reverse=True):
+ for stack in sorted(list(stacks.values()), key=first_tid, reverse=True):
for i, thread in enumerate(stack['threads']):
prefix = '' if i == 0 else 'Duplicate '
print(prefix + thread['header'])
@@ -527,9 +520,10 @@ class MongoDBJavaScriptStack(gdb.Command):
if gdb.parse_and_eval(
'mongo::mozjs::kCurrentScope && mongo::mozjs::kCurrentScope->_inOp'):
gdb.execute('thread', from_tty=False, to_string=False)
- gdb.execute('printf "%s\\n", ' +
- 'mongo::mozjs::kCurrentScope->buildStackString().c_str()',
- from_tty=False, to_string=False)
+ gdb.execute(
+ 'printf "%s\\n", ' +
+ 'mongo::mozjs::kCurrentScope->buildStackString().c_str()', from_tty=False,
+ to_string=False)
except gdb.error as err:
print("Ignoring GDB error '%s' in javascript_stack" % str(err))
continue
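
The list() wrapper around stacks.values() is a 2to3 reflex rather than a requirement: sorted() accepts any iterable, including Python 3's lazy dict views, and always returns a new list. A minimal sketch:

    stacks = {"a": {"threads": [3]}, "b": {"threads": [1]}}

    def first_tid(stack):
        return stack["threads"][0]

    # Equivalent to sorted(list(stacks.values()), ...); the copy is redundant.
    ordered = sorted(stacks.values(), key=first_tid, reverse=True)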
diff --git a/buildscripts/gdb/mongo_lock.py b/buildscripts/gdb/mongo_lock.py
index fb864bb172a..382d5718e44 100644
--- a/buildscripts/gdb/mongo_lock.py
+++ b/buildscripts/gdb/mongo_lock.py
@@ -1,19 +1,11 @@
"""Mongo lock module."""
-from __future__ import print_function
-
import re
import sys
import gdb
import gdb.printing
-if sys.version_info[0] >= 3:
- # GDB only permits converting a gdb.Value instance to its numerical address when using the
- # long() constructor in Python 2 and not when using the int() constructor. We define the
- # 'long' class as an alias for the 'int' class in Python 3 for compatibility.
- long = int # pylint: disable=redefined-builtin,invalid-name
-
class NonExecutingThread(object):
"""NonExecutingThread class.
@@ -240,7 +232,7 @@ class Graph(object):
def find_thread(thread_dict, search_thread_id):
"""Find thread."""
- for (_, thread) in thread_dict.items():
+ for (_, thread) in list(thread_dict.items()):
if thread.thread_id == search_thread_id:
return thread
return None
@@ -307,8 +299,8 @@ def find_mutex_holder(graph, thread_dict, show):
print("Mutex at {} held by {} waited on by {}".format(mutex_value, mutex_holder,
mutex_waiter))
if graph:
- graph.add_edge(mutex_waiter, Lock(long(mutex_value), "Mutex"))
- graph.add_edge(Lock(long(mutex_value), "Mutex"), mutex_holder)
+ graph.add_edge(mutex_waiter, Lock(int(mutex_value), "Mutex"))
+ graph.add_edge(Lock(int(mutex_value), "Mutex"), mutex_holder)
def find_lock_manager_holders(graph, thread_dict, show): # pylint: disable=too-many-locals
@@ -355,8 +347,8 @@ def find_lock_manager_holders(graph, thread_dict, show): # pylint: disable=too-
print("MongoDB Lock at {} held by {} ({}) waited on by {}".format(
lock_head, lock_holder, lock_request["mode"], lock_waiter))
if graph:
- graph.add_edge(lock_waiter, Lock(long(lock_head), lock_request["mode"]))
- graph.add_edge(Lock(long(lock_head), lock_request["mode"]), lock_holder)
+ graph.add_edge(lock_waiter, Lock(int(lock_head), lock_request["mode"]))
+ graph.add_edge(Lock(int(lock_head), lock_request["mode"]), lock_holder)
lock_request_ptr = lock_request["next"]
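
list(thread_dict.items()) snapshots the view before iterating. That copy only matters when the loop mutates the dict, since Python 3 views raise RuntimeError if the dict changes size mid-iteration; read-only traversals can use the view directly. A minimal sketch:

    threads = {1: "idle", 2: "dead", 3: "idle"}

    # Safe: iterate over a snapshot while deleting from the dict.
    for tid, state in list(threads.items()):
        if state == "dead":
            del threads[tid]

    # Read-only traversal needs no copy.
    for tid, state in threads.items():
        print(tid, state)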
diff --git a/buildscripts/gdb/mongo_printers.py b/buildscripts/gdb/mongo_printers.py
index 71948da8e05..bc9506eff85 100644
--- a/buildscripts/gdb/mongo_printers.py
+++ b/buildscripts/gdb/mongo_printers.py
@@ -1,5 +1,4 @@
"""GDB Pretty-printers for MongoDB."""
-from __future__ import print_function
import re
import struct
@@ -18,12 +17,6 @@ except ImportError as err:
print("Check with the pip command if pymongo 3.x is installed.")
bson = None
-if sys.version_info[0] >= 3:
- # GDB only permits converting a gdb.Value instance to its numerical address when using the
- # long() constructor in Python 2 and not when using the int() constructor. We define the
- # 'long' class as an alias for the 'int' class in Python 3 for compatibility.
- long = int # pylint: disable=redefined-builtin,invalid-name
-
def get_unique_ptr(obj):
"""Read the value of a libstdc++ std::unique_ptr."""
@@ -132,7 +125,7 @@ class BSONObjPrinter(object):
options = CodecOptions(document_class=collections.OrderedDict)
bsondoc = buf.decode(codec_options=options)
- for key, val in bsondoc.items():
+ for key, val in list(bsondoc.items()):
yield 'key', key
yield 'value', bson.json_util.dumps(val)
@@ -187,7 +180,7 @@ class DecorablePrinter(object):
decorable_t = val.type.template_argument(0)
decinfo_t = gdb.lookup_type('mongo::DecorationRegistry<{}>::DecorationInfo'.format(
str(decorable_t).replace("class", "").strip()))
- self.count = long((long(finish) - long(self.start)) / decinfo_t.sizeof)
+ self.count = int((int(finish) - int(self.start)) / decinfo_t.sizeof)
@staticmethod
def display_hint():
@@ -388,9 +381,8 @@ class AbslHashSetPrinterBase(object):
def to_string(self):
"""Return absl::[node/flat]_hash_set for printing."""
- return "absl::%s_hash_set<%s> with %s elems " % (self.to_str,
- self.val.type.template_argument(0),
- self.val["size_"])
+ return "absl::%s_hash_set<%s> with %s elems " % (
+ self.to_str, self.val.type.template_argument(0), self.val["size_"])
class AbslNodeHashSetPrinter(AbslHashSetPrinterBase):
@@ -438,10 +430,9 @@ class AbslHashMapPrinterBase(object):
def to_string(self):
"""Return absl::[node/flat]_hash_map for printing."""
- return "absl::%s_hash_map<%s, %s> with %s elems " % (self.to_str,
- self.val.type.template_argument(0),
- self.val.type.template_argument(1),
- self.val["size_"])
+ return "absl::%s_hash_map<%s, %s> with %s elems " % (
+ self.to_str, self.val.type.template_argument(0), self.val.type.template_argument(1),
+ self.val["size_"])
class AbslNodeHashMapPrinter(AbslHashMapPrinterBase):
diff --git a/buildscripts/generate-pip-constraints.sh b/buildscripts/generate-pip-constraints.sh
index 8789829d7c2..c00532724a7 100755
--- a/buildscripts/generate-pip-constraints.sh
+++ b/buildscripts/generate-pip-constraints.sh
@@ -64,10 +64,8 @@ if [[ -d $WORKING_DIR ]]; then
fi
ABSOLUTE_WORKING_DIR="$(mkdir -p "${WORKING_DIR}" && cd "${WORKING_DIR}" && pwd)"
-PIP2_DIR="${ABSOLUTE_WORKING_DIR}/python2"
PIP3_DIR="${ABSOLUTE_WORKING_DIR}/python3"
-generateConstraints python2 "${PIP2_DIR}"
generateConstraints python3 "${PIP3_DIR}"
if [[ -z $CON_FILE ]]; then
@@ -83,15 +81,7 @@ fi
printf '\n'
printf '\n# Common requirements\n'
- comm -12 "${PIP2_DIR}/requirements.txt" "${PIP3_DIR}/requirements.txt"
-
- printf '\n# Python2 requirements\n'
- comm -23 "${PIP2_DIR}/requirements.txt" "${PIP3_DIR}/requirements.txt" |
- sed -e 's/$/; python_version < "3"/'
-
- printf '\n# Python3 requirements\n'
- comm -13 "${PIP2_DIR}/requirements.txt" "${PIP3_DIR}/requirements.txt" |
- sed -e 's/$/; python_version > "3"/'
+ cat "${PIP3_DIR}/requirements.txt"
printf '\n'
cat "${SCRIPT_DIR}/../etc/pip/components/platform.req"
diff --git a/buildscripts/generate_compile_expansions.py b/buildscripts/generate_compile_expansions.py
index c3433d765b2..09cbb3f7934 100755
--- a/buildscripts/generate_compile_expansions.py
+++ b/buildscripts/generate_compile_expansions.py
@@ -6,8 +6,6 @@ Invoke by specifying an output file.
$ python generate_compile_expansions.py --out compile_expansions.yml
"""
-from __future__ import print_function
-
import argparse
import json
import os
diff --git a/buildscripts/generate_compile_expansions_shared_cache.py b/buildscripts/generate_compile_expansions_shared_cache.py
index cbba4b569d4..58c7f8b2fcf 100755
--- a/buildscripts/generate_compile_expansions_shared_cache.py
+++ b/buildscripts/generate_compile_expansions_shared_cache.py
@@ -6,8 +6,6 @@ Invoke by specifying an output file.
$ python generate_compile_expansions.py --out compile_expansions.yml
"""
-from __future__ import print_function
-
import argparse
import json
import os
diff --git a/buildscripts/git.py b/buildscripts/git.py
index f3db86a467c..f2374f269d2 100644
--- a/buildscripts/git.py
+++ b/buildscripts/git.py
@@ -1,30 +1,9 @@
"""Module to run git commands on a repository."""
-from __future__ import absolute_import
-
import logging
import os
import sys
-
-# The subprocess32 module resolves the thread-safety issues of the subprocess module in Python 2.x
-# when the _posixsubprocess C extension module is also available. Additionally, the _posixsubprocess
-# C extension module avoids triggering invalid free() calls on Python's internal data structure for
-# thread-local storage by skipping the PyOS_AfterFork() call when the 'preexec_fn' parameter isn't
-# specified to subprocess.Popen(). See SERVER-22219 for more details.
-#
-# The subprocess32 module is untested on Windows and thus isn't recommended for use, even when it's
-# installed. See https://github.com/google/python-subprocess32/blob/3.2.7/README.md#usage.
-if os.name == "posix" and sys.version_info[0] == 2:
- try:
- import subprocess32 as subprocess
- except ImportError:
- import warnings
- warnings.warn(("Falling back to using the subprocess module because subprocess32 isn't"
- " available. When using the subprocess module, a child process may trigger"
- " an invalid free(). See SERVER-22219 for more details."), RuntimeWarning)
- import subprocess # type: ignore
-else:
- import subprocess
+import subprocess
LOGGER = logging.getLogger(__name__)
@@ -203,7 +182,7 @@ class Repository(object): # pylint: disable=too-many-public-methods
params.extend(["rev-parse", "--show-toplevel"])
result = Repository._run_process("rev-parse", params)
result.check_returncode()
- return result.stdout.rstrip()
+ return result.stdout.decode('utf-8').rstrip()
@staticmethod
def current_repository():
@@ -214,7 +193,7 @@ class Repository(object): # pylint: disable=too-many-public-methods
"""Call git for this repository, and return the captured output."""
result = self._run_cmd(cmd, args)
result.check_returncode()
- return result.stdout
+ return result.stdout.decode('utf-8')
def _callgit(self, cmd, args, raise_exception=False):
"""
@@ -291,6 +270,7 @@ class GitCommandResult(object):
def check_returncode(self):
"""Raise GitException if the exit code is non-zero."""
if self.returncode:
- raise GitException("Command '{0}' failed with code '{1}'".format(
- " ".join(self.process_args), self.returncode), self.returncode, self.cmd,
- self.process_args, self.stdout, self.stderr)
+ raise GitException(
+ "Command '{0}' failed with code '{1}'".format(" ".join(self.process_args),
+ self.returncode), self.returncode,
+ self.cmd, self.process_args, self.stdout, self.stderr)
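
subprocess.check_output returns bytes under Python 3, hence the .decode('utf-8') calls added in git.py, eslint.py, and hang_analyzer.py. On Python 3.7+ passing text=True (universal_newlines=True on older 3.x) lets subprocess do the decoding instead. A minimal sketch:

    import subprocess

    out = subprocess.check_output(["git", "rev-parse", "--show-toplevel"])
    toplevel = out.decode("utf-8").rstrip()

    # Equivalent, with decoding done by subprocess itself:
    toplevel = subprocess.check_output(
        ["git", "rev-parse", "--show-toplevel"], text=True).rstrip()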
diff --git a/buildscripts/hang_analyzer.py b/buildscripts/hang_analyzer.py
index 2f244a4fc00..586024a11a1 100755
--- a/buildscripts/hang_analyzer.py
+++ b/buildscripts/hang_analyzer.py
@@ -11,7 +11,7 @@ A prototype hang analyzer for Evergreen integration to help investigate test tim
Supports Linux, MacOS X, Solaris, and Windows.
"""
-import StringIO
+import io
import csv
import glob
import itertools
@@ -60,7 +60,7 @@ def callo(args, logger):
"""Call subprocess on args string."""
logger.info("%s", str(args))
- return subprocess.check_output(args)
+ return subprocess.check_output(args).decode('utf-8')
def find_program(prog, paths):
@@ -186,7 +186,7 @@ class WindowsProcessList(object):
ret = callo([ps, "/FO", "CSV"], logger)
- buff = StringIO.StringIO(ret)
+ buff = io.StringIO(ret)
csv_reader = csv.reader(buff)
return [[int(row[1]), row[0]] for row in csv_reader if row[1] != "PID"]
@@ -201,7 +201,7 @@ class LLDBDumper(object):
"""Find the installed debugger."""
return find_program(debugger, ['/usr/bin'])
- def dump_info( # pylint: disable=too-many-arguments
+ def dump_info( # pylint: disable=too-many-arguments,too-many-locals
self, root_logger, logger, pid, process_name, take_dump):
"""Dump info."""
debugger = "lldb"
@@ -283,7 +283,7 @@ class DarwinProcessList(object):
ret = callo([ps, "-axco", "pid,comm"], logger)
- buff = StringIO.StringIO(ret)
+ buff = io.StringIO(ret)
csv_reader = csv.reader(buff, delimiter=' ', quoting=csv.QUOTE_NONE, skipinitialspace=True)
return [[int(row[0]), row[1]] for row in csv_reader if row[0] != "PID"]
@@ -390,8 +390,8 @@ class GDBDumper(object):
"quit",
]
- call([dbg, "--quiet", "--nx"] +
- list(itertools.chain.from_iterable([['-ex', b] for b in cmds])), logger)
+ call([dbg, "--quiet", "--nx"] + list(
+ itertools.chain.from_iterable([['-ex', b] for b in cmds])), logger)
root_logger.info("Done analyzing %s process with PID %d", process_name, pid)
@@ -428,7 +428,7 @@ class LinuxProcessList(object):
ret = callo([ps, "-eo", "pid,args"], logger)
- buff = StringIO.StringIO(ret)
+ buff = io.StringIO(ret)
csv_reader = csv.reader(buff, delimiter=' ', quoting=csv.QUOTE_NONE, skipinitialspace=True)
return [[int(row[0]), os.path.split(row[1])[1]] for row in csv_reader if row[0] != "PID"]
@@ -450,7 +450,7 @@ class SolarisProcessList(object):
ret = callo([ps, "-eo", "pid,args"], logger)
- buff = StringIO.StringIO(ret)
+ buff = io.StringIO(ret)
csv_reader = csv.reader(buff, delimiter=' ', quoting=csv.QUOTE_NONE, skipinitialspace=True)
return [[int(row[0]), os.path.split(row[1])[1]] for row in csv_reader if row[0] != "PID"]
@@ -562,7 +562,7 @@ def signal_process(logger, pid, signalnum):
logger.info("Waiting for process to report")
time.sleep(5)
- except OSError, err:
+ except OSError as err:
logger.error("Hit OS error trying to signal process: %s", err)
except AttributeError:
@@ -619,32 +619,34 @@ def main(): # pylint: disable=too-many-branches,too-many-locals,too-many-statem
process_ids = []
parser = OptionParser(description=__doc__)
- parser.add_option('-m', '--process-match', dest='process_match', choices=['contains', 'exact'],
- default='contains',
- help="Type of match for process names (-p & -g), specify 'contains', or"
- " 'exact'. Note that the process name match performs the following"
- " conversions: change all process names to lowecase, strip off the file"
- " extension, like '.exe' on Windows. Default is 'contains'.")
+ parser.add_option(
+ '-m', '--process-match', dest='process_match', choices=['contains', 'exact'],
+ default='contains', help="Type of match for process names (-p & -g), specify 'contains', or"
+ " 'exact'. Note that the process name match performs the following"
+ " conversions: change all process names to lowecase, strip off the file"
+ " extension, like '.exe' on Windows. Default is 'contains'.")
parser.add_option('-p', '--process-names', dest='process_names',
help='Comma separated list of process names to analyze')
parser.add_option('-g', '--go-process-names', dest='go_process_names',
help='Comma separated list of go process names to analyze')
- parser.add_option('-d', '--process-ids', dest='process_ids', default=None,
- help='Comma separated list of process ids (PID) to analyze, overrides -p &'
- ' -g')
+ parser.add_option(
+ '-d', '--process-ids', dest='process_ids', default=None,
+ help='Comma separated list of process ids (PID) to analyze, overrides -p &'
+ ' -g')
parser.add_option('-c', '--dump-core', dest='dump_core', action="store_true", default=False,
help='Dump core file for each analyzed process')
parser.add_option('-s', '--max-core-dumps-size', dest='max_core_dumps_size', default=10000,
help='Maximum total size of core dumps to keep in megabytes')
- parser.add_option('-o', '--debugger-output', dest='debugger_output', action="append",
- choices=['file', 'stdout'], default=None,
- help="If 'stdout', then the debugger's output is written to the Python"
- " process's stdout. If 'file', then the debugger's output is written"
- " to a file named debugger_<process>_<pid>.log for each process it"
- " attaches to. This option can be specified multiple times on the"
- " command line to have the debugger's output written to multiple"
- " locations. By default, the debugger's output is written only to the"
- " Python process's stdout.")
+ parser.add_option(
+ '-o', '--debugger-output', dest='debugger_output', action="append",
+ choices=['file', 'stdout'], default=None,
+ help="If 'stdout', then the debugger's output is written to the Python"
+ " process's stdout. If 'file', then the debugger's output is written"
+ " to a file named debugger_<process>_<pid>.log for each process it"
+ " attaches to. This option can be specified multiple times on the"
+ " command line to have the debugger's output written to multiple"
+ " locations. By default, the debugger's output is written only to the"
+ " Python process's stdout.")
(options, _) = parser.parse_args()
@@ -681,7 +683,7 @@ def main(): # pylint: disable=too-many-branches,too-many-locals,too-many-statem
processes = [(pid, pname) for (pid, pname) in all_processes
if pid in process_ids and pid != os.getpid()]
- running_pids = set([pid for (pid, pname) in all_processes])
+ running_pids = {pid for (pid, pname) in all_processes}
missing_pids = set(process_ids) - running_pids
if missing_pids:
root_logger.warning("The following requested process ids are not running %s",
@@ -716,8 +718,9 @@ def main(): # pylint: disable=too-many-branches,too-many-locals,too-many-statem
process_name) in [(p, pn) for (p, pn) in processes if not re.match("^(java|python)", pn)]:
process_logger = get_process_logger(options.debugger_output, pid, process_name)
try:
- dbg.dump_info(root_logger, process_logger, pid, process_name, options.dump_core
- and check_dump_quota(max_dump_size_bytes, dbg.get_dump_ext()))
+ dbg.dump_info(
+ root_logger, process_logger, pid, process_name, options.dump_core
+ and check_dump_quota(max_dump_size_bytes, dbg.get_dump_ext()))
except Exception as err: # pylint: disable=broad-except
root_logger.info("Error encountered when invoking debugger %s", err)
trapped_exceptions.append(traceback.format_exc())
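
Two Python 3 changes meet in hang_analyzer.py: the StringIO module is gone, so in-memory text buffers come from io.StringIO, and the old "except OSError, err" comma syntax is now a SyntaxError, leaving only the "as" form. A minimal sketch of the ps-output parsing pattern, with stand-in data:

    import csv
    import io

    ps_output = "PID,Name\n123,mongod\n456,python\n"    # stand-in for callo() output
    buff = io.StringIO(ps_output)                       # text buffer, not bytes
    rows = [[int(r[0]), r[1]] for r in csv.reader(buff) if r[0] != "PID"]

    try:
        raise OSError("no such process")
    except OSError as err:    # 'except OSError, err' no longer parses
        print("Hit OS error: %s" % err)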
diff --git a/buildscripts/idl/idl/ast.py b/buildscripts/idl/idl/ast.py
index a5e49e1738d..9e7bd7ca396 100644
--- a/buildscripts/idl/idl/ast.py
+++ b/buildscripts/idl/idl/ast.py
@@ -34,8 +34,6 @@ This is a lossy translation from the IDL Syntax tree as the IDL AST only contain
the enums and structs that need code generated for them, and just enough information to do that.
"""
-from __future__ import absolute_import, print_function, unicode_literals
-
from typing import List, Union, Any, Optional, Tuple
from . import common
@@ -78,10 +76,10 @@ class Global(common.SourceLocation):
"""
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a Global."""
- self.cpp_namespace = None # type: unicode
- self.cpp_includes = [] # type: List[unicode]
+ self.cpp_namespace = None # type: str
+ self.cpp_includes = [] # type: List[str]
self.configs = None # type: ConfigGlobal
super(Global, self).__init__(file_name, line, column)
@@ -97,11 +95,11 @@ class Struct(common.SourceLocation):
# pylint: disable=too-many-instance-attributes
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a struct."""
- self.name = None # type: unicode
- self.cpp_name = None # type: unicode
- self.description = None # type: unicode
+ self.name = None # type: str
+ self.cpp_name = None # type: str
+ self.description = None # type: str
self.strict = True # type: bool
self.immutable = False # type: bool
self.inline_chained_structs = False # type: bool
@@ -114,9 +112,9 @@ class Expression(common.SourceLocation):
"""Literal of C++ expression representation."""
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct an Expression."""
- self.expr = None # type: unicode
+ self.expr = None # type: str
self.validate_constexpr = True # type: bool
self.export = False # type: bool
@@ -134,7 +132,7 @@ class Validator(common.SourceLocation):
# pylint: disable=too-many-instance-attributes
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a Validator."""
# Don't lint gt/lt as bad attribute names.
# pylint: disable=C0103
@@ -142,7 +140,7 @@ class Validator(common.SourceLocation):
self.lt = None # type: Expression
self.gte = None # type: Expression
self.lte = None # type: Expression
- self.callback = None # type: Optional[unicode]
+ self.callback = None # type: Optional[str]
super(Validator, self).__init__(file_name, line, column)
@@ -159,26 +157,26 @@ class Field(common.SourceLocation):
# pylint: disable=too-many-instance-attributes
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a Field."""
- self.name = None # type: unicode
- self.description = None # type: unicode
- self.cpp_name = None # type: unicode
+ self.name = None # type: str
+ self.description = None # type: str
+ self.cpp_name = None # type: str
self.optional = False # type: bool
self.ignore = False # type: bool
self.chained = False # type: bool
self.comparison_order = -1 # type: int
# Properties specific to fields which are types.
- self.cpp_type = None # type: unicode
- self.bson_serialization_type = None # type: List[unicode]
- self.serializer = None # type: unicode
- self.deserializer = None # type: unicode
- self.bindata_subtype = None # type: unicode
- self.default = None # type: unicode
+ self.cpp_type = None # type: str
+ self.bson_serialization_type = None # type: List[str]
+ self.serializer = None # type: str
+ self.deserializer = None # type: str
+ self.bindata_subtype = None # type: str
+ self.default = None # type: str
# Properties specific to fields which are structs.
- self.struct_type = None # type: unicode
+ self.struct_type = None # type: str
# Properties specific to fields which are arrays.
self.array = False # type: bool
@@ -208,9 +206,9 @@ class Command(Struct):
"""
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a command."""
- self.namespace = None # type: unicode
+ self.namespace = None # type: str
self.command_field = None # type: Field
super(Command, self).__init__(file_name, line, column)
@@ -223,10 +221,10 @@ class EnumValue(common.SourceLocation):
"""
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct an Enum."""
- self.name = None # type: unicode
- self.value = None # type: unicode
+ self.name = None # type: str
+ self.value = None # type: str
super(EnumValue, self).__init__(file_name, line, column)
@@ -239,12 +237,12 @@ class Enum(common.SourceLocation):
"""
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct an Enum."""
- self.name = None # type: unicode
- self.description = None # type: unicode
- self.cpp_namespace = None # type: unicode
- self.type = None # type: unicode
+ self.name = None # type: str
+ self.description = None # type: str
+ self.cpp_namespace = None # type: str
+ self.type = None # type: str
self.values = [] # type: List[EnumValue]
super(Enum, self).__init__(file_name, line, column)
@@ -254,11 +252,11 @@ class Condition(common.SourceLocation):
"""Condition(s) for a ServerParameter or ConfigOption."""
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a Condition."""
- self.expr = None # type: unicode
- self.constexpr = None # type: unicode
- self.preprocessor = None # type: unicode
+ self.expr = None # type: str
+ self.constexpr = None # type: str
+ self.preprocessor = None # type: str
super(Condition, self).__init__(file_name, line, column)
@@ -267,11 +265,11 @@ class ServerParameterClass(common.SourceLocation):
"""ServerParameter as C++ class specialization."""
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a ServerParameterClass."""
- self.name = None # type: unicode
- self.data = None # type: unicode
+ self.name = None # type: str
+ self.data = None # type: str
self.override_ctor = False # type: bool
self.override_set = False # type: bool
@@ -284,23 +282,23 @@ class ServerParameter(common.SourceLocation):
# pylint: disable=too-many-instance-attributes
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a ServerParameter."""
- self.name = None # type: unicode
- self.set_at = None # type: unicode
- self.description = None # type: unicode
+ self.name = None # type: str
+ self.set_at = None # type: str
+ self.description = None # type: str
self.cpp_class = None # type: ServerParameterClass
- self.cpp_vartype = None # type: unicode
- self.cpp_varname = None # type: unicode
+ self.cpp_vartype = None # type: str
+ self.cpp_varname = None # type: str
self.condition = None # type: Condition
self.redact = False # type: bool
self.test_only = False # type: bool
- self.deprecated_name = [] # type: List[unicode]
+ self.deprecated_name = [] # type: List[str]
self.default = None # type: Expression
# Only valid if cpp_varname is specified.
self.validator = None # type: Validator
- self.on_update = None # type: unicode
+ self.on_update = None # type: str
super(ServerParameter, self).__init__(file_name, line, column)
@@ -309,12 +307,12 @@ class GlobalInitializer(common.SourceLocation):
"""Initializer details for custom registration/storage."""
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a GlobalInitializer."""
- self.name = None # type: unicode
- self.register = None # type: unicode
- self.store = None # type: unicode
+ self.name = None # type: str
+ self.register = None # type: str
+ self.store = None # type: str
super(GlobalInitializer, self).__init__(file_name, line, column)
@@ -323,7 +321,7 @@ class ConfigGlobal(common.SourceLocation):
"""IDL ConfigOption Globals."""
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a ConfigGlobal."""
# Other config globals are consumed in bind phase.
@@ -338,28 +336,28 @@ class ConfigOption(common.SourceLocation):
# pylint: disable=too-many-instance-attributes
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a ConfigOption."""
- self.name = None # type: unicode
- self.short_name = None # type: unicode
- self.deprecated_name = [] # type: List[unicode]
- self.deprecated_short_name = [] # type: List[unicode]
+ self.name = None # type: str
+ self.short_name = None # type: str
+ self.deprecated_name = [] # type: List[str]
+ self.deprecated_short_name = [] # type: List[str]
self.description = None # type: Expression
- self.section = None # type: unicode
- self.arg_vartype = None # type: unicode
- self.cpp_vartype = None # type: unicode
- self.cpp_varname = None # type: unicode
+ self.section = None # type: str
+ self.arg_vartype = None # type: str
+ self.cpp_vartype = None # type: str
+ self.cpp_varname = None # type: str
self.condition = None # type: Condition
- self.conflicts = [] # type: List[unicode]
- self.requires = [] # type: List[unicode]
+ self.conflicts = [] # type: List[str]
+ self.requires = [] # type: List[str]
self.hidden = False # type: bool
self.redact = False # type: bool
self.default = None # type: Expression
self.implicit = None # type: Expression
- self.source = None # type: unicode
- self.canonicalize = None # type: unicode
+ self.source = None # type: str
+ self.canonicalize = None # type: str
self.duplicates_append = False # type: bool
self.positional_start = None # type: int
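
The IDL package annotates with PEP 484 type comments rather than inline annotations, so the Python 2 'unicode' name in them must become 'str' for mypy to resolve the comments under Python 3. A minimal sketch of the convention used throughout ast.py:

    from typing import List, Optional

    class Example(object):
        def __init__(self, file_name, line, column):
            # type: (str, int, int) -> None
            self.name = None    # type: Optional[str]
            self.values = []    # type: List[str]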
diff --git a/buildscripts/idl/idl/binder.py b/buildscripts/idl/idl/binder.py
index 1214bc2c36e..051d08b0671 100644
--- a/buildscripts/idl/idl/binder.py
+++ b/buildscripts/idl/idl/binder.py
@@ -28,8 +28,6 @@
# pylint: disable=too-many-lines
"""Transform idl.syntax trees from the parser into well-defined idl.ast trees."""
-from __future__ import absolute_import, print_function, unicode_literals
-
import re
from typing import cast, List, Set, Union
@@ -43,7 +41,7 @@ from . import syntax
def _validate_single_bson_type(ctxt, idl_type, syntax_type):
- # type: (errors.ParserContext, Union[syntax.Type, ast.Field], unicode) -> bool
+ # type: (errors.ParserContext, Union[syntax.Type, ast.Field], str) -> bool
"""Validate bson serialization type is correct for a type."""
bson_type = idl_type.bson_serialization_type[0]
@@ -72,7 +70,7 @@ def _validate_single_bson_type(ctxt, idl_type, syntax_type):
def _validate_bson_types_list(ctxt, idl_type, syntax_type):
- # type: (errors.ParserContext, Union[syntax.Type, ast.Field], unicode) -> bool
+ # type: (errors.ParserContext, Union[syntax.Type, ast.Field], str) -> bool
"""Validate bson serialization type(s) is correct for a type."""
bson_types = idl_type.bson_serialization_type
@@ -113,7 +111,7 @@ def _validate_type(ctxt, idl_type):
def _validate_cpp_type(ctxt, idl_type, syntax_type):
- # type: (errors.ParserContext, Union[syntax.Type, ast.Field], unicode) -> None
+ # type: (errors.ParserContext, Union[syntax.Type, ast.Field], str) -> None
"""Validate the cpp_type is correct."""
# Validate cpp_type
@@ -158,7 +156,7 @@ def _validate_cpp_type(ctxt, idl_type, syntax_type):
def _validate_chain_type_properties(ctxt, idl_type, syntax_type):
- # type: (errors.ParserContext, Union[syntax.Type, ast.Field], unicode) -> None
+ # type: (errors.ParserContext, Union[syntax.Type, ast.Field], str) -> None
"""Validate a chained type has both a deserializer and serializer."""
assert len(
idl_type.bson_serialization_type) == 1 and idl_type.bson_serialization_type[0] == 'chain'
@@ -173,7 +171,7 @@ def _validate_chain_type_properties(ctxt, idl_type, syntax_type):
def _validate_type_properties(ctxt, idl_type, syntax_type):
- # type: (errors.ParserContext, Union[syntax.Type, ast.Field], unicode) -> None
+ # type: (errors.ParserContext, Union[syntax.Type, ast.Field], str) -> None
# pylint: disable=too-many-branches
"""Validate each type or field is correct."""
@@ -233,7 +231,7 @@ def _validate_types(ctxt, parsed_spec):
def _is_duplicate_field(ctxt, field_container, fields, ast_field):
- # type: (errors.ParserContext, unicode, List[ast.Field], ast.Field) -> bool
+ # type: (errors.ParserContext, str, List[ast.Field], ast.Field) -> bool
"""Return True if there is a naming conflict for a given field."""
# This is normally tested in the parser as part of duplicate detection in a map
@@ -494,7 +492,7 @@ def _validate_doc_sequence_field(ctxt, ast_field):
def _normalize_method_name(cpp_type_name, cpp_method_name):
- # type: (unicode, unicode) -> unicode
+ # type: (str, str) -> str
"""Normalize the method name to be fully-qualified with the type name."""
# Default deserializer
if not cpp_method_name:
@@ -540,7 +538,7 @@ def _bind_expression(expr, allow_literal_string=True):
# int32_t
try:
intval = int(expr.literal)
- if (intval >= -0x80000000) and (intval <= 0x7FFFFFFF):
+ if intval >= -0x80000000 and intval <= 0x7FFFFFFF: # pylint: disable=chained-comparison
node.expr = repr(intval)
return node
except ValueError:
@@ -847,7 +845,7 @@ def _validate_enum_int(ctxt, idl_enum):
min_value = min(int_values_set)
max_value = max(int_values_set)
- valid_int = {x for x in xrange(min_value, max_value + 1)}
+ valid_int = {x for x in range(min_value, max_value + 1)}
if valid_int != int_values_set:
ctxt.add_enum_non_continuous_range_error(idl_enum, idl_enum.name)
@@ -879,7 +877,7 @@ def _bind_enum(ctxt, idl_enum):
ast_enum_value.value = enum_value.value
ast_enum.values.append(ast_enum_value)
- values_set = set() # type: Set[unicode]
+ values_set = set() # type: Set[str]
for enum_value in idl_enum.values:
values_set.add(enum_value.value)
@@ -953,7 +951,7 @@ def _bind_server_parameter_with_storage(ctxt, ast_param, param):
def _bind_server_parameter_set_at(ctxt, param):
- # type: (errors.ParserContext, syntax.ServerParameter) -> unicode
+ # type: (errors.ParserContext, syntax.ServerParameter) -> str
"""Translate set_at options to C++ enum value."""
set_at = 0
@@ -1003,13 +1001,13 @@ def _bind_server_parameter(ctxt, param):
def _is_invalid_config_short_name(name):
- # type: (unicode) -> bool
+ # type: (str) -> bool
"""Check if a given name is valid as a short name."""
return ('.' in name) or (',' in name)
def _parse_config_option_sources(source_list):
- # type: (List[unicode]) -> unicode
+ # type: (List[str]) -> str
"""Parse source list into enum value used by runtime."""
sources = 0
if not source_list:
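
In _validate_enum_int the comprehension over xrange becomes one over range; since the comprehension adds nothing, set(range(...)) would say the same thing more directly. A one-line sketch:

    valid_int = set(range(1, 5 + 1))    # {1, 2, 3, 4, 5}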
diff --git a/buildscripts/idl/idl/bson.py b/buildscripts/idl/idl/bson.py
index d23d0d34458..ffed607048d 100644
--- a/buildscripts/idl/idl/bson.py
+++ b/buildscripts/idl/idl/bson.py
@@ -31,8 +31,6 @@ BSON Type Information.
Utilities for validating bson types, etc.
"""
-from __future__ import absolute_import, print_function, unicode_literals
-
from typing import Dict, List
# Dictionary of BSON type Information
@@ -100,7 +98,7 @@ def cpp_bson_type_name(name):
def list_valid_types():
# type: () -> List[unicode]
"""Return a list of supported bson types."""
- return [a for a in _BSON_TYPE_INFORMATION.iterkeys()]
+ return [a for a in _BSON_TYPE_INFORMATION.keys()]
def is_valid_bindata_subtype(name):
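
iterkeys() and friends are gone in Python 3: keys() and values() already return lazy views, and iterating a dict yields its keys, so the comprehension can shrink further. A minimal sketch with a stand-in mapping:

    _BSON_TYPE_INFORMATION = {"string": None, "int": None}    # stand-in mapping

    names = [a for a in _BSON_TYPE_INFORMATION.keys()]    # as converted above
    assert names == list(_BSON_TYPE_INFORMATION)          # equivalent and shorter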
diff --git a/buildscripts/idl/idl/common.py b/buildscripts/idl/idl/common.py
index 8a029b0ca97..39f5f032183 100644
--- a/buildscripts/idl/idl/common.py
+++ b/buildscripts/idl/idl/common.py
@@ -31,8 +31,6 @@ IDL Common classes.
Classes which are shared among both the IDL idl.syntax and idl.AST trees.
"""
-from __future__ import absolute_import, print_function, unicode_literals
-
import os
import string
from typing import Mapping
@@ -44,7 +42,7 @@ COMMAND_NAMESPACE_TYPE = "type"
def title_case(name):
- # type: (unicode) -> unicode
+ # type: (str) -> str
"""Return a CapitalCased version of a string."""
# Only capitalize the last part of a fully-qualified name
@@ -56,13 +54,13 @@ def title_case(name):
def camel_case(name):
- # type: (unicode) -> unicode
+ # type: (str) -> str
"""Return a camelCased version of a string."""
return name[0:1].lower() + name[1:]
def qualify_cpp_name(cpp_namespace, cpp_type_name):
- # type: (unicode, unicode) -> unicode
+ # type: (str, str) -> str
"""Preprend a type name with a C++ namespace if cpp_namespace is not None."""
if cpp_namespace:
return cpp_namespace + "::" + cpp_type_name
@@ -71,7 +69,7 @@ def qualify_cpp_name(cpp_namespace, cpp_type_name):
def _escape_template_string(template):
- # type: (unicode) -> unicode
+ # type: (str) -> str
"""Escape the '$' in template strings unless followed by '{'."""
# See https://docs.python.org/2/library/string.html#template-strings
template = template.replace('${', '#{')
@@ -80,20 +78,20 @@ def _escape_template_string(template):
def template_format(template, template_params=None):
- # type: (unicode, Mapping[unicode,unicode]) -> unicode
+ # type: (str, Mapping[str,str]) -> str
"""Write a template to the stream."""
- # Ignore the types since we use unicode literals and this expects str but works fine with
- # unicode.
+ # Ignore the types: in Python 3 the template and its parameters are already str,
+ # so string.Template handles them directly.
# See https://docs.python.org/2/library/string.html#template-strings
template = _escape_template_string(template)
return string.Template(template).substitute(template_params) # type: ignore
def template_args(template, **kwargs):
- # type: (unicode, **unicode) -> unicode
+ # type: (str, **str) -> str
"""Write a template to the stream."""
- # Ignore the types since we use unicode literals and this expects str but works fine with
- # unicode.
+ # Ignore the types: in Python 3 the template and its parameters are already str,
+ # so string.Template handles them directly.
# See https://docs.python.org/2/library/string.html#template-strings
template = _escape_template_string(template)
return string.Template(template).substitute(kwargs) # type: ignore
@@ -103,7 +101,7 @@ class SourceLocation(object):
"""Source location information about an idl.syntax or idl.AST object."""
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a source location."""
self.file_name = file_name
self.line = line
diff --git a/buildscripts/idl/idl/compiler.py b/buildscripts/idl/idl/compiler.py
index 5f1960c3bca..0809bc88514 100644
--- a/buildscripts/idl/idl/compiler.py
+++ b/buildscripts/idl/idl/compiler.py
@@ -31,8 +31,6 @@ IDL compiler driver.
Orchestrates the 3 passes (parser, binder, and generator) together.
"""
-from __future__ import absolute_import, print_function, unicode_literals
-
import io
import logging
import os
@@ -54,14 +52,14 @@ class CompilerArgs(object):
def __init__(self):
# type: () -> None
"""Create a container for compiler arguments."""
- self.import_directories = None # type: List[unicode]
- self.input_file = None # type: unicode
- self.target_arch = None # type: unicode
+ self.import_directories = None # type: List[str]
+ self.input_file = None # type: str
+ self.target_arch = None # type: str
- self.output_source = None # type: unicode
- self.output_header = None # type: unicode
- self.output_base_dir = None # type: unicode
- self.output_suffix = None # type: unicode
+ self.output_source = None # type: str
+ self.output_header = None # type: str
+ self.output_base_dir = None # type: str
+ self.output_suffix = None # type: str
self.write_dependencies = False # type: bool
self.write_dependencies_inline = False # type: bool
@@ -71,14 +69,14 @@ class CompilerImportResolver(parser.ImportResolverBase):
"""Class for the IDL compiler to resolve imported files."""
def __init__(self, import_directories):
- # type: (List[unicode]) -> None
+ # type: (List[str]) -> None
"""Construct a ImportResolver."""
self._import_directories = import_directories
super(CompilerImportResolver, self).__init__()
def resolve(self, base_file, imported_file_name):
- # type: (unicode, unicode) -> unicode
+ # type: (str, str) -> str
"""Return the complete path to an imported file name."""
logging.debug("Resolving imported file '%s' for file '%s'", imported_file_name, base_file)
@@ -109,7 +107,7 @@ class CompilerImportResolver(parser.ImportResolverBase):
raise errors.IDLError(msg)
def open(self, resolved_file_name):
- # type: (unicode) -> Any
+ # type: (str) -> Any
"""Return an io.Stream for the requested file."""
return io.open(resolved_file_name, encoding='utf-8')
@@ -129,7 +127,7 @@ def _write_dependencies(spec, write_dependencies_inline):
def _update_import_includes(args, spec, header_file_name):
- # type: (CompilerArgs, syntax.IDLSpec, unicode) -> None
+ # type: (CompilerArgs, syntax.IDLSpec, str) -> None
"""Update the list of imports with a list of include files for each import with structs."""
# This function is fragile:
# In order to try to generate headers with an "include what you use" set of headers, the IDL
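
In Python 3 the built-in open is io.open, so the explicit io.open call with encoding='utf-8' already returns a text-mode stream that decodes on read; the same spelling worked under Python 2, which is presumably why it survives the port unchanged. A minimal sketch with a hypothetical file:

    import io

    with io.open("example.idl", encoding="utf-8") as stream:    # hypothetical file
        contents = stream.read()    # str, decoded from UTF-8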
diff --git a/buildscripts/idl/idl/cpp_types.py b/buildscripts/idl/idl/cpp_types.py
index 4cd5489409e..99ff49507e6 100644
--- a/buildscripts/idl/idl/cpp_types.py
+++ b/buildscripts/idl/idl/cpp_types.py
@@ -27,8 +27,6 @@
#
"""IDL C++ Code Generator."""
-from __future__ import absolute_import, print_function, unicode_literals
-
from abc import ABCMeta, abstractmethod
import string
import textwrap
@@ -43,7 +41,7 @@ _STD_ARRAY_UINT8_16 = 'std::array<std::uint8_t,16>'
def is_primitive_scalar_type(cpp_type):
- # type: (unicode) -> bool
+ # type: (str) -> bool
"""
Return True if a cpp_type is a primitive scalar type.
@@ -56,7 +54,7 @@ def is_primitive_scalar_type(cpp_type):
def get_primitive_scalar_type_default_value(cpp_type):
- # type: (unicode) -> unicode
+ # type: (str) -> str
"""
Return a default value for a primitive scalar type.
@@ -70,26 +68,26 @@ def get_primitive_scalar_type_default_value(cpp_type):
def is_primitive_type(cpp_type):
- # type: (unicode) -> bool
+ # type: (str) -> bool
"""Return True if a cpp_type is a primitive type and should not be returned as reference."""
cpp_type = cpp_type.replace(' ', '')
return is_primitive_scalar_type(cpp_type) or cpp_type == _STD_ARRAY_UINT8_16
def _qualify_optional_type(cpp_type):
- # type: (unicode) -> unicode
+ # type: (str) -> str
"""Qualify the type as optional."""
return 'boost::optional<%s>' % (cpp_type)
def _qualify_array_type(cpp_type):
- # type: (unicode) -> unicode
+ # type: (str) -> str
"""Qualify the type if the field is an array."""
return "std::vector<%s>" % (cpp_type)
def _optionally_make_call(method_name, param):
- # type: (unicode, unicode) -> unicode
+ # type: (str, str) -> str
"""Return a call to method_name if it is not None, otherwise return an empty string."""
if not method_name:
return ''
@@ -97,11 +95,9 @@ def _optionally_make_call(method_name, param):
return "%s(%s);" % (method_name, param)
-class CppTypeBase(object):
+class CppTypeBase(metaclass=ABCMeta):
"""Base type for C++ Type information."""
- __metaclass__ = ABCMeta
-
def __init__(self, field):
# type: (ast.Field) -> None
"""Construct a CppTypeBase."""
@@ -109,19 +105,19 @@ class CppTypeBase(object):
@abstractmethod
def get_type_name(self):
- # type: () -> unicode
+ # type: () -> str
"""Get the C++ type name for a field."""
pass
@abstractmethod
def get_storage_type(self):
- # type: () -> unicode
+ # type: () -> str
"""Get the C++ type name for the storage of class member for a field."""
pass
@abstractmethod
def get_getter_setter_type(self):
- # type: () -> unicode
+ # type: () -> str
"""Get the C++ type name for the getter/setter parameter for a field."""
pass
@@ -151,25 +147,25 @@ class CppTypeBase(object):
@abstractmethod
def get_getter_body(self, member_name):
- # type: (unicode) -> unicode
+ # type: (str) -> str
"""Get the body of the getter."""
pass
@abstractmethod
def get_setter_body(self, member_name, validator_method_name):
- # type: (unicode, unicode) -> unicode
+ # type: (str, str) -> str
"""Get the body of the setter."""
pass
@abstractmethod
def get_transform_to_getter_type(self, expression):
- # type: (unicode) -> Optional[unicode]
+ # type: (str) -> Optional[str]
"""Get the expression to transform the input expression into the getter type."""
pass
@abstractmethod
def get_transform_to_storage_type(self, expression):
- # type: (unicode) -> Optional[unicode]
+ # type: (str) -> Optional[str]
"""Get the expression to transform the input expression into the setter type."""
pass
@@ -178,7 +174,7 @@ class _CppTypeBasic(CppTypeBase):
"""Default class for C++ Type information. Does not handle view types."""
def get_type_name(self):
- # type: () -> unicode
+ # type: () -> str
if self._field.struct_type:
cpp_type = common.title_case(self._field.struct_type)
else:
@@ -187,11 +183,11 @@ class _CppTypeBasic(CppTypeBase):
return cpp_type
def get_storage_type(self):
- # type: () -> unicode
+ # type: () -> str
return self.get_type_name()
def get_getter_setter_type(self):
- # type: () -> unicode
+ # type: () -> str
return self.get_type_name()
def is_const_type(self):
@@ -225,22 +221,22 @@ class _CppTypeBasic(CppTypeBase):
return False
def get_getter_body(self, member_name):
- # type: (unicode) -> unicode
+ # type: (str) -> str
return common.template_args('return ${member_name};', member_name=member_name)
def get_setter_body(self, member_name, validator_method_name):
- # type: (unicode, unicode) -> unicode
+ # type: (str, str) -> str
return common.template_args(
'${optionally_call_validator} ${member_name} = std::move(value);',
optionally_call_validator=_optionally_make_call(validator_method_name,
'value'), member_name=member_name)
def get_transform_to_getter_type(self, expression):
- # type: (unicode) -> Optional[unicode]
+ # type: (str) -> Optional[str]
return None
def get_transform_to_storage_type(self, expression):
- # type: (unicode) -> Optional[unicode]
+ # type: (str) -> Optional[str]
return None
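
The `common.template_args` helper used throughout these bodies is defined outside the hunks shown; judging by its call sites it behaves like `string.Template.substitute`. A hypothetical stand-in, purely to make the snippets above concrete:

```python
import string


def template_args(template_str, **kwargs):
    # type: (str, **str) -> str
    """Hypothetical stand-in for common.template_args (assumed behavior)."""
    return string.Template(template_str).substitute(kwargs)


print(template_args('return ${member_name};', member_name='_value'))
# -> return _value;
```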
@@ -248,21 +244,21 @@ class _CppTypeView(CppTypeBase):
"""Base type for C++ View Types information."""
def __init__(self, field, storage_type, view_type):
- # type: (ast.Field, unicode, unicode) -> None
+ # type: (ast.Field, str, str) -> None
self._storage_type = storage_type
self._view_type = view_type
super(_CppTypeView, self).__init__(field)
def get_type_name(self):
- # type: () -> unicode
+ # type: () -> str
return self._storage_type
def get_storage_type(self):
- # type: () -> unicode
+ # type: () -> str
return self._storage_type
def get_getter_setter_type(self):
- # type: () -> unicode
+ # type: () -> str
return self._view_type
def is_const_type(self):
@@ -282,11 +278,11 @@ class _CppTypeView(CppTypeBase):
return True
def get_getter_body(self, member_name):
- # type: (unicode) -> unicode
+ # type: (str) -> str
return common.template_args('return ${member_name};', member_name=member_name)
def get_setter_body(self, member_name, validator_method_name):
- # type: (unicode, unicode) -> unicode
+ # type: (str, str) -> str
return common.template_args(
'auto _tmpValue = ${value}; ${optionally_call_validator} ${member_name} = std::move(_tmpValue);',
member_name=member_name, optionally_call_validator=_optionally_make_call(
@@ -294,11 +290,11 @@ class _CppTypeView(CppTypeBase):
'_tmpValue'), value=self.get_transform_to_storage_type("value"))
def get_transform_to_getter_type(self, expression):
- # type: (unicode) -> Optional[unicode]
+ # type: (str) -> Optional[str]
return None
def get_transform_to_storage_type(self, expression):
- # type: (unicode) -> Optional[unicode]
+ # type: (str) -> Optional[str]
return common.template_args(
'${expression}.toString()',
expression=expression,
@@ -309,15 +305,15 @@ class _CppTypeVector(CppTypeBase):
"""Base type for C++ Std::Vector Types information."""
def get_type_name(self):
- # type: () -> unicode
+ # type: () -> str
return 'std::vector<std::uint8_t>'
def get_storage_type(self):
- # type: () -> unicode
+ # type: () -> str
return self.get_type_name()
def get_getter_setter_type(self):
- # type: () -> unicode
+ # type: () -> str
return 'ConstDataRange'
def is_const_type(self):
@@ -337,12 +333,12 @@ class _CppTypeVector(CppTypeBase):
return True
def get_getter_body(self, member_name):
- # type: (unicode) -> unicode
+ # type: (str) -> str
return common.template_args('return ConstDataRange(${member_name});',
member_name=member_name)
def get_setter_body(self, member_name, validator_method_name):
- # type: (unicode, unicode) -> unicode
+ # type: (str, str) -> str
return common.template_args(
'auto _tmpValue = ${value}; ${optionally_call_validator} ${member_name} = std::move(_tmpValue);',
member_name=member_name, optionally_call_validator=_optionally_make_call(
@@ -350,11 +346,11 @@ class _CppTypeVector(CppTypeBase):
'_tmpValue'), value=self.get_transform_to_storage_type("value"))
def get_transform_to_getter_type(self, expression):
- # type: (unicode) -> Optional[unicode]
+ # type: (str) -> Optional[str]
return common.template_args('ConstDataRange(${expression});', expression=expression)
def get_transform_to_storage_type(self, expression):
- # type: (unicode) -> Optional[unicode]
+ # type: (str) -> Optional[str]
return common.template_args(
'std::vector<std::uint8_t>(reinterpret_cast<const uint8_t*>(${expression}.data()), ' +
'reinterpret_cast<const uint8_t*>(${expression}.data()) + ${expression}.length())',
@@ -370,15 +366,15 @@ class _CppTypeDelegating(CppTypeBase):
super(_CppTypeDelegating, self).__init__(field)
def get_type_name(self):
- # type: () -> unicode
+ # type: () -> str
return self._base.get_type_name()
def get_storage_type(self):
- # type: () -> unicode
+ # type: () -> str
return self._base.get_storage_type()
def get_getter_setter_type(self):
- # type: () -> unicode
+ # type: () -> str
return self._base.get_getter_setter_type()
def is_const_type(self):
@@ -398,19 +394,19 @@ class _CppTypeDelegating(CppTypeBase):
return self._base.is_view_type()
def get_getter_body(self, member_name):
- # type: (unicode) -> unicode
+ # type: (str) -> str
return self._base.get_getter_body(member_name)
def get_setter_body(self, member_name, validator_method_name):
- # type: (unicode, unicode) -> unicode
+ # type: (str, str) -> str
return self._base.get_setter_body(member_name, validator_method_name)
def get_transform_to_getter_type(self, expression):
- # type: (unicode) -> Optional[unicode]
+ # type: (str) -> Optional[str]
return self._base.get_transform_to_getter_type(expression)
def get_transform_to_storage_type(self, expression):
- # type: (unicode) -> Optional[unicode]
+ # type: (str) -> Optional[str]
return self._base.get_transform_to_storage_type(expression)
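
The wrapper classes that follow all lean on the same delegation shape: `_CppTypeDelegating` forwards every query to the wrapped type, and `_CppTypeArray`/`_CppTypeOptional` override only the answers that change. A minimal sketch of the pattern, with class and type names compressed for illustration:

```python
class BasicType:
    def get_storage_type(self):
        # type: () -> str
        return 'std::int32_t'


class Delegating:
    def __init__(self, base):
        self._base = base

    def get_storage_type(self):
        # type: () -> str
        return self._base.get_storage_type()  # default: forward unchanged


class OptionalType(Delegating):
    def get_storage_type(self):
        # type: () -> str
        # Only the storage spelling changes; everything else stays delegated.
        return 'boost::optional<%s>' % (self._base.get_storage_type(),)


print(OptionalType(BasicType()).get_storage_type())
# -> boost::optional<std::int32_t>
```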
@@ -418,11 +414,11 @@ class _CppTypeArray(_CppTypeDelegating):
"""C++ Array type for wrapping a base C++ Type information."""
def get_storage_type(self):
- # type: () -> unicode
+ # type: () -> str
return _qualify_array_type(self._base.get_storage_type())
def get_getter_setter_type(self):
- # type: () -> unicode
+ # type: () -> str
return _qualify_array_type(self._base.get_getter_setter_type())
def return_by_reference(self):
@@ -436,14 +432,14 @@ class _CppTypeArray(_CppTypeDelegating):
return True
def get_getter_body(self, member_name):
- # type: (unicode) -> unicode
+ # type: (str) -> str
convert = self.get_transform_to_getter_type(member_name)
if convert:
return common.template_args('return ${convert};', convert=convert)
return self._base.get_getter_body(member_name)
def get_setter_body(self, member_name, validator_method_name):
- # type: (unicode, unicode) -> unicode
+ # type: (str, str) -> str
convert = self.get_transform_to_storage_type("value")
if convert:
return common.template_args(
@@ -453,7 +449,7 @@ class _CppTypeArray(_CppTypeDelegating):
return self._base.get_setter_body(member_name, validator_method_name)
def get_transform_to_getter_type(self, expression):
- # type: (unicode) -> Optional[unicode]
+ # type: (str) -> Optional[str]
if self._base.get_storage_type() != self._base.get_getter_setter_type():
return common.template_args(
'transformVector(${expression})',
@@ -462,7 +458,7 @@ class _CppTypeArray(_CppTypeDelegating):
return None
def get_transform_to_storage_type(self, expression):
- # type: (unicode) -> Optional[unicode]
+ # type: (str) -> Optional[str]
if self._base.get_storage_type() != self._base.get_getter_setter_type():
return common.template_args(
'transformVector(${expression})',
@@ -475,11 +471,11 @@ class _CppTypeOptional(_CppTypeDelegating):
"""Base type for Optional C++ Type information which wraps C++ types."""
def get_storage_type(self):
- # type: () -> unicode
+ # type: () -> str
return _qualify_optional_type(self._base.get_storage_type())
def get_getter_setter_type(self):
- # type: () -> unicode
+ # type: () -> str
return _qualify_optional_type(self._base.get_getter_setter_type())
def disable_xvalue(self):
@@ -493,7 +489,7 @@ class _CppTypeOptional(_CppTypeDelegating):
return self._base.return_by_reference()
def get_getter_body(self, member_name):
- # type: (unicode) -> unicode
+ # type: (str) -> str
base_expression = common.template_args("${member_name}.get()", member_name=member_name)
convert = self._base.get_transform_to_getter_type(base_expression)
@@ -517,7 +513,7 @@ class _CppTypeOptional(_CppTypeDelegating):
return common.template_args('return ${member_name};', member_name=member_name)
def get_setter_body(self, member_name, validator_method_name):
- # type: (unicode, unicode) -> unicode
+ # type: (str, str) -> str
convert = self._base.get_transform_to_storage_type("value.get()")
if convert or validator_method_name:
if not convert:
@@ -558,11 +554,9 @@ def get_cpp_type(field):
return cpp_type_info
-class BsonCppTypeBase(object):
+class BsonCppTypeBase(object, metaclass=ABCMeta):
"""Base type for custom C++ support for BSON Types information."""
- __metaclass__ = ABCMeta
-
def __init__(self, field):
# type: (ast.Field) -> None
"""Construct a BsonCppTypeBase."""
@@ -570,7 +564,7 @@ class BsonCppTypeBase(object):
@abstractmethod
def gen_deserializer_expression(self, indented_writer, object_instance):
- # type: (writer.IndentedTextWriter, unicode) -> unicode
+ # type: (writer.IndentedTextWriter, str) -> str
"""Generate code with the text writer and return an expression to deserialize the type."""
pass
@@ -582,13 +576,13 @@ class BsonCppTypeBase(object):
@abstractmethod
def gen_serializer_expression(self, indented_writer, expression):
- # type: (writer.IndentedTextWriter, unicode) -> unicode
+ # type: (writer.IndentedTextWriter, str) -> str
"""Generate code with the text writer and return an expression to serialize the type."""
pass
def _call_method_or_global_function(expression, method_name):
- # type: (unicode, unicode) -> unicode
+ # type: (str, str) -> str
"""
Given a fully-qualified method name, call it correctly.
@@ -609,12 +603,12 @@ class _CommonBsonCppTypeBase(BsonCppTypeBase):
"""Custom C++ support for basic BSON types."""
def __init__(self, field, deserialize_method_name):
- # type: (ast.Field, unicode) -> None
+ # type: (ast.Field, str) -> None
self._deserialize_method_name = deserialize_method_name
super(_CommonBsonCppTypeBase, self).__init__(field)
def gen_deserializer_expression(self, indented_writer, object_instance):
- # type: (writer.IndentedTextWriter, unicode) -> unicode
+ # type: (writer.IndentedTextWriter, str) -> str
return common.template_args('${object_instance}.${method_name}()',
object_instance=object_instance,
method_name=self._deserialize_method_name)
@@ -624,7 +618,7 @@ class _CommonBsonCppTypeBase(BsonCppTypeBase):
return self._field.serializer is not None
def gen_serializer_expression(self, indented_writer, expression):
- # type: (writer.IndentedTextWriter, unicode) -> unicode
+ # type: (writer.IndentedTextWriter, str) -> str
return _call_method_or_global_function(expression, self._field.serializer)
@@ -632,7 +626,7 @@ class _ObjectBsonCppTypeBase(BsonCppTypeBase):
"""Custom C++ support for object BSON types."""
def gen_deserializer_expression(self, indented_writer, object_instance):
- # type: (writer.IndentedTextWriter, unicode) -> unicode
+ # type: (writer.IndentedTextWriter, str) -> str
if self._field.deserializer:
# Call a method like: Class::method(const BSONObj& value)
indented_writer.write_line(
@@ -648,7 +642,7 @@ class _ObjectBsonCppTypeBase(BsonCppTypeBase):
return self._field.serializer is not None
def gen_serializer_expression(self, indented_writer, expression):
- # type: (writer.IndentedTextWriter, unicode) -> unicode
+ # type: (writer.IndentedTextWriter, str) -> str
method_name = writer.get_method_name(self._field.serializer)
indented_writer.write_line(
common.template_args('const BSONObj localObject = ${expression}.${method_name}();',
@@ -660,7 +654,7 @@ class _BinDataBsonCppTypeBase(BsonCppTypeBase):
"""Custom C++ support for all binData BSON types."""
def gen_deserializer_expression(self, indented_writer, object_instance):
- # type: (writer.IndentedTextWriter, unicode) -> unicode
+ # type: (writer.IndentedTextWriter, str) -> str
if self._field.bindata_subtype == 'uuid':
return common.template_args('${object_instance}.uuid()',
object_instance=object_instance)
@@ -672,7 +666,7 @@ class _BinDataBsonCppTypeBase(BsonCppTypeBase):
return True
def gen_serializer_expression(self, indented_writer, expression):
- # type: (writer.IndentedTextWriter, unicode) -> unicode
+ # type: (writer.IndentedTextWriter, str) -> str
if self._field.serializer:
method_name = writer.get_method_name(self._field.serializer)
indented_writer.write_line(
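
Before moving on to the enum generator: `_call_method_or_global_function`, whose body falls outside these hunks, appears from its docstring and call sites to choose between method-call and free-function syntax. A hypothetical reconstruction of that contract, assumed rather than copied from the patch:

```python
def call_method_or_global_function(expression, method_name):
    # type: (str, str) -> str
    """Guess at the contract: '::'-qualified names are free functions."""
    if '::' in method_name:
        return '%s(%s)' % (method_name, expression)
    return '%s.%s()' % (expression, method_name)


print(call_method_or_global_function('value', 'toString'))
# -> value.toString()
print(call_method_or_global_function('value', 'mongo::serializeEpoch'))
# -> mongo::serializeEpoch(value)
```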
diff --git a/buildscripts/idl/idl/enum_types.py b/buildscripts/idl/idl/enum_types.py
index 96c4ead641f..edccacbd327 100644
--- a/buildscripts/idl/idl/enum_types.py
+++ b/buildscripts/idl/idl/enum_types.py
@@ -31,8 +31,6 @@ IDL Enum type information.
Support the code generation for enums
"""
-from __future__ import absolute_import, print_function, unicode_literals
-
from abc import ABCMeta, abstractmethod
import textwrap
from typing import cast, List, Optional, Union
@@ -43,11 +41,9 @@ from . import syntax
from . import writer
-class EnumTypeInfoBase(object):
+class EnumTypeInfoBase(object, metaclass=ABCMeta):
"""Base type for enumeration type information."""
- __metaclass__ = ABCMeta
-
def __init__(self, idl_enum):
# type: (Union[syntax.Enum,ast.Enum]) -> None
"""Construct a EnumTypeInfoBase."""
@@ -73,8 +69,8 @@ class EnumTypeInfoBase(object):
def _get_enum_deserializer_name(self):
# type: () -> unicode
"""Return the name of deserializer function without prefix."""
- return common.template_args("${enum_name}_parse", enum_name=common.title_case(
- self._enum.name))
+ return common.template_args("${enum_name}_parse",
+ enum_name=common.title_case(self._enum.name))
def get_enum_deserializer_name(self):
# type: () -> unicode
@@ -85,8 +81,8 @@ class EnumTypeInfoBase(object):
def _get_enum_serializer_name(self):
# type: () -> unicode
"""Return the name of serializer function without prefix."""
- return common.template_args("${enum_name}_serializer", enum_name=common.title_case(
- self._enum.name))
+ return common.template_args("${enum_name}_serializer",
+ enum_name=common.title_case(self._enum.name))
def get_enum_serializer_name(self):
# type: () -> unicode
@@ -114,7 +110,7 @@ class EnumTypeInfoBase(object):
@abstractmethod
def get_serializer_declaration(self):
- # type: () -> unicode
+ # type: () -> str
"""Get the serializer function declaration minus trailing semicolon."""
pass
@@ -125,11 +121,9 @@ class EnumTypeInfoBase(object):
pass
-class _EnumTypeInt(EnumTypeInfoBase):
+class _EnumTypeInt(EnumTypeInfoBase, metaclass=ABCMeta):
"""Type information for integer enumerations."""
- __metaclass__ = ABCMeta
-
def get_cpp_type_name(self):
# type: () -> unicode
return common.title_case(self._enum.name)
@@ -174,7 +168,7 @@ class _EnumTypeInt(EnumTypeInfoBase):
"""))
def get_serializer_declaration(self):
- # type: () -> unicode
+ # type: () -> str
"""Get the serializer function declaration minus trailing semicolon."""
return common.template_args("std::int32_t ${function_name}(${enum_name} value)",
enum_name=self.get_cpp_type_name(),
@@ -200,15 +194,13 @@ def _get_constant_enum_name(idl_enum, enum_value):
name=enum_value.name)
-class _EnumTypeString(EnumTypeInfoBase):
+class _EnumTypeString(EnumTypeInfoBase, metaclass=ABCMeta):
"""Type information for string enumerations."""
- __metaclass__ = ABCMeta
-
def get_cpp_type_name(self):
# type: () -> unicode
- return common.template_args("${enum_name}Enum", enum_name=common.title_case(
- self._enum.name))
+ return common.template_args("${enum_name}Enum",
+ enum_name=common.title_case(self._enum.name))
def get_bson_types(self):
# type: () -> List[unicode]
@@ -236,24 +228,25 @@ class _EnumTypeString(EnumTypeInfoBase):
with writer.NamespaceScopeBlock(indented_writer, ['']):
for enum_value in self._enum.values:
indented_writer.write_line(
- common.template_args('constexpr StringData ${constant_name} = "${value}"_sd;',
- constant_name=_get_constant_enum_name(
- self._enum, enum_value), value=enum_value.value))
+ common.template_args(
+ 'constexpr StringData ${constant_name} = "${value}"_sd;',
+ constant_name=_get_constant_enum_name(self._enum,
+ enum_value), value=enum_value.value))
indented_writer.write_empty_line()
with writer.TemplateContext(indented_writer, template_params):
with writer.IndentedScopedBlock(indented_writer, "${function_name} {", "}"):
for enum_value in self._enum.values:
- predicate = 'if (value == %s) {' % (
- _get_constant_enum_name(self._enum, enum_value))
+ predicate = 'if (value == %s) {' % (_get_constant_enum_name(
+ self._enum, enum_value))
with writer.IndentedScopedBlock(indented_writer, predicate, "}"):
- indented_writer.write_template('return ${enum_name}::%s;' %
- (enum_value.name))
+ indented_writer.write_template(
+ 'return ${enum_name}::%s;' % (enum_value.name))
indented_writer.write_line("ctxt.throwBadEnumValue(value);")
def get_serializer_declaration(self):
- # type: () -> unicode
+ # type: () -> str
"""Get the serializer function declaration minus trailing semicolon."""
return common.template_args("StringData ${function_name}(${enum_name} value)",
enum_name=self.get_cpp_type_name(),
@@ -270,9 +263,9 @@ class _EnumTypeString(EnumTypeInfoBase):
with writer.TemplateContext(indented_writer, template_params):
with writer.IndentedScopedBlock(indented_writer, "${function_name} {", "}"):
for enum_value in self._enum.values:
- with writer.IndentedScopedBlock(indented_writer,
- 'if (value == ${enum_name}::%s) {' %
- (enum_value.name), "}"):
+ with writer.IndentedScopedBlock(
+ indented_writer, 'if (value == ${enum_name}::%s) {' % (enum_value.name),
+ "}"):
indented_writer.write_line(
'return %s;' % (_get_constant_enum_name(self._enum, enum_value)))
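
The naming scheme threaded through these templates: an IDL enum named `startupType` yields `StartupType_parse` and `StartupType_serializer` free functions. A runnable distillation, where `title_case` is re-implemented on the assumption that `common.title_case` upper-cases the first letter, matching its uses above:

```python
def title_case(name):
    # type: (str) -> str
    """Assumed behavior of common.title_case: capitalize the first letter."""
    return name[:1].upper() + name[1:]


def enum_deserializer_name(enum_name):
    # type: (str) -> str
    return '%s_parse' % (title_case(enum_name),)


def enum_serializer_name(enum_name):
    # type: (str) -> str
    return '%s_serializer' % (title_case(enum_name),)


print(enum_deserializer_name('startupType'))  # StartupType_parse
print(enum_serializer_name('startupType'))    # StartupType_serializer
```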
diff --git a/buildscripts/idl/idl/errors.py b/buildscripts/idl/idl/errors.py
index 53cb8a33ed5..42021fdc5da 100644
--- a/buildscripts/idl/idl/errors.py
+++ b/buildscripts/idl/idl/errors.py
@@ -32,8 +32,6 @@ Common error handling code for IDL compiler.
- Error codes used by the IDL compiler.
"""
-from __future__ import absolute_import, print_function, unicode_literals
-
import inspect
import os
import sys
@@ -132,7 +130,7 @@ class ParserError(common.SourceLocation):
"""
def __init__(self, error_id, msg, file_name, line, column):
- # type: (unicode, unicode, unicode, int, int) -> None
+ # type: (str, str, str, int, int) -> None
"""Construct a parser error with source location information."""
# pylint: disable=too-many-arguments
self.error_id = error_id
@@ -160,7 +158,7 @@ class ParserErrorCollection(object):
self._errors = [] # type: List[ParserError]
def add(self, location, error_id, msg):
- # type: (common.SourceLocation, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str) -> None
"""Add an error message with file (line, column) information."""
self._errors.append(
ParserError(error_id, msg, location.file_name, location.line, location.column))
@@ -171,12 +169,12 @@ class ParserErrorCollection(object):
return len(self._errors) > 0
def contains(self, error_id):
- # type: (unicode) -> bool
+ # type: (str) -> bool
"""Check if the error collection has at least one message of a given error_id."""
return len([a for a in self._errors if a.error_id == error_id]) > 0
def to_list(self):
- # type: () -> List[unicode]
+ # type: () -> List[str]
"""Return a list of formatted error messages."""
return [str(error) for error in self._errors]
@@ -211,13 +209,13 @@ class ParserContext(object):
# pylint: disable=too-many-public-methods
def __init__(self, file_name, errors):
- # type: (unicode, ParserErrorCollection) -> None
+ # type: (str, ParserErrorCollection) -> None
"""Construct a new ParserContext."""
self.errors = errors
self.file_name = file_name
def _add_error(self, location, error_id, msg):
- # type: (common.SourceLocation, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str) -> None
"""
Add an error with source location information.
@@ -226,7 +224,7 @@ class ParserContext(object):
self.errors.add(location, error_id, msg)
def _add_node_error(self, node, error_id, msg):
- # type: (yaml.nodes.Node, unicode, unicode) -> None
+ # type: (yaml.nodes.Node, str, str) -> None
"""Add an error with source location information based on a YAML node."""
self.errors.add(
common.SourceLocation(self.file_name, node.start_mark.line, node.start_mark.column),
@@ -235,32 +233,32 @@ class ParserContext(object):
def add_unknown_root_node_error(self, node):
# type: (yaml.nodes.Node) -> None
"""Add an error about an unknown YAML root node."""
- self._add_node_error(node, ERROR_ID_UNKNOWN_ROOT,
- ("Unrecognized IDL specification root level node '%s', only " +
- " (global, import, types, commands, and structs) are accepted") %
- (node.value))
+ self._add_node_error(
+ node, ERROR_ID_UNKNOWN_ROOT,
+ ("Unrecognized IDL specification root level node '%s', only " +
+ " (global, import, types, commands, and structs) are accepted") % (node.value))
def add_unknown_node_error(self, node, name):
- # type: (yaml.nodes.Node, unicode) -> None
+ # type: (yaml.nodes.Node, str) -> None
"""Add an error about an unknown node."""
self._add_node_error(node, ERROR_ID_UNKNOWN_NODE,
"Unknown IDL node '%s' for YAML entity '%s'" % (node.value, name))
def add_duplicate_symbol_error(self, location, name, duplicate_class_name, original_class_name):
- # type: (common.SourceLocation, unicode, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str, str) -> None
"""Add an error about a duplicate symbol."""
- self._add_error(location, ERROR_ID_DUPLICATE_SYMBOL,
- "%s '%s' is a duplicate symbol of an existing %s" %
- (duplicate_class_name, name, original_class_name))
+ self._add_error(
+ location, ERROR_ID_DUPLICATE_SYMBOL, "%s '%s' is a duplicate symbol of an existing %s" %
+ (duplicate_class_name, name, original_class_name))
def add_unknown_type_error(self, location, field_name, type_name):
- # type: (common.SourceLocation, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str) -> None
"""Add an error about an unknown type."""
self._add_error(location, ERROR_ID_UNKNOWN_TYPE,
"'%s' is an unknown type for field '%s'" % (type_name, field_name))
def _is_node_type(self, node, node_name, expected_node_type):
- # type: (Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode], unicode, unicode) -> bool
+ # type: (Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode], str, str) -> bool
"""Return True if the yaml node type is expected, otherwise returns False and logs an error."""
if not node.id == expected_node_type:
self._add_node_error(
@@ -271,17 +269,17 @@ class ParserContext(object):
return True
def is_mapping_node(self, node, node_name):
- # type: (Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode], unicode) -> bool
+ # type: (Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode], str) -> bool
"""Return True if this YAML node is a Map."""
return self._is_node_type(node, node_name, "mapping")
def is_scalar_node(self, node, node_name):
- # type: (Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode], unicode) -> bool
+ # type: (Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode], str) -> bool
"""Return True if this YAML node is a Scalar."""
return self._is_node_type(node, node_name, "scalar")
def is_scalar_sequence(self, node, node_name):
- # type: (Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode], unicode) -> bool
+ # type: (Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode], str) -> bool
"""Return True if this YAML node is a Sequence of Scalars."""
if self._is_node_type(node, node_name, "sequence"):
for seq_node in node.value:
@@ -291,7 +289,7 @@ class ParserContext(object):
return False
def is_scalar_sequence_or_scalar_node(self, node, node_name):
- # type: (Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode], unicode) -> bool
+ # type: (Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode], str) -> bool
# pylint: disable=invalid-name
"""Return True if the YAML node is a Scalar or Sequence."""
if not node.id == "scalar" and not node.id == "sequence":
@@ -307,7 +305,7 @@ class ParserContext(object):
return True
def is_scalar_or_mapping_node(self, node, node_name):
- # type: (Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode], unicode) -> bool
+ # type: (Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode], str) -> bool
# pylint: disable=invalid-name
"""Return True if the YAML node is a Scalar or Mapping."""
if not node.id == "scalar" and not node.id == "mapping":
@@ -320,7 +318,7 @@ class ParserContext(object):
return True
def is_scalar_bool_node(self, node, node_name):
- # type: (Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode], unicode) -> bool
+ # type: (Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode], str) -> bool
"""Return True if this YAML node is a Scalar and a valid boolean."""
if not self._is_node_type(node, node_name, "scalar"):
return False
@@ -343,7 +341,7 @@ class ParserContext(object):
return False
def get_list(self, node):
- # type: (Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode]) -> List[unicode]
+ # type: (Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode]) -> List[str]
"""Get a YAML scalar or sequence node as a list of strings."""
assert self.is_scalar_sequence_or_scalar_node(node, "unknown")
if node.id == "scalar":
@@ -352,49 +350,49 @@ class ParserContext(object):
return [v.value for v in node.value]
def add_duplicate_error(self, node, node_name):
- # type: (yaml.nodes.Node, unicode) -> None
+ # type: (yaml.nodes.Node, str) -> None
"""Add an error about a duplicate node."""
self._add_node_error(node, ERROR_ID_DUPLICATE_NODE,
"Duplicate node found for '%s'" % (node_name))
def add_empty_struct_error(self, node, name):
- # type: (yaml.nodes.Node, unicode) -> None
+ # type: (yaml.nodes.Node, str) -> None
"""Add an error about a struct without fields."""
self._add_node_error(node, ERROR_ID_EMPTY_FIELDS,
("Struct '%s' must either have fields, chained_types, or " +
"chained_structs specified but neither were found") % (name))
def add_missing_required_field_error(self, node, node_parent, node_name):
- # type: (yaml.nodes.Node, unicode, unicode) -> None
+ # type: (yaml.nodes.Node, str, str) -> None
"""Add an error about a YAML node missing a required child."""
# pylint: disable=invalid-name
- self._add_node_error(node, ERROR_ID_MISSING_REQUIRED_FIELD,
- "IDL node '%s' is missing required scalar '%s'" % (node_parent,
- node_name))
+ self._add_node_error(
+ node, ERROR_ID_MISSING_REQUIRED_FIELD,
+ "IDL node '%s' is missing required scalar '%s'" % (node_parent, node_name))
def add_missing_ast_required_field_error(self, location, ast_type, ast_parent, ast_name):
- # type: (common.SourceLocation, unicode, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str, str) -> None
"""Add an error about a AST node missing a required child."""
# pylint: disable=invalid-name
- self._add_error(location, ERROR_ID_MISSING_AST_REQUIRED_FIELD,
- "%s '%s' is missing required scalar '%s'" % (ast_type, ast_parent,
- ast_name))
+ self._add_error(
+ location, ERROR_ID_MISSING_AST_REQUIRED_FIELD,
+ "%s '%s' is missing required scalar '%s'" % (ast_type, ast_parent, ast_name))
def add_array_not_valid_error(self, location, ast_type, name):
- # type: (common.SourceLocation, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str) -> None
"""Add an error about a 'array' not being a valid type name."""
self._add_error(location, ERROR_ID_ARRAY_NOT_VALID_TYPE,
"The %s '%s' cannot be named 'array'" % (ast_type, name))
def add_bad_bson_type_error(self, location, ast_type, ast_parent, bson_type_name):
- # type: (common.SourceLocation, unicode, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str, str) -> None
"""Add an error about a bad bson type."""
- self._add_error(location, ERROR_ID_BAD_BSON_TYPE,
- "BSON Type '%s' is not recognized for %s '%s'." % (bson_type_name, ast_type,
- ast_parent))
+ self._add_error(
+ location, ERROR_ID_BAD_BSON_TYPE, "BSON Type '%s' is not recognized for %s '%s'." %
+ (bson_type_name, ast_type, ast_parent))
def add_bad_bson_scalar_type_error(self, location, ast_type, ast_parent, bson_type_name):
- # type: (common.SourceLocation, unicode, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str, str) -> None
"""Add an error about a bad list of bson types."""
self._add_error(location, ERROR_ID_BAD_BSON_TYPE_LIST,
("BSON Type '%s' is not a scalar bson type for %s '%s'" +
@@ -402,7 +400,7 @@ class ParserContext(object):
(bson_type_name, ast_type, ast_parent))
def add_bad_bson_bindata_subtype_error(self, location, ast_type, ast_parent, bson_type_name):
- # type: (common.SourceLocation, unicode, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str, str) -> None
"""Add an error about a bindata_subtype associated with a type that is not bindata."""
# pylint: disable=invalid-name
self._add_error(location, ERROR_ID_BAD_BSON_BINDATA_SUBTYPE_TYPE,
@@ -410,7 +408,7 @@ class ParserContext(object):
(ast_type, ast_parent, bson_type_name))
def add_bad_bson_bindata_subtype_value_error(self, location, ast_type, ast_parent, value):
- # type: (common.SourceLocation, unicode, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str, str) -> None
"""Add an error about a bad value for bindata_subtype."""
# pylint: disable=invalid-name
self._add_error(location, ERROR_ID_BAD_BSON_BINDATA_SUBTYPE_VALUE,
@@ -418,22 +416,22 @@ class ParserContext(object):
(value, ast_type, ast_parent))
def add_bad_setat_specifier(self, location, specifier):
- # type: (common.SourceLocation, unicode) -> None
+ # type: (common.SourceLocation, str) -> None
"""Add an error about a bad set_at specifier."""
# pylint: disable=invalid-name
- self._add_error(location, ERROR_ID_BAD_SETAT_SPECIFIER,
- ("Unexpected set_at specifier: '%s', expected 'startup' or 'runtime'") %
- (specifier))
+ self._add_error(
+ location, ERROR_ID_BAD_SETAT_SPECIFIER,
+ ("Unexpected set_at specifier: '%s', expected 'startup' or 'runtime'") % (specifier))
def add_no_string_data_error(self, location, ast_type, ast_parent):
- # type: (common.SourceLocation, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str) -> None
"""Add an error about using StringData for cpp_type."""
self._add_error(location, ERROR_ID_NO_STRINGDATA,
("Do not use mongo::StringData for %s '%s', use std::string instead") %
(ast_type, ast_parent))
def add_ignored_field_must_be_empty_error(self, location, name, field_name):
- # type: (common.SourceLocation, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str) -> None
"""Add an error about field must be empty for ignored fields."""
# pylint: disable=invalid-name
self._add_error(
@@ -442,16 +440,17 @@ class ParserContext(object):
) % (name, field_name))
def add_struct_field_must_be_empty_error(self, location, name, field_name):
- # type: (common.SourceLocation, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str) -> None
"""Add an error about field must be empty for fields of type struct."""
# pylint: disable=invalid-name
- self._add_error(location, ERROR_ID_FIELD_MUST_BE_EMPTY_FOR_STRUCT, (
- "Field '%s' cannot contain a value for property '%s' when a field's type is a struct") %
- (name, field_name))
+ self._add_error(
+ location, ERROR_ID_FIELD_MUST_BE_EMPTY_FOR_STRUCT,
+ ("Field '%s' cannot contain a value for property '%s' when a field's type is a struct")
+ % (name, field_name))
def add_not_custom_scalar_serialization_not_supported_error(self, location, ast_type,
ast_parent, bson_type_name):
- # type: (common.SourceLocation, unicode, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str, str) -> None
# pylint: disable=invalid-name
"""Add an error about field must be empty for fields of type struct."""
self._add_error(
@@ -461,7 +460,7 @@ class ParserContext(object):
(ast_type, ast_parent, bson_type_name))
def add_bad_any_type_use_error(self, location, bson_type, ast_type, ast_parent):
- # type: (common.SourceLocation, unicode, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str, str) -> None
# pylint: disable=invalid-name
"""Add an error about any being used in a list of bson types."""
self._add_error(
@@ -470,25 +469,24 @@ class ParserContext(object):
"%s '%s'. It must be only a single bson type.") % (bson_type, ast_type, ast_parent))
def add_bad_cpp_numeric_type_use_error(self, location, ast_type, ast_parent, cpp_type):
- # type: (common.SourceLocation, unicode, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str, str) -> None
# pylint: disable=invalid-name
"""Add an error about any being used in a list of bson types."""
self._add_error(
location, ERROR_ID_BAD_NUMERIC_CPP_TYPE,
("The C++ numeric type '%s' is not allowed for %s '%s'. Only 'std::int32_t'," +
- " 'std::uint32_t', 'std::uint64_t', and 'std::int64_t' are supported.") % (cpp_type,
- ast_type,
- ast_parent))
+ " 'std::uint32_t', 'std::uint64_t', and 'std::int64_t' are supported.") %
+ (cpp_type, ast_type, ast_parent))
def add_bad_array_type_name_error(self, location, field_name, type_name):
- # type: (common.SourceLocation, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str) -> None
"""Add an error about a field type having a malformed type name."""
self._add_error(location, ERROR_ID_BAD_ARRAY_TYPE_NAME,
("'%s' is not a valid array type for field '%s'. A valid array type" +
" is in the form 'array<type_name>'.") % (type_name, field_name))
def add_array_no_default_error(self, location, field_name):
- # type: (common.SourceLocation, unicode) -> None
+ # type: (common.SourceLocation, str) -> None
"""Add an error about an array having a type with a default value."""
self._add_error(
location, ERROR_ID_ARRAY_NO_DEFAULT,
@@ -496,27 +494,27 @@ class ParserContext(object):
(field_name))
def add_cannot_find_import(self, location, imported_file_name):
- # type: (common.SourceLocation, unicode) -> None
+ # type: (common.SourceLocation, str) -> None
"""Add an error about not being able to find an import."""
self._add_error(location, ERROR_ID_BAD_IMPORT,
"Could not resolve import '%s', file not found" % (imported_file_name))
def add_bindata_no_default(self, location, ast_type, ast_parent):
- # type: (common.SourceLocation, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str) -> None
# pylint: disable=invalid-name
"""Add an error about 'any' being used in a list of bson types."""
self._add_error(location, ERROR_ID_BAD_BINDATA_DEFAULT,
("Default values are not allowed for %s '%s'") % (ast_type, ast_parent))
def add_chained_type_not_found_error(self, location, type_name):
- # type: (common.SourceLocation, unicode) -> None
+ # type: (common.SourceLocation, str) -> None
# pylint: disable=invalid-name
"""Add an error about a chained_type not found."""
self._add_error(location, ERROR_ID_CHAINED_TYPE_NOT_FOUND,
("Type '%s' is not a valid chained type") % (type_name))
def add_chained_type_wrong_type_error(self, location, type_name, bson_type_name):
- # type: (common.SourceLocation, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str) -> None
# pylint: disable=invalid-name
"""Add an error about a chained_type being the wrong type."""
self._add_error(location, ERROR_ID_CHAINED_TYPE_WRONG_BSON_TYPE,
@@ -524,14 +522,15 @@ class ParserContext(object):
"'chain' is supported for chained types.") % (type_name, bson_type_name))
def add_duplicate_field_error(self, location, field_container, field_name, duplicate_location):
- # type: (common.SourceLocation, unicode, unicode, common.SourceLocation) -> None
+ # type: (common.SourceLocation, str, str, common.SourceLocation) -> None
"""Add an error about duplicate fields as a result of chained structs/types."""
- self._add_error(location, ERROR_ID_CHAINED_DUPLICATE_FIELD, (
- "Chained Struct or Type '%s' duplicates an existing field '%s' at location" + "'%s'.") %
- (field_container, field_name, duplicate_location))
+ self._add_error(
+ location, ERROR_ID_CHAINED_DUPLICATE_FIELD,
+ ("Chained Struct or Type '%s' duplicates an existing field '%s' at location" + "'%s'.")
+ % (field_container, field_name, duplicate_location))
def add_chained_type_no_strict_error(self, location, struct_name):
- # type: (common.SourceLocation, unicode) -> None
+ # type: (common.SourceLocation, str) -> None
# pylint: disable=invalid-name
"""Add an error about strict parser validate and chained types."""
self._add_error(location, ERROR_ID_CHAINED_NO_TYPE_STRICT,
@@ -539,14 +538,14 @@ class ParserContext(object):
"struct '%s'. Specify 'strict: false' for this struct.") % (struct_name))
def add_chained_struct_not_found_error(self, location, struct_name):
- # type: (common.SourceLocation, unicode) -> None
+ # type: (common.SourceLocation, str) -> None
# pylint: disable=invalid-name
"""Add an error about a chained_struct not found."""
self._add_error(location, ERROR_ID_CHAINED_STRUCT_NOT_FOUND,
("Type '%s' is not a valid chained struct") % (struct_name))
def add_chained_nested_struct_no_strict_error(self, location, struct_name, nested_struct_name):
- # type: (common.SourceLocation, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str) -> None
# pylint: disable=invalid-name
"""Add an error about strict parser validate and chained types."""
self._add_error(location, ERROR_ID_CHAINED_NO_NESTED_STRUCT_STRICT,
@@ -555,7 +554,7 @@ class ParserContext(object):
(nested_struct_name, struct_name))
def add_chained_nested_struct_no_nested_error(self, location, struct_name, chained_name):
- # type: (common.SourceLocation, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str) -> None
# pylint: disable=invalid-name
"""Add an error about struct's chaining being a struct with nested chaining."""
self._add_error(location, ERROR_ID_CHAINED_NO_NESTED_CHAINED,
@@ -563,39 +562,40 @@ class ParserContext(object):
" structs and/or types.") % (struct_name, chained_name))
def add_empty_enum_error(self, node, name):
- # type: (yaml.nodes.Node, unicode) -> None
+ # type: (yaml.nodes.Node, str) -> None
"""Add an error about an enum without values."""
- self._add_node_error(node, ERROR_ID_BAD_EMPTY_ENUM,
- "Enum '%s' must have values specified but no values were found" %
- (name))
+ self._add_node_error(
+ node, ERROR_ID_BAD_EMPTY_ENUM,
+ "Enum '%s' must have values specified but no values were found" % (name))
def add_array_enum_error(self, location, field_name):
- # type: (common.SourceLocation, unicode) -> None
+ # type: (common.SourceLocation, str) -> None
"""Add an error for a field being an array of enums."""
self._add_error(location, ERROR_ID_NO_ARRAY_ENUM,
"Field '%s' cannot be an array of enums" % (field_name))
def add_enum_bad_type_error(self, location, enum_name, enum_type):
- # type: (common.SourceLocation, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str) -> None
"""Add an error for an enum having the wrong type."""
self._add_error(location, ERROR_ID_ENUM_BAD_TYPE,
"Enum '%s' type '%s' is not a supported enum type" % (enum_name, enum_type))
def add_enum_value_not_int_error(self, location, enum_name, enum_value, err_msg):
- # type: (common.SourceLocation, unicode, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str, str) -> None
"""Add an error for an enum value not being an integer."""
- self._add_error(location, ERROR_ID_ENUM_BAD_INT_VAUE,
- "Enum '%s' value '%s' is not an integer, exception '%s'" %
- (enum_name, enum_value, err_msg))
+ self._add_error(
+ location, ERROR_ID_ENUM_BAD_INT_VAUE,
+ "Enum '%s' value '%s' is not an integer, exception '%s'" % (enum_name, enum_value,
+ err_msg))
def add_enum_value_not_unique_error(self, location, enum_name):
- # type: (common.SourceLocation, unicode) -> None
+ # type: (common.SourceLocation, str) -> None
"""Add an error for an enum having duplicate values."""
self._add_error(location, ERROR_ID_ENUM_NON_UNIQUE_VALUES,
"Enum '%s' has duplicate values, all values must be unique" % (enum_name))
def add_enum_non_continuous_range_error(self, location, enum_name):
- # type: (common.SourceLocation, unicode) -> None
+ # type: (common.SourceLocation, str) -> None
"""Add an error for an enum having duplicate values."""
# pylint: disable=invalid-name
self._add_error(location, ERROR_ID_ENUM_NON_CONTINUOUS_RANGE,
@@ -604,7 +604,7 @@ class ParserContext(object):
def add_bad_command_namespace_error(self, location, command_name, command_namespace,
valid_commands):
- # type: (common.SourceLocation, unicode, unicode, List[unicode]) -> None
+ # type: (common.SourceLocation, str, str, List[str]) -> None
"""Add an error about the namespace value not being a valid choice."""
self._add_error(
location, ERROR_ID_BAD_COMMAND_NAMESPACE,
@@ -612,21 +612,20 @@ class ParserContext(object):
% (command_namespace, command_name, valid_commands))
def add_bad_command_as_field_error(self, location, command_name):
- # type: (common.SourceLocation, unicode) -> None
+ # type: (common.SourceLocation, str) -> None
"""Add an error about using a command for a field."""
- self._add_error(
- location, ERROR_ID_FIELD_NO_COMMAND,
- ("Command '%s' cannot be used as a field type'. Commands must be top-level" +
- " types due to their serialization rules.") % (command_name))
+ self._add_error(location, ERROR_ID_FIELD_NO_COMMAND,
+ ("Command '%s' cannot be used as a field type'. Commands must be top-level"
+ + " types due to their serialization rules.") % (command_name))
def add_bad_array_of_chain(self, location, field_name):
- # type: (common.SourceLocation, unicode) -> None
+ # type: (common.SourceLocation, str) -> None
"""Add an error about a field being an array of chain_types."""
self._add_error(location, ERROR_ID_NO_ARRAY_OF_CHAIN,
"Field '%s' cannot be an array of chained types" % (field_name))
def add_bad_field_default_and_optional(self, location, field_name):
- # type: (common.SourceLocation, unicode) -> None
+ # type: (common.SourceLocation, str) -> None
"""Add an error about a field being optional and having a default value."""
# pylint: disable=invalid-name
self._add_error(
@@ -635,7 +634,7 @@ class ParserContext(object):
(field_name))
def add_bad_struct_field_as_doc_sequence_error(self, location, struct_name, field_name):
- # type: (common.SourceLocation, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str) -> None
"""Add an error about using a field in a struct being marked with supports_doc_sequence."""
# pylint: disable=invalid-name
self._add_error(location, ERROR_ID_STRUCT_NO_DOC_SEQUENCE,
@@ -643,7 +642,7 @@ class ParserContext(object):
" type. They are only supported in commands.") % (field_name, struct_name))
def add_bad_non_array_as_doc_sequence_error(self, location, struct_name, field_name):
- # type: (common.SourceLocation, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str) -> None
"""Add an error about using a non-array type field being marked with supports_doc_sequence."""
# pylint: disable=invalid-name
self._add_error(location, ERROR_ID_NO_DOC_SEQUENCE_FOR_NON_ARRAY,
@@ -651,7 +650,7 @@ class ParserContext(object):
" type since it is not an array.") % (field_name, struct_name))
def add_bad_non_object_as_doc_sequence_error(self, location, field_name):
- # type: (common.SourceLocation, unicode) -> None
+ # type: (common.SourceLocation, str) -> None
"""Add an error about using a non-struct or BSON object for a doc sequence."""
# pylint: disable=invalid-name
self._add_error(location, ERROR_ID_NO_DOC_SEQUENCE_FOR_NON_OBJECT,
@@ -659,14 +658,14 @@ class ParserContext(object):
" type since it is not a BSON object or struct.") % (field_name))
def add_bad_command_name_duplicates_field(self, location, command_name):
- # type: (common.SourceLocation, unicode) -> None
+ # type: (common.SourceLocation, str) -> None
"""Add an error about a command and field having the same name."""
# pylint: disable=invalid-name
self._add_error(location, ERROR_ID_COMMAND_DUPLICATES_FIELD,
("Command '%s' cannot have the same name as a field.") % (command_name))
def is_scalar_non_negative_int_node(self, node, node_name):
- # type: (Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode], unicode) -> bool
+ # type: (Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode], str) -> bool
"""Return True if this YAML node is a Scalar and a valid non-negative int."""
if not self._is_node_type(node, node_name, "scalar"):
return False
@@ -681,9 +680,9 @@ class ParserContext(object):
return False
except ValueError as value_error:
- self._add_node_error(node, ERROR_ID_IS_NODE_VALID_INT,
- "Illegal integer value for '%s', message '%s'." % (node_name,
- value_error))
+ self._add_node_error(
+ node, ERROR_ID_IS_NODE_VALID_INT,
+ "Illegal integer value for '%s', message '%s'." % (node_name, value_error))
return False
return True
@@ -696,7 +695,7 @@ class ParserContext(object):
return int(node.value)
def add_duplicate_comparison_order_field_error(self, location, struct_name, comparison_order):
- # type: (common.SourceLocation, unicode, int) -> None
+ # type: (common.SourceLocation, str, int) -> None
"""Add an error about fields having duplicate comparison_orders."""
# pylint: disable=invalid-name
self._add_error(
@@ -705,7 +704,7 @@ class ParserContext(object):
(struct_name, comparison_order))
def add_extranous_command_type(self, location, command_name):
- # type: (common.SourceLocation, unicode) -> None
+ # type: (common.SourceLocation, str) -> None
"""Add an error about commands having type when not needed."""
# pylint: disable=invalid-name
self._add_error(
@@ -714,23 +713,23 @@ class ParserContext(object):
(command_name))
def add_value_not_numeric_error(self, location, attrname, value):
- # type: (common.SourceLocation, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str) -> None
"""Add an error about non-numeric value where number expected."""
# pylint: disable=invalid-name
- self._add_error(location, ERROR_ID_VALUE_NOT_NUMERIC,
- ("'%s' requires a numeric value, but %s can not be cast") % (attrname,
- value))
+ self._add_error(
+ location, ERROR_ID_VALUE_NOT_NUMERIC,
+ ("'%s' requires a numeric value, but %s can not be cast") % (attrname, value))
def add_server_parameter_invalid_attr(self, location, attrname, conflicts):
- # type: (common.SourceLocation, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str) -> None
"""Add an error about invalid fields in a server parameter definition."""
# pylint: disable=invalid-name
- self._add_error(location, ERROR_ID_SERVER_PARAMETER_INVALID_ATTR,
- ("'%s' attribute not permitted with '%s' server parameter") % (attrname,
- conflicts))
+ self._add_error(
+ location, ERROR_ID_SERVER_PARAMETER_INVALID_ATTR,
+ ("'%s' attribute not permitted with '%s' server parameter") % (attrname, conflicts))
def add_server_parameter_required_attr(self, location, attrname, required, dependant=None):
- # type: (common.SourceLocation, unicode, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str, str) -> None
"""Add an error about missing fields in a server parameter definition."""
# pylint: disable=invalid-name
qualifier = '' if dependant is None else (" when using '%s' attribute" % (dependant))
@@ -739,28 +738,28 @@ class ParserContext(object):
(attrname, qualifier, required))
def add_server_parameter_invalid_method_override(self, location, method):
- # type: (common.SourceLocation, unicode) -> None
+ # type: (common.SourceLocation, str) -> None
"""Add an error about invalid method override in SCP method override."""
# pylint: disable=invalid-name
self._add_error(location, ERROR_ID_SERVER_PARAMETER_INVALID_METHOD_OVERRIDE,
("No such method to override in server parameter class: '%s'") % (method))
def add_bad_source_specifier(self, location, value):
- # type: (common.SourceLocation, unicode) -> None
+ # type: (common.SourceLocation, str) -> None
"""Add an error about invalid source specifier."""
# pylint: disable=invalid-name
self._add_error(location, ERROR_ID_BAD_SOURCE_SPECIFIER,
("'%s' is not a valid source specifier") % (value))
def add_bad_duplicate_behavior(self, location, value):
- # type: (common.SourceLocation, unicode) -> None
+ # type: (common.SourceLocation, str) -> None
"""Add an error about invalid duplicate behavior specifier."""
# pylint: disable=invalid-name
self._add_error(location, ERROR_ID_BAD_DUPLICATE_BEHAVIOR_SPECIFIER,
("'%s' is not a valid duplicate behavior specifier") % (value))
def add_bad_numeric_range(self, location, attrname, value):
- # type: (common.SourceLocation, unicode, unicode) -> None
+ # type: (common.SourceLocation, str, str) -> None
"""Add an error about invalid range specifier."""
# pylint: disable=invalid-name
self._add_error(location, ERROR_ID_BAD_NUMERIC_RANGE,
@@ -774,21 +773,21 @@ class ParserContext(object):
"Missing 'short_name' for positional arg")
def add_invalid_short_name(self, location, name):
- # type: (common.SourceLocation, unicode) -> None
+ # type: (common.SourceLocation, str) -> None
"""Add an error about invalid short names."""
# pylint: disable=invalid-name
self._add_error(location, ERROR_ID_INVALID_SHORT_NAME,
("Invalid 'short_name' value '%s'") % (name))
def add_invalid_single_name(self, location, name):
- # type: (common.SourceLocation, unicode) -> None
+ # type: (common.SourceLocation, str) -> None
"""Add an error about invalid single names."""
# pylint: disable=invalid-name
self._add_error(location, ERROR_ID_INVALID_SINGLE_NAME,
("Invalid 'single_name' value '%s'") % (name))
def add_missing_short_name_with_single_name(self, location, name):
- # type: (common.SourceLocation, unicode) -> None
+ # type: (common.SourceLocation, str) -> None
"""Add an error about missing required short name when using single name."""
# pylint: disable=invalid-name
self._add_error(location, ERROR_ID_MISSING_SHORT_NAME_WITH_SINGLE_NAME,
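
Stepping back from the individual `add_*` helpers: every one of them funnels into the same record shape, an error id plus a `(file, line, column)` source location and a formatted message, with `ParserError` extending `common.SourceLocation` as the hunks above show. A condensed runnable sketch of that plumbing; the `__str__` rendering is assumed, since the real one falls outside these hunks:

```python
class SourceLocation:
    """Stand-in for common.SourceLocation (fields inferred from the calls)."""

    def __init__(self, file_name, line, column):
        # type: (str, int, int) -> None
        self.file_name = file_name
        self.line = line
        self.column = column


class ParserError(SourceLocation):
    """One diagnostic: an error id, a source location, a formatted message."""

    def __init__(self, error_id, msg, file_name, line, column):
        # type: (str, str, str, int, int) -> None
        self.error_id = error_id
        self.msg = msg
        super(ParserError, self).__init__(file_name, line, column)

    def __str__(self):
        # type: () -> str
        # Rendering is assumed; the real __str__ is outside these hunks.
        return '%s: (%s, %s): %s' % (self.file_name, self.line, self.column, self.msg)


errors = []  # mirrors ParserErrorCollection._errors
loc = SourceLocation('basic_types.idl', 12, 4)
errors.append(ParserError('ERROR_ID_UNKNOWN_TYPE', "'foo' is an unknown type",
                          loc.file_name, loc.line, loc.column))
print([str(e) for e in errors])  # mirrors ParserErrorCollection.to_list()
```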
diff --git a/buildscripts/idl/idl/generator.py b/buildscripts/idl/idl/generator.py
index e8580ae4823..4d91cff4b36 100644
--- a/buildscripts/idl/idl/generator.py
+++ b/buildscripts/idl/idl/generator.py
@@ -28,8 +28,6 @@
# pylint: disable=too-many-lines
"""IDL C++ Code Generator."""
-from __future__ import absolute_import, print_function, unicode_literals
-
from abc import ABCMeta, abstractmethod
import copy
import io
@@ -51,25 +49,25 @@ from . import writer
def _get_field_member_name(field):
- # type: (ast.Field) -> unicode
+ # type: (ast.Field) -> str
"""Get the C++ class member name for a field."""
return '_%s' % (common.camel_case(field.cpp_name))
def _get_field_member_setter_name(field):
- # type: (ast.Field) -> unicode
+ # type: (ast.Field) -> str
"""Get the C++ class setter name for a field."""
return "set%s" % (common.title_case(field.cpp_name))
def _get_field_member_getter_name(field):
- # type: (ast.Field) -> unicode
+ # type: (ast.Field) -> str
"""Get the C++ class getter name for a field."""
return "get%s" % (common.title_case(field.cpp_name))
def _get_has_field_member_name(field):
- # type: (ast.Field) -> unicode
+ # type: (ast.Field) -> str
"""Get the C++ class member name for bool 'has' member field."""
return '_has%s' % (common.title_case(field.cpp_name))
@@ -86,20 +84,20 @@ def _is_required_serializer_field(field):
def _get_field_constant_name(field):
- # type: (ast.Field) -> unicode
+ # type: (ast.Field) -> str
"""Get the C++ string constant name for a field."""
- return common.template_args('k${constant_name}FieldName', constant_name=common.title_case(
- field.cpp_name))
+ return common.template_args('k${constant_name}FieldName',
+ constant_name=common.title_case(field.cpp_name))
def _get_field_member_validator_name(field):
- # type (ast.Field) -> unicode
+ # type: (ast.Field) -> str
"""Get the name of the validator method for this field."""
return 'validate%s' % common.title_case(field.cpp_name)
def _access_member(field):
- # type: (ast.Field) -> unicode
+ # type: (ast.Field) -> str
"""Get the declaration to access a member for a field."""
member_name = _get_field_member_name(field)
@@ -111,7 +109,7 @@ def _access_member(field):
def _get_bson_type_check(bson_element, ctxt_name, field):
- # type: (unicode, unicode, ast.Field) -> unicode
+ # type: (str, str, ast.Field) -> str
"""Get the C++ bson type check for a field."""
bson_types = field.bson_serialization_type
if len(bson_types) == 1:
@@ -133,7 +131,7 @@ def _get_bson_type_check(bson_element, ctxt_name, field):
def _get_comparison(field, rel_op, left, right):
- # type: (ast.Field, unicode, unicode, unicode) -> unicode
+ # type: (ast.Field, str, str, str) -> str
"""Generate a comparison for a field."""
name = _get_field_member_name(field)
if not "BSONObj" in field.cpp_type:
@@ -166,7 +164,7 @@ def _get_comparison(field, rel_op, left, right):
def _get_comparison_less(fields):
- # type: (List[ast.Field]) -> unicode
+ # type: (List[ast.Field]) -> str
"""Generate a less than comparison for a list of fields recursively."""
field = fields[0]
if len(fields) == 1:
@@ -189,11 +187,9 @@ def _get_all_fields(struct):
return sorted([field for field in all_fields], key=lambda f: f.cpp_name)
-class _FieldUsageCheckerBase(object):
+class _FieldUsageCheckerBase(object, metaclass=ABCMeta):
"""Check for duplicate fields, and required fields as needed."""
- __metaclass__ = ABCMeta
-
def __init__(self, indented_writer):
# type: (writer.IndentedTextWriter) -> None
"""Create a field usage checker."""
@@ -202,13 +198,13 @@ class _FieldUsageCheckerBase(object):
@abstractmethod
def add_store(self, field_name):
- # type: (unicode) -> None
+ # type: (str) -> None
"""Create the C++ field store initialization code."""
pass
@abstractmethod
def add(self, field, bson_element_variable):
- # type: (ast.Field, unicode) -> None
+ # type: (ast.Field, str) -> None
"""Add a field to track."""
pass
@@ -235,14 +231,14 @@ class _SlowFieldUsageChecker(_FieldUsageCheckerBase):
self._writer.write_line('std::set<StringData> usedFields;')
def add_store(self, field_name):
- # type: (unicode) -> None
+ # type: (str) -> None
self._writer.write_line('auto push_result = usedFields.insert(%s);' % (field_name))
with writer.IndentedScopedBlock(self._writer,
'if (MONGO_unlikely(push_result.second == false)) {', '}'):
self._writer.write_line('ctxt.throwDuplicateField(%s);' % (field_name))
def add(self, field, bson_element_variable):
- # type: (ast.Field, unicode) -> None
+ # type: (ast.Field, str) -> None
if not field in self._fields:
self._fields.append(field)
@@ -255,25 +251,25 @@ class _SlowFieldUsageChecker(_FieldUsageCheckerBase):
with writer.IndentedScopedBlock(self._writer, pred, '}'):
if field.default:
if field.enum_type:
- self._writer.write_line('%s = %s::%s;' %
- (_get_field_member_name(field), field.cpp_type,
- field.default))
+ self._writer.write_line(
+ '%s = %s::%s;' % (_get_field_member_name(field), field.cpp_type,
+ field.default))
else:
- self._writer.write_line('%s = %s;' % (_get_field_member_name(field),
- field.default))
+ self._writer.write_line(
+ '%s = %s;' % (_get_field_member_name(field), field.default))
else:
- self._writer.write_line('ctxt.throwMissingField(%s);' %
- (_get_field_constant_name(field)))
+ self._writer.write_line(
+ 'ctxt.throwMissingField(%s);' % (_get_field_constant_name(field)))
def _gen_field_usage_constant(field):
- # type: (ast.Field) -> unicode
+ # type: (ast.Field) -> str
"""Get the name for a bitset constant in field usage checking."""
return "k%sBit" % (common.title_case(field.cpp_name))
def _get_constant(name):
- # type: (unicode) -> unicode
+ # type: (str) -> str
"""Transform an arbitrary label to a constant name."""
return 'k' + re.sub(r'([^a-zA-Z0-9_]+)', '_', common.title_case(name))
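`_get_constant` is pure string munging; assuming `common.title_case` simply upper-cases the first character, its behavior can be sketched standalone:

    import re

    def title_case_sketch(name):
        # hypothetical stand-in for common.title_case
        return name[:1].upper() + name[1:]

    def get_constant_sketch(name):
        return 'k' + re.sub(r'([^a-zA-Z0-9_]+)', '_', title_case_sketch(name))

    print(get_constant_sketch('shard version'))  # kShard_version
    print(get_constant_sketch('maxBSONDepth'))   # kMaxBSONDepth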
@@ -298,23 +294,24 @@ class _FastFieldUsageChecker(_FieldUsageCheckerBase):
if field.chained:
continue
- self._writer.write_line('const size_t %s = %d;' % (_gen_field_usage_constant(field),
- bit_id))
+ self._writer.write_line(
+ 'const size_t %s = %d;' % (_gen_field_usage_constant(field), bit_id))
bit_id += 1
def add_store(self, field_name):
- # type: (unicode) -> None
+ # type: (str) -> None
"""Create the C++ field store initialization code."""
pass
def add(self, field, bson_element_variable):
- # type: (ast.Field, unicode) -> None
+ # type: (ast.Field, str) -> None
"""Add a field to track."""
if field not in self._fields:
self._fields.append(field)
- with writer.IndentedScopedBlock(self._writer, 'if (MONGO_unlikely(usedFields[%s])) {' %
- (_gen_field_usage_constant(field)), '}'):
+ with writer.IndentedScopedBlock(
+ self._writer,
+ 'if (MONGO_unlikely(usedFields[%s])) {' % (_gen_field_usage_constant(field)), '}'):
self._writer.write_line('ctxt.throwDuplicateField(%s);' % (bson_element_variable))
self._writer.write_empty_line()
@@ -328,8 +325,9 @@ class _FastFieldUsageChecker(_FieldUsageCheckerBase):
'}'):
for field in self._fields:
if (not field.optional) and (not field.ignore):
- with writer.IndentedScopedBlock(self._writer, 'if (!usedFields[%s]) {' %
- (_gen_field_usage_constant(field)), '}'):
+ with writer.IndentedScopedBlock(
+ self._writer,
+ 'if (!usedFields[%s]) {' % (_gen_field_usage_constant(field)), '}'):
if field.default:
if field.chained_struct_field:
self._writer.write_line(
@@ -337,15 +335,15 @@ class _FastFieldUsageChecker(_FieldUsageCheckerBase):
(_get_field_member_name(field.chained_struct_field),
_get_field_member_setter_name(field), field.default))
elif field.enum_type:
- self._writer.write_line('%s = %s::%s;' %
- (_get_field_member_name(field),
- field.cpp_type, field.default))
+ self._writer.write_line(
+ '%s = %s::%s;' % (_get_field_member_name(field), field.cpp_type,
+ field.default))
else:
- self._writer.write_line('%s = %s;' % (_get_field_member_name(field),
- field.default))
+ self._writer.write_line(
+ '%s = %s;' % (_get_field_member_name(field), field.default))
else:
- self._writer.write_line('ctxt.throwMissingField(%s);' %
- (_get_field_constant_name(field)))
+ self._writer.write_line(
+ 'ctxt.throwMissingField(%s);' % (_get_field_constant_name(field)))
def _get_field_usage_checker(indented_writer, struct):
@@ -361,7 +359,7 @@ def _get_field_usage_checker(indented_writer, struct):
# Turn a Python string into a C++ literal.
def _encaps(val):
- # type: (unicode) -> unicode
+ # type: (str) -> str
if val is None:
return '""'
@@ -373,7 +371,7 @@ def _encaps(val):
# Turn a list of Python strings into a C++ initializer list.
def _encaps_list(vals):
- # type: (List[unicode]) -> unicode
+ # type: (List[str]) -> str
if vals is None:
return '{}'
@@ -382,7 +380,7 @@ def _encaps_list(vals):
# Translate an ast.Expression into C++ code.
def _get_expression(expr):
- # type: (ast.Expression) -> unicode
+ # type: (ast.Expression) -> str
if not expr.validate_constexpr:
return expr.expr
@@ -405,7 +403,7 @@ class _CppFileWriterBase(object):
self._writer = indented_writer # type: writer.IndentedTextWriter
def write_unindented_line(self, msg):
- # type: (unicode) -> None
+ # type: (str) -> None
"""Write an unindented line to the stream."""
self._writer.write_unindented_line(msg)
@@ -427,24 +425,24 @@ class _CppFileWriterBase(object):
""" % (" ".join(sys.argv))))
def gen_system_include(self, include):
- # type: (unicode) -> None
+ # type: (str) -> None
"""Generate a system C++ include line."""
self._writer.write_unindented_line('#include <%s>' % (include))
def gen_include(self, include):
- # type: (unicode) -> None
+ # type: (str) -> None
"""Generate a non-system C++ include line."""
self._writer.write_unindented_line('#include "%s"' % (include))
def gen_namespace_block(self, namespace):
- # type: (unicode) -> writer.NamespaceScopeBlock
+ # type: (str) -> writer.NamespaceScopeBlock
"""Generate a namespace block."""
namespace_list = namespace.split("::")
return writer.NamespaceScopeBlock(self._writer, namespace_list)
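The namespace string is split on `::` into a list for `writer.NamespaceScopeBlock`; the emitted C++ presumably looks like this standalone sketch (the exact output format is an assumption, since the writer lives in another file):

    namespace = 'mongo::idl::test'
    namespace_list = namespace.split('::')  # ['mongo', 'idl', 'test']
    print('\n'.join('namespace %s {' % n for n in namespace_list))
    print('\n'.join('}  // namespace %s' % n for n in reversed(namespace_list)))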
def get_initializer_lambda(self, decl, unused=False, return_type=None):
- # type: (unicode, bool, unicode) -> writer.IndentedScopedBlock
+ # type: (str, bool, str) -> writer.IndentedScopedBlock
"""Generate an indented block lambda initializing an outer scope variable."""
prefix = 'MONGO_COMPILER_VARIABLE_UNUSED ' if unused else ''
prefix = prefix + decl + ' = ([]'
@@ -453,7 +451,7 @@ class _CppFileWriterBase(object):
return writer.IndentedScopedBlock(self._writer, prefix + ' {', '})();')
def gen_description_comment(self, description):
- # type: (unicode) -> None
+ # type: (str) -> None
"""Generate a multiline comment with the description from the IDL."""
self._writer.write_line(
textwrap.dedent("""\
@@ -462,12 +460,12 @@ class _CppFileWriterBase(object):
*/""" % (description)))
def _with_template(self, template_params):
- # type: (Mapping[unicode,unicode]) -> writer.TemplateContext
+ # type: (Mapping[str,str]) -> writer.TemplateContext
"""Generate a template context for the current parameters."""
return writer.TemplateContext(self._writer, template_params)
def _block(self, opening, closing):
- # type: (unicode, unicode) -> Union[writer.IndentedScopedBlock,writer.EmptyBlock]
+ # type: (str, str) -> Union[writer.IndentedScopedBlock,writer.EmptyBlock]
"""Generate an indented block if opening is not empty."""
if not opening:
return writer.EmptyBlock()
@@ -475,7 +473,7 @@ class _CppFileWriterBase(object):
return writer.IndentedScopedBlock(self._writer, opening, closing)
def _predicate(self, check_str, use_else_if=False, constexpr=False):
- # type: (unicode, bool, bool) -> Union[writer.IndentedScopedBlock,writer.EmptyBlock]
+ # type: (str, bool, bool) -> Union[writer.IndentedScopedBlock,writer.EmptyBlock]
"""
Generate an if block if the condition is non-empty.
@@ -524,7 +522,7 @@ class _CppHeaderFileWriter(_CppFileWriterBase):
"""C++ .h File writer."""
def gen_class_declaration_block(self, class_name):
- # type: (unicode) -> writer.IndentedScopedBlock
+ # type: (str) -> writer.IndentedScopedBlock
"""Generate a class declaration block."""
return writer.IndentedScopedBlock(self._writer,
'class %s {' % common.title_case(class_name), '};')
@@ -604,9 +602,9 @@ class _CppHeaderFileWriter(_CppFileWriterBase):
if field.chained_struct_field:
self._writer.write_template(
- '${const_type} ${param_type} ${method_name}() const { return %s.%s(); }' %
- ((_get_field_member_name(field.chained_struct_field),
- _get_field_member_getter_name(field))))
+ '${const_type} ${param_type} ${method_name}() const { return %s.%s(); }' % (
+ (_get_field_member_name(field.chained_struct_field),
+ _get_field_member_getter_name(field))))
elif cpp_type_info.disable_xvalue():
self._writer.write_template(
@@ -684,8 +682,8 @@ class _CppHeaderFileWriter(_CppFileWriterBase):
if field.default and not field.constructed:
if field.enum_type:
- self._writer.write_line('%s %s{%s::%s};' % (member_type, member_name,
- field.cpp_type, field.default))
+ self._writer.write_line(
+ '%s %s{%s::%s};' % (member_type, member_name, field.cpp_type, field.default))
else:
self._writer.write_line('%s %s{%s};' % (member_type, member_name, field.default))
else:
@@ -811,19 +809,19 @@ class _CppHeaderFileWriter(_CppFileWriterBase):
self.write_empty_line()
def _gen_exported_constexpr(self, name, suffix, expr, condition):
- # type: (unicode, unicode, ast.Expression, ast.Condition) -> None
+ # type: (str, str, ast.Expression, ast.Condition) -> None
"""Generate exports for default initializer."""
if not (name and expr and expr.export):
return
with self._condition(condition, preprocessor_only=True):
- self._writer.write_line('constexpr auto %s%s = %s;' % (_get_constant(name), suffix,
- expr.expr))
+ self._writer.write_line(
+ 'constexpr auto %s%s = %s;' % (_get_constant(name), suffix, expr.expr))
self.write_empty_line()
def _gen_extern_declaration(self, vartype, varname, condition):
- # type: (unicode, unicode, ast.Condition) -> None
+ # type: (str, str, ast.Condition) -> None
"""Generate externs for storage declaration."""
if (vartype is None) or (varname is None):
return
@@ -851,11 +849,11 @@ class _CppHeaderFileWriter(_CppFileWriterBase):
return
if initializer.register:
- self._writer.write_line('Status %s(optionenvironment::OptionSection*);' %
- (initializer.register))
+ self._writer.write_line(
+ 'Status %s(optionenvironment::OptionSection*);' % (initializer.register))
if initializer.store:
- self._writer.write_line('Status %s(const optionenvironment::Environment&);' %
- (initializer.store))
+ self._writer.write_line(
+ 'Status %s(const optionenvironment::Environment&);' % (initializer.store))
if initializer.register or initializer.store:
self.write_empty_line()
@@ -871,8 +869,8 @@ class _CppHeaderFileWriter(_CppFileWriterBase):
with self._block('class %s : public ServerParameter {' % (cls.name), '};'):
self._writer.write_unindented_line('public:')
if scp.default is not None:
- self._writer.write_line('static constexpr auto kDataDefault = %s;' %
- (scp.default.expr))
+ self._writer.write_line(
+ 'static constexpr auto kDataDefault = %s;' % (scp.default.expr))
if cls.override_ctor:
# Explicit custom constructor.
@@ -1048,13 +1046,13 @@ class _CppSourceFileWriter(_CppFileWriterBase):
"""C++ .cpp File writer."""
def __init__(self, indented_writer, target_arch):
- # type: (writer.IndentedTextWriter, unicode) -> None
+ # type: (writer.IndentedTextWriter, str) -> None
"""Create a C++ .cpp file code writer."""
self._target_arch = target_arch
super(_CppSourceFileWriter, self).__init__(indented_writer)
def _gen_field_deserializer_expression(self, element_name, field):
- # type: (unicode, ast.Field) -> unicode
+ # type: (str, ast.Field) -> str
# pylint: disable=invalid-name
"""
Generate the C++ deserializer piece for a field.
@@ -1064,8 +1062,8 @@ class _CppSourceFileWriter(_CppFileWriterBase):
"""
if field.struct_type:
- self._writer.write_line('IDLParserErrorContext tempContext(%s, &ctxt);' %
- (_get_field_constant_name(field)))
+ self._writer.write_line(
+ 'IDLParserErrorContext tempContext(%s, &ctxt);' % (_get_field_constant_name(field)))
self._writer.write_line('const auto localObject = %s.Obj();' % (element_name))
return '%s::parse(tempContext, localObject)' % (common.title_case(field.struct_type))
elif field.deserializer and 'BSONElement::' in field.deserializer:
@@ -1104,14 +1102,14 @@ class _CppSourceFileWriter(_CppFileWriterBase):
return '%s(%s)' % (method_name, element_name)
def _gen_array_deserializer(self, field, bson_element):
- # type: (ast.Field, unicode) -> None
+ # type: (ast.Field, str) -> None
"""Generate the C++ deserializer piece for an array field."""
cpp_type_info = cpp_types.get_cpp_type(field)
cpp_type = cpp_type_info.get_type_name()
self._writer.write_line('std::uint32_t expectedFieldNumber{0};')
- self._writer.write_line('const IDLParserErrorContext arrayCtxt(%s, &ctxt);' %
- (_get_field_constant_name(field)))
+ self._writer.write_line(
+ 'const IDLParserErrorContext arrayCtxt(%s, &ctxt);' % (_get_field_constant_name(field)))
self._writer.write_line('std::vector<%s> values;' % (cpp_type))
self._writer.write_empty_line()
@@ -1145,14 +1143,13 @@ class _CppSourceFileWriter(_CppFileWriterBase):
self._writer.write_line('++expectedFieldNumber;')
if field.chained_struct_field:
- self._writer.write_line('%s.%s(std::move(values));' %
- (_get_field_member_name(field.chained_struct_field),
- _get_field_member_setter_name(field)))
+ self._writer.write_line('%s.%s(std::move(values));' % (_get_field_member_name(
+ field.chained_struct_field), _get_field_member_setter_name(field)))
else:
self._writer.write_line('%s = std::move(values);' % (_get_field_member_name(field)))
def _gen_usage_check(self, field, bson_element, field_usage_check):
- # type: (ast.Field, unicode, _FieldUsageCheckerBase) -> None
+ # type: (ast.Field, str, _FieldUsageCheckerBase) -> None
"""Generate the field usage check and insert the required field check."""
if field_usage_check:
field_usage_check.add(field, bson_element)
@@ -1161,7 +1158,7 @@ class _CppSourceFileWriter(_CppFileWriterBase):
self._writer.write_line('%s = true;' % (_get_has_field_member_name(field)))
def gen_field_deserializer(self, field, bson_object, bson_element, field_usage_check):
- # type: (ast.Field, unicode, unicode, _FieldUsageCheckerBase) -> None
+ # type: (ast.Field, str, str, _FieldUsageCheckerBase) -> None
"""Generate the C++ deserializer piece for a field."""
if field.array:
self._gen_usage_check(field, bson_element, field_usage_check)
@@ -1170,7 +1167,7 @@ class _CppSourceFileWriter(_CppFileWriterBase):
return
def validate_and_assign_or_uassert(field, expression):
- # type: (ast.Field, unicode) -> None
+ # type: (ast.Field, str) -> None
"""Perform field value validation post-assignment."""
field_name = _get_field_member_name(field)
if field.validator is None:
@@ -1208,9 +1205,9 @@ class _CppSourceFileWriter(_CppFileWriterBase):
object_value = self._gen_field_deserializer_expression(bson_element, field)
if field.chained_struct_field:
# No need for explicit validation as setter will throw for us.
- self._writer.write_line('%s.%s(%s);' %
- (_get_field_member_name(field.chained_struct_field),
- _get_field_member_setter_name(field), object_value))
+ self._writer.write_line(
+ '%s.%s(%s);' % (_get_field_member_name(field.chained_struct_field),
+ _get_field_member_setter_name(field), object_value))
else:
validate_and_assign_or_uassert(field, object_value)
@@ -1233,8 +1230,8 @@ class _CppSourceFileWriter(_CppFileWriterBase):
if field.struct_type:
self._writer.write_line('IDLParserErrorContext tempContext(%s, &ctxt);' %
(_get_field_constant_name(field)))
- array_value = '%s::parse(tempContext, sequenceObject)' % (
- common.title_case(field.struct_type))
+ array_value = '%s::parse(tempContext, sequenceObject)' % (common.title_case(
+ field.struct_type))
else:
assert field.bson_serialization_type == ['object']
if field.deserializer:
@@ -1277,9 +1274,9 @@ class _CppSourceFileWriter(_CppFileWriterBase):
field.cpp_type)
if _is_required_serializer_field(field) and needs_init:
initializers.append(
- '%s(%s)' %
- (_get_field_member_name(field),
- cpp_types.get_primitive_scalar_type_default_value(field.cpp_type)))
+ '%s(%s)' % (_get_field_member_name(field),
+ cpp_types.get_primitive_scalar_type_default_value(
+ field.cpp_type)))
# Serialize the _dbName field second
initializes_db_name = False
@@ -1329,7 +1326,7 @@ class _CppSourceFileWriter(_CppFileWriterBase):
self._gen_constructor(struct, required_constructor, False)
def _gen_command_deserializer(self, struct, bson_object):
- # type: (ast.Struct, unicode) -> None
+ # type: (ast.Struct, str) -> None
"""Generate the command field deserializer."""
if isinstance(struct, ast.Command) and struct.command_field:
@@ -1343,7 +1340,7 @@ class _CppSourceFileWriter(_CppFileWriterBase):
struct_type_info.gen_namespace_check(self._writer, "_dbName", "commandElement")
def _gen_fields_deserializer_common(self, struct, bson_object):
- # type: (ast.Struct, unicode) -> _FieldUsageCheckerBase
+ # type: (ast.Struct, str) -> _FieldUsageCheckerBase
"""Generate the C++ code to deserialize list of fields."""
# pylint: disable=too-many-branches
field_usage_check = _get_field_usage_checker(self._writer, struct)
@@ -1439,15 +1436,15 @@ class _CppSourceFileWriter(_CppFileWriterBase):
cpp_types.get_primitive_scalar_type_default_value(
struct.command_field.cpp_type)))
else:
- self._writer.write_line('%s localCmdType;' %
- (cpp_type_info.get_storage_type()))
- self._writer.write_line('%s object(localCmdType);' %
- (common.title_case(struct.cpp_name)))
+ self._writer.write_line(
+ '%s localCmdType;' % (cpp_type_info.get_storage_type()))
+ self._writer.write_line(
+ '%s object(localCmdType);' % (common.title_case(struct.cpp_name)))
elif struct.namespace in (common.COMMAND_NAMESPACE_CONCATENATE_WITH_DB,
common.COMMAND_NAMESPACE_CONCATENATE_WITH_DB_OR_UUID):
self._writer.write_line('NamespaceString localNS;')
- self._writer.write_line('%s object(localNS);' %
- (common.title_case(struct.cpp_name)))
+ self._writer.write_line(
+ '%s object(localNS);' % (common.title_case(struct.cpp_name)))
else:
assert "Missing case"
else:
@@ -1457,15 +1454,15 @@ class _CppSourceFileWriter(_CppFileWriterBase):
self._writer.write_line('return object;')
def _compare_and_return_status(self, op, limit, field, optional_param):
- # type: (unicode, ast.Expression, ast.Field, unicode) -> None
+ # type: (str, ast.Expression, ast.Field, str) -> None
"""Throw an error on comparison failure."""
with self._block('if (!(value %s %s)) {' % (op, _get_expression(limit)), '}'):
- self._writer.write_line('throwComparisonError<%s>(%s"%s", "%s"_sd, value, %s);' %
- (field.cpp_type, optional_param, field.name, op,
- _get_expression(limit)))
+ self._writer.write_line(
+ 'throwComparisonError<%s>(%s"%s", "%s"_sd, value, %s);' %
+ (field.cpp_type, optional_param, field.name, op, _get_expression(limit)))
def _gen_field_validator(self, struct, field, optional_params):
- # type: (ast.Struct, ast.Field, Tuple[unicode, unicode]) -> None
+ # type: (ast.Struct, ast.Field, Tuple[str, str]) -> None
"""Generate non-trivial field validators."""
validator = field.validator
@@ -1570,8 +1567,8 @@ class _CppSourceFileWriter(_CppFileWriterBase):
field_usage_check.add(field, "sequence.name")
if _is_required_serializer_field(field):
- self._writer.write_line('%s = true;' %
- (_get_has_field_member_name(field)))
+ self._writer.write_line(
+ '%s = true;' % (_get_has_field_member_name(field)))
self.gen_doc_sequence_deserializer(field)
@@ -1809,8 +1806,8 @@ class _CppSourceFileWriter(_CppFileWriterBase):
with self._block(optional_block_start, '}'):
self._writer.write_line('OpMsg::DocumentSequence documentSequence;')
- self._writer.write_template('documentSequence.name = %s.toString();' %
- (_get_field_constant_name(field)))
+ self._writer.write_template(
+ 'documentSequence.name = %s.toString();' % (_get_field_constant_name(field)))
with self._block('for (const auto& item : %s) {' % (_access_member(field)), '}'):
@@ -1839,9 +1836,9 @@ class _CppSourceFileWriter(_CppFileWriterBase):
struct_type_info = struct_types.get_struct_info(struct)
- with self._block('%s {' %
- (struct_type_info.get_op_msg_request_serializer_method().get_definition()),
- '}'):
+ with self._block(
+ '%s {' % (struct_type_info.get_op_msg_request_serializer_method().get_definition()),
+ '}'):
self._writer.write_line('BSONObjBuilder localBuilder;')
with self._block('{', '}'):
@@ -1884,7 +1881,7 @@ class _CppSourceFileWriter(_CppFileWriterBase):
self._writer.write_empty_line()
def _gen_known_fields_declaration(self, struct, name, include_op_msg_implicit):
- # type: (ast.Struct, unicode, bool) -> None
+ # type: (ast.Struct, str, bool) -> None
"""Generate the known fields declaration with specified name."""
block_name = common.template_args(
'const std::vector<StringData> ${class_name}::_${name}Fields {', name=name,
@@ -1897,13 +1894,13 @@ class _CppSourceFileWriter(_CppFileWriterBase):
for field in sorted_fields:
self._writer.write_line(
- common.template_args(
- '${class_name}::${constant_name},', class_name=common.title_case(
- struct.cpp_name), constant_name=_get_field_constant_name(field)))
+ common.template_args('${class_name}::${constant_name},',
+ class_name=common.title_case(struct.cpp_name),
+ constant_name=_get_field_constant_name(field)))
self._writer.write_line(
- common.template_args('${class_name}::kCommandName,', class_name=common.title_case(
- struct.cpp_name)))
+ common.template_args('${class_name}::kCommandName,',
+ class_name=common.title_case(struct.cpp_name)))
def gen_known_fields_declaration(self, struct):
# type: (ast.Struct) -> None
@@ -1917,8 +1914,8 @@ class _CppSourceFileWriter(_CppFileWriterBase):
def _gen_server_parameter_specialized(self, param):
# type: (ast.ServerParameter) -> None
"""Generate a specialized ServerParameter."""
- self._writer.write_line('return new %s(%s, %s);' % (param.cpp_class.name,
- _encaps(param.name), param.set_at))
+ self._writer.write_line(
+ 'return new %s(%s, %s);' % (param.cpp_class.name, _encaps(param.name), param.set_at))
def _gen_server_parameter_class_definitions(self, param):
# type: (ast.ServerParameter) -> None
@@ -1929,8 +1926,8 @@ class _CppSourceFileWriter(_CppFileWriterBase):
self.gen_description_comment("%s: %s" % (param.name, param.description))
if param.default:
- self._writer.write_line('constexpr decltype(%s::kDataDefault) %s::kDataDefault;' %
- (cls.name, cls.name))
+ self._writer.write_line(
+ 'constexpr decltype(%s::kDataDefault) %s::kDataDefault;' % (cls.name, cls.name))
self.write_empty_line()
if param.redact:
@@ -1977,8 +1974,8 @@ class _CppSourceFileWriter(_CppFileWriterBase):
if param.default and not (param.cpp_vartype and param.cpp_varname):
# Only need to call setValue() if we haven't in-place initialized the declared var.
- self._writer.write_line('uassertStatusOK(ret->setValue(%s));' %
- (_get_expression(param.default)))
+ self._writer.write_line(
+ 'uassertStatusOK(ret->setValue(%s));' % (_get_expression(param.default)))
self._writer.write_line('return ret;')
@@ -1998,11 +1995,12 @@ class _CppSourceFileWriter(_CppFileWriterBase):
self._writer.write_line(
common.template_args(
'${unused} auto* ${alias_var} = new IDLServerParameterDeprecatedAlias(${name}, ${param_var});',
- unused='MONGO_COMPILER_VARIABLE_UNUSED', alias_var='scp_%d_%d' %
- (param_no, alias_no), name=_encaps(alias), param_var='scp_%d' % (param_no)))
+ unused='MONGO_COMPILER_VARIABLE_UNUSED',
+ alias_var='scp_%d_%d' % (param_no, alias_no), name=_encaps(alias),
+ param_var='scp_%d' % (param_no)))
def gen_server_parameters(self, params, header_file_name):
- # type: (List[ast.ServerParameter], unicode) -> None
+ # type: (List[ast.ServerParameter], str) -> None
"""Generate IDLServerParameter instances."""
for param in params:
@@ -2014,10 +2012,10 @@ class _CppSourceFileWriter(_CppFileWriterBase):
elif (param.cpp_vartype is not None) and (param.cpp_varname is not None):
with self._condition(param.condition, preprocessor_only=True):
init = ('{%s}' % (param.default.expr)) if param.default else ''
- self._writer.write_line('%s %s%s;' % (param.cpp_vartype, param.cpp_varname,
- init))
+ self._writer.write_line(
+ '%s %s%s;' % (param.cpp_vartype, param.cpp_varname, init))
- blockname = 'idl_' + hashlib.sha1(header_file_name).hexdigest()
+ blockname = 'idl_' + hashlib.sha1(header_file_name.encode()).hexdigest()
with self._block('MONGO_SERVER_PARAMETER_REGISTER(%s)(InitializerContext*) {' % (blockname),
'}'):
# ServerParameter instances.
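The `.encode()` added above is the classic Python 3 hashlib fix: the hash constructors take bytes, not str. Standalone sketch with a made-up file name:

    import hashlib

    header_file_name = 'idl/server_parameters_gen.h'  # hypothetical input
    blockname = 'idl_' + hashlib.sha1(header_file_name.encode()).hexdigest()
    # On Python 3, hashlib.sha1(header_file_name) raises TypeError because the
    # hash functions accept bytes, not str; .encode() defaults to UTF-8.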
@@ -2038,7 +2036,7 @@ class _CppSourceFileWriter(_CppFileWriterBase):
self._writer.write_line('return Status::OK();')
def gen_config_option(self, opt, section):
- # type: (ast.ConfigOption, unicode) -> None
+ # type: (ast.ConfigOption, str) -> None
"""Generate Config Option instance."""
# Derive cpp_vartype from arg_vartype if needed.
@@ -2072,16 +2070,16 @@ class _CppSourceFileWriter(_CppFileWriterBase):
for conflicts in opt.conflicts:
self._writer.write_line('.incompatibleWith(%s)' % (_encaps(conflicts)))
if opt.default:
- self._writer.write_line('.setDefault(moe::Value(%s))' %
- (_get_expression(opt.default)))
+ self._writer.write_line(
+ '.setDefault(moe::Value(%s))' % (_get_expression(opt.default)))
if opt.implicit:
- self._writer.write_line('.setImplicit(moe::Value(%s))' %
- (_get_expression(opt.implicit)))
+ self._writer.write_line(
+ '.setImplicit(moe::Value(%s))' % (_get_expression(opt.implicit)))
if opt.duplicates_append:
self._writer.write_line('.composing()')
if (opt.positional_start is not None) and (opt.positional_end is not None):
- self._writer.write_line('.positional(%d, %d)' % (opt.positional_start,
- opt.positional_end))
+ self._writer.write_line(
+ '.positional(%d, %d)' % (opt.positional_start, opt.positional_end))
if opt.canonicalize:
self._writer.write_line('.canonicalize(%s)' % opt.canonicalize)
@@ -2090,8 +2088,8 @@ class _CppSourceFileWriter(_CppFileWriterBase):
self._writer.write_line(
common.template_args(
'.addConstraint(new moe::CallbackKeyConstraint<${argtype}>(${key}, ${callback}))',
- argtype=vartype, key=_encaps(
- opt.name), callback=opt.validator.callback))
+ argtype=vartype, key=_encaps(opt.name),
+ callback=opt.validator.callback))
if (opt.validator.gt is not None) or (opt.validator.lt is not None) or (
opt.validator.gte is not None) or (opt.validator.lte is not None):
@@ -2100,12 +2098,11 @@ class _CppSourceFileWriter(_CppFileWriterBase):
'.addConstraint(new moe::BoundaryKeyConstraint<${argtype}>(${key}, ${gt}, ${lt}, ${gte}, ${lte}))',
argtype=vartype, key=_encaps(opt.name), gt='boost::none'
if opt.validator.gt is None else _get_expression(opt.validator.gt),
- lt='boost::none' if opt.validator.lt is None else _get_expression(
- opt.validator.lt), gte='boost::none'
- if opt.validator.gte is None else _get_expression(
- opt.validator.gte), lte='boost::none'
- if opt.validator.lte is None else _get_expression(
- opt.validator.lte)))
+ lt='boost::none'
+ if opt.validator.lt is None else _get_expression(opt.validator.lt),
+ gte='boost::none' if opt.validator.gte is None else _get_expression(
+ opt.validator.gte), lte='boost::none' if
+ opt.validator.lte is None else _get_expression(opt.validator.lte)))
self.write_empty_line()
@@ -2116,7 +2113,7 @@ class _CppSourceFileWriter(_CppFileWriterBase):
for opt in root_opts:
self.gen_config_option(opt, 'options')
- for section_name, section_opts in sections.iteritems():
+ for section_name, section_opts in sections.items():
with self._block('{', '}'):
self._writer.write_line('moe::OptionSection section(%s);' % (_encaps(section_name)))
self.write_empty_line()
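`dict.iteritems()` does not exist on Python 3; `items()` returns a view, which is fine for read-only iteration like this. Standalone sketch with made-up option data:

    sections = {'storage': ['dbPath'], 'net': ['port', 'bindIp']}
    for section_name, section_opts in sections.items():
        print('%s: %d option(s)' % (section_name, len(section_opts)))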
@@ -2144,15 +2141,14 @@ class _CppSourceFileWriter(_CppFileWriterBase):
(opt.arg_vartype)) if opt.cpp_vartype is None else opt.cpp_vartype
with self._condition(opt.condition):
with self._block('if (params.count(%s)) {' % (_encaps(opt.name)), '}'):
- self._writer.write_line('%s = params[%s].as<%s>();' % (opt.cpp_varname,
- _encaps(opt.name),
- vartype))
+ self._writer.write_line(
+ '%s = params[%s].as<%s>();' % (opt.cpp_varname, _encaps(opt.name), vartype))
self.write_empty_line()
self._writer.write_line('return Status::OK();')
def gen_config_options(self, spec, header_file_name):
- # type: (ast.IDLAST, unicode) -> None
+ # type: (ast.IDLAST, str) -> None
"""Generate Config Option instances."""
# pylint: disable=too-many-branches,too-many-statements
@@ -2164,13 +2160,13 @@ class _CppSourceFileWriter(_CppFileWriterBase):
if opt.cpp_vartype is not None:
with self._condition(opt.condition, preprocessor_only=True):
init = ('{%s}' % (opt.default.expr)) if opt.default else ''
- self._writer.write_line('%s %s%s;' % (opt.cpp_vartype, opt.cpp_varname,
- init))
+ self._writer.write_line(
+ '%s %s%s;' % (opt.cpp_vartype, opt.cpp_varname, init))
self.write_empty_line()
root_opts = [] # type: List[ast.ConfigOption]
- sections = {} # type: Dict[unicode, List[ast.ConfigOption]]
+ sections = {} # type: Dict[str, List[ast.ConfigOption]]
for opt in spec.configs:
if opt.section:
try:
@@ -2183,12 +2179,13 @@ class _CppSourceFileWriter(_CppFileWriterBase):
initializer = spec.globals.configs and spec.globals.configs.initializer
# pylint: disable=consider-using-ternary
- blockname = (initializer
- and initializer.name) or ('idl_' + hashlib.sha1(header_file_name).hexdigest())
+ blockname = (initializer and initializer.name) or (
+ 'idl_' + hashlib.sha1(header_file_name.encode()).hexdigest())
if initializer and initializer.register:
- with self._block('Status %s(optionenvironment::OptionSection* options_ptr) {' %
- initializer.register, '}'):
+ with self._block(
+ 'Status %s(optionenvironment::OptionSection* options_ptr) {' %
+ initializer.register, '}'):
self._writer.write_line('auto& options = *options_ptr;')
self._gen_config_options_register(root_opts, sections)
else:
@@ -2203,13 +2200,15 @@ class _CppSourceFileWriter(_CppFileWriterBase):
if has_storage_targets:
if initializer and initializer.store:
- with self._block('Status %s(const optionenvironment::Environment& params) {' %
- initializer.store, '}'):
+ with self._block(
+ 'Status %s(const optionenvironment::Environment& params) {' %
+ initializer.store, '}'):
self._gen_config_options_store(spec.configs)
else:
with self.gen_namespace_block(''):
- with self._block('MONGO_STARTUP_OPTIONS_STORE(%s)(InitializerContext*) {' %
- (blockname), '}'):
+ with self._block(
+ 'MONGO_STARTUP_OPTIONS_STORE(%s)(InitializerContext*) {' % (blockname),
+ '}'):
# If all options are guarded by non-passing #ifdefs, then params will be unused.
self._writer.write_line(
'MONGO_COMPILER_VARIABLE_UNUSED const auto& params = optionenvironment::startupOptionsParsed;'
@@ -2219,7 +2218,7 @@ class _CppSourceFileWriter(_CppFileWriterBase):
self.write_empty_line()
def generate(self, spec, header_file_name):
- # type: (ast.IDLAST, unicode) -> None
+ # type: (ast.IDLAST, str) -> None
"""Generate the C++ header to a stream."""
self.gen_file_header()
@@ -2315,7 +2314,7 @@ class _CppSourceFileWriter(_CppFileWriterBase):
def generate_header_str(spec):
- # type: (ast.IDLAST) -> unicode
+ # type: (ast.IDLAST) -> str
"""Generate a C++ header in-memory."""
stream = io.StringIO()
text_writer = writer.IndentedTextWriter(stream)
@@ -2328,7 +2327,7 @@ def generate_header_str(spec):
def _generate_header(spec, file_name):
- # type: (ast.IDLAST, unicode) -> None
+ # type: (ast.IDLAST, str) -> None
"""Generate a C++ header."""
str_value = generate_header_str(spec)
@@ -2339,7 +2338,7 @@ def _generate_header(spec, file_name):
def generate_source_str(spec, target_arch, header_file_name):
- # type: (ast.IDLAST, unicode, unicode) -> unicode
+ # type: (ast.IDLAST, str, str) -> str
"""Generate a C++ source file in-memory."""
stream = io.StringIO()
text_writer = writer.IndentedTextWriter(stream)
@@ -2352,7 +2351,7 @@ def generate_source_str(spec, target_arch, header_file_name):
def _generate_source(spec, target_arch, file_name, header_file_name):
- # type: (ast.IDLAST, unicode, unicode, unicode) -> None
+ # type: (ast.IDLAST, str, str, str) -> None
"""Generate a C++ source file."""
str_value = generate_source_str(spec, target_arch, header_file_name)
@@ -2362,7 +2361,7 @@ def _generate_source(spec, target_arch, file_name, header_file_name):
def generate_code(spec, target_arch, output_base_dir, header_file_name, source_file_name):
- # type: (ast.IDLAST, unicode, unicode, unicode, unicode) -> None
+ # type: (ast.IDLAST, str, str, str, str) -> None
"""Generate a C++ header and source file from an idl.ast tree."""
_generate_header(spec, header_file_name)
diff --git a/buildscripts/idl/idl/parser.py b/buildscripts/idl/idl/parser.py
index 285ecac8c01..f8e0e1e8943 100644
--- a/buildscripts/idl/idl/parser.py
+++ b/buildscripts/idl/idl/parser.py
@@ -31,7 +31,6 @@ IDL Parser.
Converts a YAML document to an idl.syntax tree.
Only validates that the document is syntactically correct, not semantically.
"""
-from __future__ import absolute_import, print_function, unicode_literals
from abc import ABCMeta, abstractmethod
import io
@@ -64,11 +63,11 @@ class _RuleDesc(object):
OPTIONAL = 2
def __init__(self, node_type, required=OPTIONAL, mapping_parser_func=None):
- # type: (unicode, int, Callable[[errors.ParserContext,yaml.nodes.MappingNode], Any]) -> None
+ # type: (str, int, Callable[[errors.ParserContext,yaml.nodes.MappingNode], Any]) -> None
"""Construct a parser rule description."""
- assert required == _RuleDesc.REQUIRED or required == _RuleDesc.OPTIONAL
+ assert required in (_RuleDesc.REQUIRED, _RuleDesc.OPTIONAL)
- self.node_type = node_type # type: unicode
+ self.node_type = node_type # type: str
self.required = required # type: int
self.mapping_parser_func = mapping_parser_func # type: Callable[[errors.ParserContext,yaml.nodes.MappingNode], Any]
@@ -76,9 +75,9 @@ class _RuleDesc(object):
def _generic_parser(
ctxt, # type: errors.ParserContext
node, # type: Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode]
- syntax_node_name, # type: unicode
+ syntax_node_name, # type: str
syntax_node, # type: Any
- mapping_rules # type: Dict[unicode, _RuleDesc]
+ mapping_rules # type: Dict[str, _RuleDesc]
): # type: (...) -> None
# pylint: disable=too-many-branches
field_name_set = set() # type: Set[str]
@@ -118,15 +117,15 @@ def _generic_parser(
syntax_node.__dict__[first_name] = rule_desc.mapping_parser_func(
ctxt, second_node)
else:
- raise errors.IDLError("Unknown node_type '%s' for parser rule" %
- (rule_desc.node_type))
+ raise errors.IDLError(
+ "Unknown node_type '%s' for parser rule" % (rule_desc.node_type))
else:
ctxt.add_unknown_node_error(first_node, syntax_node_name)
field_name_set.add(first_name)
# Check for any missing required fields
- for name, rule_desc in mapping_rules.items():
+ for name, rule_desc in list(mapping_rules.items()):
if rule_desc.required != _RuleDesc.REQUIRED:
continue
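Wrapping `items()` in `list()` snapshots the dictionary; that is the defensive pattern for loops that might mutate the dict, since Python 3 raises if a live view is iterated during mutation. Standalone sketch with made-up rules:

    mapping_rules = {'description': 'required', 'strict': 'optional'}
    for name in list(mapping_rules):
        if mapping_rules[name] == 'optional':
            del mapping_rules[name]  # deleting during live-view iteration
                                     # would raise RuntimeError on Python 3
    print(mapping_rules)  # {'description': 'required'}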
@@ -137,16 +136,16 @@ def _generic_parser(
if syntax_node.__dict__[name] is None:
ctxt.add_missing_required_field_error(node, syntax_node_name, name)
else:
- raise errors.IDLError("Unknown node_type '%s' for parser required rule" %
- (rule_desc.node_type))
+ raise errors.IDLError(
+ "Unknown node_type '%s' for parser required rule" % (rule_desc.node_type))
def _parse_mapping(
ctxt, # type: errors.ParserContext
spec, # type: syntax.IDLSpec
node, # type: Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode]
- syntax_node_name, # type: unicode
- func # type: Callable[[errors.ParserContext,syntax.IDLSpec,unicode,Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode]], None]
+ syntax_node_name, # type: str
+ func # type: Callable[[errors.ParserContext,syntax.IDLSpec,str,Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode]], None]
): # type: (...) -> None
"""Parse a top-level mapping section in the IDL file."""
if not ctxt.is_mapping_node(node, syntax_node_name):
@@ -220,7 +219,7 @@ def _parse_imports(ctxt, spec, node):
def _parse_type(ctxt, spec, name, node):
- # type: (errors.ParserContext, syntax.IDLSpec, unicode, Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode]) -> None
+ # type: (errors.ParserContext, syntax.IDLSpec, str, Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode]) -> None
"""Parse a type section in the IDL file."""
if not ctxt.is_mapping_node(node, "type"):
return
@@ -431,7 +430,7 @@ def _parse_chained_structs(ctxt, node):
def _parse_struct(ctxt, spec, name, node):
- # type: (errors.ParserContext, syntax.IDLSpec, unicode, Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode]) -> None
+ # type: (errors.ParserContext, syntax.IDLSpec, str, Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode]) -> None
"""Parse a struct section in the IDL file."""
if not ctxt.is_mapping_node(node, "struct"):
return
@@ -488,7 +487,7 @@ def _parse_enum_values(ctxt, node):
def _parse_enum(ctxt, spec, name, node):
- # type: (errors.ParserContext, syntax.IDLSpec, unicode, Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode]) -> None
+ # type: (errors.ParserContext, syntax.IDLSpec, str, Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode]) -> None
"""Parse an enum section in the IDL file."""
if not ctxt.is_mapping_node(node, "struct"):
return
@@ -510,7 +509,7 @@ def _parse_enum(ctxt, spec, name, node):
def _parse_command(ctxt, spec, name, node):
- # type: (errors.ParserContext, syntax.IDLSpec, unicode, Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode]) -> None
+ # type: (errors.ParserContext, syntax.IDLSpec, str, Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode]) -> None
"""Parse a command section in the IDL file."""
if not ctxt.is_mapping_node(node, "command"):
return
@@ -578,7 +577,7 @@ def _parse_server_parameter_class(ctxt, node):
def _parse_server_parameter(ctxt, spec, name, node):
- # type: (errors.ParserContext, syntax.IDLSpec, unicode, Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode]) -> None
+ # type: (errors.ParserContext, syntax.IDLSpec, str, Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode]) -> None
"""Parse a server_parameters section in the IDL file."""
if not ctxt.is_mapping_node(node, "server_parameters"):
return
@@ -609,7 +608,7 @@ def _parse_server_parameter(ctxt, spec, name, node):
def _parse_config_option(ctxt, spec, name, node):
- # type: (errors.ParserContext, syntax.IDLSpec, unicode, Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode]) -> None
+ # type: (errors.ParserContext, syntax.IDLSpec, str, Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode]) -> None
"""Parse a configs section in the IDL file."""
if not ctxt.is_mapping_node(node, "configs"):
return
@@ -646,7 +645,7 @@ def _parse_config_option(ctxt, spec, name, node):
def _prefix_with_namespace(cpp_namespace, cpp_name):
- # type: (unicode, unicode) -> unicode
+ # type: (str, str) -> str
"""Preface a C++ type name with a namespace if not already qualified or a primitive type."""
if "::" in cpp_name or cpp_types.is_primitive_scalar_type(cpp_name):
return cpp_name
@@ -676,7 +675,7 @@ def _propagate_globals(spec):
def _parse(stream, error_file_name):
- # type: (Any, unicode) -> syntax.IDLParsedSpec
+ # type: (Any, str) -> syntax.IDLParsedSpec
"""
Parse a YAML document into an idl.syntax tree.
@@ -740,11 +739,9 @@ def _parse(stream, error_file_name):
return syntax.IDLParsedSpec(spec, None)
-class ImportResolverBase(object):
+class ImportResolverBase(object, metaclass=ABCMeta):
"""Base class for resolving imported files."""
- __metaclass__ = ABCMeta
-
def __init__(self):
# type: () -> None
"""Construct a ImportResolver."""
@@ -752,19 +749,19 @@ class ImportResolverBase(object):
@abstractmethod
def resolve(self, base_file, imported_file_name):
- # type: (unicode, unicode) -> unicode
+ # type: (str, str) -> str
"""Return the complete path to an imported file name."""
pass
@abstractmethod
def open(self, resolved_file_name):
- # type: (unicode) -> Any
+ # type: (str) -> Any
"""Return an io.Stream for the requested file."""
pass
def parse(stream, input_file_name, resolver):
- # type: (Any, unicode, ImportResolverBase) -> syntax.IDLParsedSpec
+ # type: (Any, str, ImportResolverBase) -> syntax.IDLParsedSpec
"""
Parse a YAML document into an idl.syntax tree.
@@ -778,13 +775,13 @@ def parse(stream, input_file_name, resolver):
if root_doc.errors:
return root_doc
- imports = [] # type: List[Tuple[common.SourceLocation, unicode, unicode]]
- needs_include = [] # type: List[unicode]
+ imports = [] # type: List[Tuple[common.SourceLocation, str, str]]
+ needs_include = [] # type: List[str]
if root_doc.spec.imports:
imports = [(root_doc.spec.imports, input_file_name, import_file_name)
for import_file_name in root_doc.spec.imports.imports]
- resolved_file_names = [] # type: List[unicode]
+ resolved_file_names = [] # type: List[str]
ctxt = errors.ParserContext(input_file_name, errors.ParserErrorCollection())
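For illustration, a hypothetical concrete subclass of the `ImportResolverBase` ABC above; the directory search order and the encoding are assumptions, not project behavior:

    import io
    import os

    class DirectoryImportResolver(ImportResolverBase):
        """Resolve imports against the importing file's directory, then extra dirs."""

        def __init__(self, import_dirs):
            # type: (List[str]) -> None
            self._import_dirs = import_dirs
            super(DirectoryImportResolver, self).__init__()

        def resolve(self, base_file, imported_file_name):
            # type: (str, str) -> str
            for directory in [os.path.dirname(base_file)] + self._import_dirs:
                candidate = os.path.join(directory, imported_file_name)
                if os.path.exists(candidate):
                    return candidate
            return None  # unresolved; caller handling is an assumption

        def open(self, resolved_file_name):
            # type: (str) -> Any
            return io.open(resolved_file_name, encoding='utf-8')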
diff --git a/buildscripts/idl/idl/struct_types.py b/buildscripts/idl/idl/struct_types.py
index 8be2165945f..f5d7a7489d9 100644
--- a/buildscripts/idl/idl/struct_types.py
+++ b/buildscripts/idl/idl/struct_types.py
@@ -27,8 +27,6 @@
#
"""Provide code generation information for structs and commands in a polymorphic way."""
-from __future__ import absolute_import, print_function, unicode_literals
-
from abc import ABCMeta, abstractmethod
from typing import Optional, List
@@ -46,7 +44,7 @@ def _is_required_constructor_arg(field):
def _get_arg_for_field(field):
- # type: (ast.Field) -> unicode
+ # type: (ast.Field) -> str
"""Generate a moveable parameter."""
cpp_type_info = cpp_types.get_cpp_type(field)
# Use the storage type for the constructor argument since the generated code will use std::move.
@@ -56,7 +54,7 @@ def _get_arg_for_field(field):
def _get_required_parameters(struct):
- # type: (ast.Struct) -> List[unicode]
+ # type: (ast.Struct) -> List[str]
"""Get a list of arguments for required parameters."""
return [
_get_arg_for_field(field) for field in struct.fields if _is_required_constructor_arg(field)
@@ -67,7 +65,7 @@ class ArgumentInfo(object):
"""Class that encapsulates information about an argument to a method."""
def __init__(self, arg):
- # type: (unicode) -> None
+ # type: (str) -> None
"""Create a instance of the ArgumentInfo class by parsing the argument string."""
parts = arg.split(' ')
self.type = ' '.join(parts[0:-1])
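The constructor splits on single spaces and treats the last token as the name, everything before it as the type. Standalone sketch with a made-up argument string:

    arg = 'const NamespaceString& nss'  # hypothetical argument spec
    parts = arg.split(' ')
    arg_type = ' '.join(parts[0:-1])    # 'const NamespaceString&'
    arg_name = parts[-1]                # 'nss'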
@@ -84,7 +82,7 @@ class MethodInfo(object):
def __init__(self, class_name, method_name, args, return_type=None, static=False, const=False,
explicit=False):
- # type: (unicode, unicode, List[unicode], unicode, bool, bool, bool) -> None
+ # type: (str, str, List[str], str, bool, bool, bool) -> None
# pylint: disable=too-many-arguments
"""Create a MethodInfo instance."""
self.class_name = class_name
@@ -96,7 +94,7 @@ class MethodInfo(object):
self.explicit = explicit
def get_declaration(self):
- # type: () -> unicode
+ # type: () -> str
"""Get a declaration for a method."""
pre_modifiers = ''
post_modifiers = ''
@@ -120,7 +118,7 @@ class MethodInfo(object):
args=', '.join([str(arg) for arg in self.args]), post_modifiers=post_modifiers)
def get_definition(self):
- # type: () -> unicode
+ # type: () -> str
"""Get a definition for a method."""
pre_modifiers = ''
post_modifiers = ''
@@ -139,7 +137,7 @@ class MethodInfo(object):
[str(arg) for arg in self.args]), post_modifiers=post_modifiers)
def get_call(self, obj):
- # type: (Optional[unicode]) -> unicode
+ # type: (Optional[str]) -> str
"""Generate a simply call to the method using the defined args list."""
args = ', '.join([arg.name for arg in self.args])
@@ -152,11 +150,9 @@ class MethodInfo(object):
args=args)
-class StructTypeInfoBase(object):
+class StructTypeInfoBase(object, metaclass=ABCMeta):
"""Base class for struct and command code generation."""
- __metaclass__ = ABCMeta
-
@abstractmethod
def get_constructor_method(self):
# type: () -> MethodInfo
@@ -234,7 +230,7 @@ class StructTypeInfoBase(object):
@abstractmethod
def gen_namespace_check(self, indented_writer, db_name, element):
- # type: (writer.IndentedTextWriter, unicode, unicode) -> None
+ # type: (writer.IndentedTextWriter, str, str) -> None
"""Generate the namespace check predicate for a command."""
pass
@@ -306,7 +302,7 @@ class _StructTypeInfo(StructTypeInfoBase):
pass
def gen_namespace_check(self, indented_writer, db_name, element):
- # type: (writer.IndentedTextWriter, unicode, unicode) -> None
+ # type: (writer.IndentedTextWriter, str, str) -> None
pass
@@ -369,12 +365,12 @@ class _IgnoredCommandTypeInfo(_CommandBaseTypeInfo):
indented_writer.write_line('builder->append("%s"_sd, 1);' % (self._command.name))
def gen_namespace_check(self, indented_writer, db_name, element):
- # type: (writer.IndentedTextWriter, unicode, unicode) -> None
+ # type: (writer.IndentedTextWriter, str, str) -> None
pass
def _get_command_type_parameter(command):
- # type: (ast.Command) -> unicode
+ # type: (ast.Command) -> str
"""Get the parameter for the command type."""
cpp_type_info = cpp_types.get_cpp_type(command.command_field)
# Use the storage type for the constructor argument since the generated code will use std::move.
@@ -440,7 +436,7 @@ class _CommandFromType(_CommandBaseTypeInfo):
raise NotImplementedError
def gen_namespace_check(self, indented_writer, db_name, element):
- # type: (writer.IndentedTextWriter, unicode, unicode) -> None
+ # type: (writer.IndentedTextWriter, str, str) -> None
# TODO: should the name of the first element be validated??
raise NotImplementedError
@@ -500,11 +496,11 @@ class _CommandWithNamespaceTypeInfo(_CommandBaseTypeInfo):
indented_writer.write_empty_line()
def gen_namespace_check(self, indented_writer, db_name, element):
- # type: (writer.IndentedTextWriter, unicode, unicode) -> None
+ # type: (writer.IndentedTextWriter, str, str) -> None
# TODO: should the name of the first element be validated??
indented_writer.write_line('invariant(_nss.isEmpty());')
- indented_writer.write_line('_nss = ctxt.parseNSCollectionRequired(%s, %s);' % (db_name,
- element))
+ indented_writer.write_line(
+ '_nss = ctxt.parseNSCollectionRequired(%s, %s);' % (db_name, element))
class _CommandWithUUIDNamespaceTypeInfo(_CommandBaseTypeInfo):
@@ -566,12 +562,12 @@ class _CommandWithUUIDNamespaceTypeInfo(_CommandBaseTypeInfo):
indented_writer.write_line(
'_nssOrUUID.uuid().get().appendToBuilder(builder, "%s"_sd);' % (self._command.name))
with writer.IndentedScopedBlock(indented_writer, "else {", "}"):
- indented_writer.write_line('builder->append("%s"_sd, _nssOrUUID.nss().get().coll());' %
- (self._command.name))
+ indented_writer.write_line(
+ 'builder->append("%s"_sd, _nssOrUUID.nss().get().coll());' % (self._command.name))
indented_writer.write_empty_line()
def gen_namespace_check(self, indented_writer, db_name, element):
- # type: (writer.IndentedTextWriter, unicode, unicode) -> None
+ # type: (writer.IndentedTextWriter, str, str) -> None
indented_writer.write_line('invariant(_nssOrUUID.nss() || _nssOrUUID.uuid());')
indented_writer.write_line('_nssOrUUID = ctxt.parseNsOrUUID(%s, %s);' % (db_name, element))
diff --git a/buildscripts/idl/idl/syntax.py b/buildscripts/idl/idl/syntax.py
index 6051069d126..10a4350f25f 100644
--- a/buildscripts/idl/idl/syntax.py
+++ b/buildscripts/idl/idl/syntax.py
@@ -33,8 +33,6 @@ It maps 1-1 to the YAML file, and has not been checked if
it follows the rules of the IDL, etc.
"""
-from __future__ import absolute_import, print_function, unicode_literals
-
import itertools
from typing import Any, Dict, Iterator, List, Optional, Tuple, Union
@@ -72,7 +70,7 @@ class IDLSpec(object):
def parse_array_type(name):
- # type: (unicode) -> unicode
+ # type: (str) -> str
"""Parse a type name of the form 'array<type>' and extract type."""
if not name.startswith("array<") and not name.endswith(">"):
return None
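Equivalent standalone sketch of the `array<...>` unwrapping, with asserts showing the intended behavior:

    def parse_array_type_sketch(name):
        # type: (str) -> str
        """Return the inner type of 'array<type>', else None (sketch only)."""
        if not (name.startswith('array<') and name.endswith('>')):
            return None
        return name[len('array<'):-1]

    assert parse_array_type_sketch('array<string>') == 'string'
    assert parse_array_type_sketch('int') is None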
@@ -96,8 +94,7 @@ def _zip_scalar(items, obj):
def _item_and_type(dic):
# type: (Dict[Any, List[Any]]) -> Iterator[Tuple[Any, Any]]
"""Return an Iterator of (key, value) pairs from a dictionary."""
- return itertools.chain.from_iterable(
- (_zip_scalar(value, key) for (key, value) in dic.viewitems()))
+ return itertools.chain.from_iterable((_zip_scalar(value, key) for (key, value) in dic.items()))
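What `_item_and_type` yields, sketched standalone (the body of `_zip_scalar` is assumed to pair each list element with its key):

    import itertools

    def zip_scalar_sketch(items, obj):
        return ((item, obj) for item in items)  # assumed _zip_scalar behavior

    dic = {'struct': ['Foo', 'Bar'], 'enum': ['Color']}
    pairs = itertools.chain.from_iterable(
        zip_scalar_sketch(value, key) for (key, value) in dic.items())
    print(sorted(pairs))
    # [('Bar', 'struct'), ('Color', 'enum'), ('Foo', 'struct')]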
class SymbolTable(object):
@@ -117,7 +114,7 @@ class SymbolTable(object):
self.types = [] # type: List[Type]
def _is_duplicate(self, ctxt, location, name, duplicate_class_name):
- # type: (errors.ParserContext, common.SourceLocation, unicode, unicode) -> bool
+ # type: (errors.ParserContext, common.SourceLocation, str, str) -> bool
"""Return true if the given item already exist in the symbol table."""
for (item, entity_type) in _item_and_type({
"command": self.commands,
@@ -181,12 +178,12 @@ class SymbolTable(object):
self.add_type(ctxt, idltype)
def resolve_field_type(self, ctxt, location, field_name, type_name):
- # type: (errors.ParserContext, common.SourceLocation, unicode, unicode) -> Optional[Union[Command, Enum, Struct, Type]]
+ # type: (errors.ParserContext, common.SourceLocation, str, str) -> Optional[Union[Command, Enum, Struct, Type]]
"""Find the type or struct a field refers to or log an error."""
return self._resolve_field_type(ctxt, location, field_name, type_name)
def _resolve_field_type(self, ctxt, location, field_name, type_name):
- # type: (errors.ParserContext, common.SourceLocation, unicode, unicode) -> Optional[Union[Command, Enum, Struct, Type]]
+ # type: (errors.ParserContext, common.SourceLocation, str, str) -> Optional[Union[Command, Enum, Struct, Type]]
"""Find the type or struct a field refers to or log an error."""
# pylint: disable=too-many-return-statements
@@ -228,10 +225,10 @@ class Global(common.SourceLocation):
"""
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a Global."""
- self.cpp_namespace = None # type: unicode
- self.cpp_includes = [] # type: List[unicode]
+ self.cpp_namespace = None # type: str
+ self.cpp_includes = [] # type: List[str]
self.configs = None # type: ConfigGlobal
super(Global, self).__init__(file_name, line, column)
@@ -241,15 +238,15 @@ class Import(common.SourceLocation):
"""IDL imports object."""
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct an Imports section."""
- self.imports = [] # type: List[unicode]
+ self.imports = [] # type: List[str]
# These are not part of the IDL syntax but are produced by the parser.
# List of imports with structs.
- self.resolved_imports = [] # type: List[unicode]
+ self.resolved_imports = [] # type: List[str]
# All imports directly or indirectly included
- self.dependencies = [] # type: List[unicode]
+ self.dependencies = [] # type: List[str]
super(Import, self).__init__(file_name, line, column)
@@ -266,16 +263,16 @@ class Type(common.SourceLocation):
# pylint: disable=too-many-instance-attributes
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a Type."""
- self.name = None # type: unicode
- self.description = None # type: unicode
- self.cpp_type = None # type: unicode
- self.bson_serialization_type = None # type: List[unicode]
- self.bindata_subtype = None # type: unicode
- self.serializer = None # type: unicode
- self.deserializer = None # type: unicode
- self.default = None # type: unicode
+ self.name = None # type: str
+ self.description = None # type: str
+ self.cpp_type = None # type: str
+ self.bson_serialization_type = None # type: List[str]
+ self.bindata_subtype = None # type: str
+ self.serializer = None # type: str
+ self.deserializer = None # type: str
+ self.default = None # type: str
super(Type, self).__init__(file_name, line, column)
@@ -291,7 +288,7 @@ class Validator(common.SourceLocation):
# pylint: disable=too-many-instance-attributes
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a Validator."""
# Don't lint gt/lt as bad attribute names.
# pylint: disable=C0103
@@ -299,7 +296,7 @@ class Validator(common.SourceLocation):
self.lt = None # type: Expression
self.gte = None # type: Expression
self.lte = None # type: Expression
- self.callback = None # type: unicode
+ self.callback = None # type: str
super(Validator, self).__init__(file_name, line, column)
@@ -316,15 +313,15 @@ class Field(common.SourceLocation):
# pylint: disable=too-many-instance-attributes
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a Field."""
- self.name = None # type: unicode
- self.cpp_name = None # type: unicode
- self.description = None # type: unicode
- self.type = None # type: unicode
+ self.name = None # type: str
+ self.cpp_name = None # type: str
+ self.description = None # type: str
+ self.type = None # type: str
self.ignore = False # type: bool
self.optional = False # type: bool
- self.default = None # type: unicode
+ self.default = None # type: str
self.supports_doc_sequence = False # type: bool
self.comparison_order = -1 # type: int
self.validator = None # type: Validator
@@ -344,10 +341,10 @@ class ChainedStruct(common.SourceLocation):
"""
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a Type."""
- self.name = None # type: unicode
- self.cpp_name = None # type: unicode
+ self.name = None # type: str
+ self.cpp_name = None # type: str
super(ChainedStruct, self).__init__(file_name, line, column)
@@ -360,10 +357,10 @@ class ChainedType(common.SourceLocation):
"""
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a Type."""
- self.name = None # type: unicode
- self.cpp_name = None # type: unicode
+ self.name = None # type: str
+ self.cpp_name = None # type: str
super(ChainedType, self).__init__(file_name, line, column)
@@ -378,10 +375,10 @@ class Struct(common.SourceLocation):
# pylint: disable=too-many-instance-attributes
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a Struct."""
- self.name = None # type: unicode
- self.description = None # type: unicode
+ self.name = None # type: str
+ self.description = None # type: str
self.strict = True # type: bool
self.immutable = False # type: bool
self.inline_chained_structs = True # type: bool
@@ -391,14 +388,14 @@ class Struct(common.SourceLocation):
self.fields = None # type: List[Field]
# Command only property
- self.cpp_name = None # type: unicode
+ self.cpp_name = None # type: str
# Internal property that is not represented as syntax. An imported struct is read from an
# imported file, and no code is generated for it.
self.imported = False # type: bool
# Internal property: cpp_namespace from globals section
- self.cpp_namespace = None # type: unicode
+ self.cpp_namespace = None # type: str
super(Struct, self).__init__(file_name, line, column)
@@ -411,10 +408,10 @@ class Command(Struct):
"""
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a Command."""
- self.namespace = None # type: unicode
- self.type = None # type: unicode
+ self.namespace = None # type: str
+ self.type = None # type: str
super(Command, self).__init__(file_name, line, column)
@@ -427,10 +424,10 @@ class EnumValue(common.SourceLocation):
"""
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct an Enum."""
- self.name = None # type: unicode
- self.value = None # type: unicode
+ self.name = None # type: str
+ self.value = None # type: str
super(EnumValue, self).__init__(file_name, line, column)
@@ -443,11 +440,11 @@ class Enum(common.SourceLocation):
"""
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct an Enum."""
- self.name = None # type: unicode
- self.description = None # type: unicode
- self.type = None # type: unicode
+ self.name = None # type: str
+ self.description = None # type: str
+ self.type = None # type: str
self.values = None # type: List[EnumValue]
# Internal property that is not represented as syntax. An imported enum is read from an
@@ -455,7 +452,7 @@ class Enum(common.SourceLocation):
self.imported = False # type: bool
# Internal property: cpp_namespace from globals section
- self.cpp_namespace = None # type: unicode
+ self.cpp_namespace = None # type: str
super(Enum, self).__init__(file_name, line, column)
@@ -464,11 +461,11 @@ class Condition(common.SourceLocation):
"""Condition(s) for a ServerParameter or ConfigOption."""
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a Condition."""
- self.expr = None # type: unicode
- self.constexpr = None # type: unicode
- self.preprocessor = None # type: unicode
+ self.expr = None # type: str
+ self.constexpr = None # type: str
+ self.preprocessor = None # type: str
super(Condition, self).__init__(file_name, line, column)
@@ -477,11 +474,11 @@ class Expression(common.SourceLocation):
"""Description of a valid C++ expression."""
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct an Expression."""
- self.literal = None # type: unicode
- self.expr = None # type: unicode
+ self.literal = None # type: str
+ self.expr = None # type: str
self.is_constexpr = True # type: bool
super(Expression, self).__init__(file_name, line, column)
@@ -491,11 +488,11 @@ class ServerParameterClass(common.SourceLocation):
"""ServerParameter as C++ class specialization."""
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a ServerParameterClass."""
- self.name = None # type: unicode
- self.data = None # type: unicode
+ self.name = None # type: str
+ self.data = None # type: str
self.override_ctor = False # type: bool
self.override_set = False # type: bool
@@ -508,23 +505,23 @@ class ServerParameter(common.SourceLocation):
# pylint: disable=too-many-instance-attributes
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a ServerParameter."""
- self.name = None # type: unicode
- self.set_at = None # type: List[unicode]
- self.description = None # type: unicode
- self.cpp_vartype = None # type: unicode
- self.cpp_varname = None # type: unicode
+ self.name = None # type: str
+ self.set_at = None # type: List[str]
+ self.description = None # type: str
+ self.cpp_vartype = None # type: str
+ self.cpp_varname = None # type: str
self.cpp_class = None # type: ServerParameterClass
self.condition = None # type: Condition
- self.deprecated_name = [] # type: List[unicode]
+ self.deprecated_name = [] # type: List[str]
self.redact = False # type: bool
self.test_only = False # type: bool
self.default = None # type: Expression
# Only valid if cpp_varname is specified.
self.validator = None # type: Validator
- self.on_update = None # type: unicode
+ self.on_update = None # type: str
super(ServerParameter, self).__init__(file_name, line, column)
@@ -533,12 +530,12 @@ class GlobalInitializer(common.SourceLocation):
"""Initializer details for custom registration/storage."""
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a GlobalInitializer."""
- self.name = None # type: unicode
- self.register = None # type: unicode
- self.store = None # type: unicode
+ self.name = None # type: str
+ self.register = None # type: str
+ self.store = None # type: str
super(GlobalInitializer, self).__init__(file_name, line, column)
@@ -547,10 +544,10 @@ class ConfigGlobal(common.SourceLocation):
"""Global values to apply to all ConfigOptions."""
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a ConfigGlobal."""
- self.section = None # type: unicode
- self.source = [] # type: List[unicode]
+ self.section = None # type: str
+ self.source = [] # type: List[str]
self.initializer = None # type: GlobalInitializer
super(ConfigGlobal, self).__init__(file_name, line, column)
@@ -562,32 +559,32 @@ class ConfigOption(common.SourceLocation):
# pylint: disable=too-many-instance-attributes
def __init__(self, file_name, line, column):
- # type: (unicode, int, int) -> None
+ # type: (str, int, int) -> None
"""Construct a ConfigOption."""
- self.name = None # type: unicode
- self.deprecated_name = [] # type: List[unicode]
- self.short_name = None # type: unicode
- self.single_name = None # type: unicode
- self.deprecated_short_name = [] # type: List[unicode]
+ self.name = None # type: str
+ self.deprecated_name = [] # type: List[str]
+ self.short_name = None # type: str
+ self.single_name = None # type: str
+ self.deprecated_short_name = [] # type: List[str]
self.description = None # type: Expression
- self.section = None # type: unicode
- self.arg_vartype = None # type: unicode
- self.cpp_vartype = None # type: unicode
- self.cpp_varname = None # type: unicode
+ self.section = None # type: str
+ self.arg_vartype = None # type: str
+ self.cpp_vartype = None # type: str
+ self.cpp_varname = None # type: str
self.condition = None # type: Condition
- self.conflicts = [] # type: List[unicode]
- self.requires = [] # type: List[unicode]
+ self.conflicts = [] # type: List[str]
+ self.requires = [] # type: List[str]
self.hidden = False # type: bool
self.redact = False # type: bool
self.default = None # type: Expression
self.implicit = None # type: Expression
- self.source = [] # type: List[unicode]
- self.canonicalize = None # type: unicode
+ self.source = [] # type: List[str]
+ self.canonicalize = None # type: str
- self.duplicate_behavior = None # type: unicode
- self.positional = None # type unicode
+ self.duplicate_behavior = None # type: str
+        self.positional = None  # type: str
self.validator = None # type: Validator
super(ConfigOption, self).__init__(file_name, line, column)
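
The syntax.py changes above are mechanical: every `# type: unicode` comment becomes `# type: str`. A minimal sketch of why, under the assumption that these files are checked with mypy (the class and attribute names below are illustrative, not from the patch):

    from typing import List

    class OptionSketch(object):
        """Hypothetical IDL-style node using the same type-comment idiom."""

        def __init__(self, file_name, line, column):
            # type: (str, int, int) -> None
            # Python 2 had a builtin `unicode`, so `# type: unicode` resolved.
            # Python 3 removes that name, so mypy reports an undefined type
            # unless the comments are rewritten to `str`.
            self.file_name = file_name   # type: str
            self.aliases = []            # type: List[str]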
diff --git a/buildscripts/idl/idl/writer.py b/buildscripts/idl/idl/writer.py
index 6037821287c..4e82fcf1f21 100644
--- a/buildscripts/idl/idl/writer.py
+++ b/buildscripts/idl/idl/writer.py
@@ -27,8 +27,6 @@
#
"""Text Writing Utilites."""
-from __future__ import absolute_import, print_function, unicode_literals
-
import io
import string
from typing import List, Mapping, Union
@@ -40,7 +38,7 @@ _INDENT_SPACE_COUNT = 4
def _fill_spaces(count):
- # type: (int) -> unicode
+ # type: (int) -> str
"""Fill a string full of spaces."""
fill = ''
for _ in range(count * _INDENT_SPACE_COUNT):
@@ -50,7 +48,7 @@ def _fill_spaces(count):
def _indent_text(count, unindented_text):
- # type: (int, unicode) -> unicode
+ # type: (int, str) -> str
"""Indent each line of a multi-line string."""
lines = unindented_text.splitlines()
fill = _fill_spaces(count)
@@ -58,7 +56,7 @@ def _indent_text(count, unindented_text):
def is_function(name):
- # type: (unicode) -> bool
+ # type: (str) -> bool
"""
Return True if a serializer/deserializer is a function.
@@ -69,7 +67,7 @@ def is_function(name):
def get_method_name(name):
- # type: (unicode) -> unicode
+ # type: (str) -> str
"""Get a method name from a fully qualified method name."""
pos = name.rfind('::')
if pos == -1:
@@ -78,7 +76,7 @@ def get_method_name(name):
def get_method_name_from_qualified_method_name(name):
- # type: (unicode) -> unicode
+ # type: (str) -> str
# pylint: disable=invalid-name
"""Get a method name from a fully qualified method name."""
# TODO: in the future, we may want to support full-qualified calls to static methods
@@ -108,10 +106,10 @@ class IndentedTextWriter(object):
"""Create an indented text writer."""
self._stream = stream
self._indent = 0
- self._template_context = None # type: Mapping[unicode, unicode]
+ self._template_context = None # type: Mapping[str, str]
def write_unindented_line(self, msg):
- # type: (unicode) -> None
+ # type: (str) -> None
"""Write an unindented line to the stream, no template formattin applied."""
self._stream.write(msg)
self._stream.write("\n")
@@ -128,13 +126,13 @@ class IndentedTextWriter(object):
self._indent -= 1
def write_line(self, msg):
- # type: (unicode) -> None
+ # type: (str) -> None
"""Write a line to the stream, no template formattin applied."""
self._stream.write(_indent_text(self._indent, msg))
self._stream.write("\n")
def set_template_mapping(self, template_params):
- # type: (Mapping[unicode,unicode]) -> None
+ # type: (Mapping[str,str]) -> None
"""Set the current template mapping parameters for string.Template formatting."""
assert not self._template_context
self._template_context = template_params
@@ -146,7 +144,7 @@ class IndentedTextWriter(object):
self._template_context = None
def write_template(self, template):
- # type: (unicode) -> None
+ # type: (str) -> None
"""Write a template to the stream."""
msg = common.template_format(template, self._template_context)
self._stream.write(_indent_text(self._indent, msg))
@@ -162,7 +160,7 @@ class TemplateContext(object):
"""Set the template context for the writer."""
def __init__(self, writer, template_params):
- # type: (IndentedTextWriter, Mapping[unicode,unicode]) -> None
+ # type: (IndentedTextWriter, Mapping[str,str]) -> None
"""Create a template context."""
self._writer = writer
self._template_context = template_params
@@ -215,7 +213,7 @@ class IndentedScopedBlock(WriterBlock):
"""Generate a block, template the parameters, and indent the contents."""
def __init__(self, writer, opening, closing):
- # type: (IndentedTextWriter, unicode, unicode) -> None
+ # type: (IndentedTextWriter, str, str) -> None
"""Create a block."""
self._writer = writer
self._opening = opening
@@ -238,7 +236,7 @@ class NamespaceScopeBlock(WriterBlock):
"""Generate an unindented blocks for a list of namespaces, and do not indent the contents."""
def __init__(self, indented_writer, namespaces):
- # type: (IndentedTextWriter, List[unicode]) -> None
+ # type: (IndentedTextWriter, List[str]) -> None
"""Create a block."""
self._writer = indented_writer
self._namespaces = namespaces
@@ -262,7 +260,7 @@ class UnindentedBlock(WriterBlock):
"""Generate a block without indentation."""
def __init__(self, writer, opening, closing):
- # type: (IndentedTextWriter, unicode, unicode) -> None
+ # type: (IndentedTextWriter, str, str) -> None
"""Create a block."""
self._writer = writer
self._opening = opening
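
The writer's template methods delegate to common.template_format; a minimal sketch of the substitution they perform, assuming that helper wraps string.Template roughly as below (the wrapper is illustrative, not the module's actual code):

    import string

    def template_format(template, template_params):
        # type: (str, dict) -> str
        """Substitute ${param} placeholders, as write_template relies on."""
        return string.Template(template).substitute(template_params)

    print(template_format('class ${class_name} {};', {'class_name': 'Foo'}))
    # -> class Foo {};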
diff --git a/buildscripts/idl/idlc.py b/buildscripts/idl/idlc.py
index 13653896195..2a98df124f8 100644
--- a/buildscripts/idl/idlc.py
+++ b/buildscripts/idl/idlc.py
@@ -29,8 +29,6 @@
#
"""IDL Compiler Driver Main Entry point."""
-from __future__ import absolute_import, print_function
-
import argparse
import logging
import sys
diff --git a/buildscripts/idl/run_tests.py b/buildscripts/idl/run_tests.py
index 3a93b9caac1..ab9e3e8930e 100644
--- a/buildscripts/idl/run_tests.py
+++ b/buildscripts/idl/run_tests.py
@@ -32,7 +32,6 @@ IDL Unit Test runner.
Generates a file called results.xml in the XUnit format.
"""
-from __future__ import absolute_import, print_function
import sys
import unittest
diff --git a/buildscripts/idl/tests/test_binder.py b/buildscripts/idl/tests/test_binder.py
index ab41e9cab0d..ff65948d68a 100644
--- a/buildscripts/idl/tests/test_binder.py
+++ b/buildscripts/idl/tests/test_binder.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python
#
# Copyright (C) 2018-present MongoDB, Inc.
#
@@ -30,8 +30,6 @@
# pylint: disable=too-many-lines
"""Test cases for IDL binder."""
-from __future__ import absolute_import, print_function, unicode_literals
-
import textwrap
import unittest
@@ -61,7 +59,7 @@ def fill_spaces(count):
def indent_text(count, unindented_text):
- # type: (int, unicode) -> unicode
+ # type: (int, str) -> str
"""Indent each line of a multi-line string."""
lines = unindented_text.splitlines()
fill = fill_spaces(count)
@@ -88,7 +86,7 @@ class TestBinder(testcase.IDLTestcase):
cpp_includes:
- 'bar'
- 'foo'"""))
- self.assertEquals(spec.globals.cpp_namespace, "something")
+ self.assertEqual(spec.globals.cpp_namespace, "something")
self.assertListEqual(spec.globals.cpp_includes, ['bar', 'foo'])
def test_type_positive(self):
@@ -566,7 +564,8 @@ class TestBinder(testcase.IDLTestcase):
""")
# Test array as name
- self.assert_bind_fail(test_preamble + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + textwrap.dedent("""
structs:
array<foo>:
description: foo
@@ -675,7 +674,8 @@ class TestBinder(testcase.IDLTestcase):
""")
# Test field of a struct type with a default
- self.assert_bind_fail(test_preamble + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + textwrap.dedent("""
structs:
foo:
description: foo
@@ -692,7 +692,8 @@ class TestBinder(testcase.IDLTestcase):
"""), idl.errors.ERROR_ID_FIELD_MUST_BE_EMPTY_FOR_STRUCT)
# Test array as field name
- self.assert_bind_fail(test_preamble + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + textwrap.dedent("""
structs:
foo:
description: foo
@@ -702,7 +703,8 @@ class TestBinder(testcase.IDLTestcase):
"""), idl.errors.ERROR_ID_ARRAY_NOT_VALID_TYPE)
# Test recursive array as field type
- self.assert_bind_fail(test_preamble + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + textwrap.dedent("""
structs:
foo:
description: foo
@@ -712,7 +714,8 @@ class TestBinder(testcase.IDLTestcase):
"""), idl.errors.ERROR_ID_BAD_ARRAY_TYPE_NAME)
# Test inherited default with array
- self.assert_bind_fail(test_preamble + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + textwrap.dedent("""
structs:
foo:
description: foo
@@ -743,7 +746,8 @@ class TestBinder(testcase.IDLTestcase):
"""), idl.errors.ERROR_ID_ARRAY_NO_DEFAULT)
# Test bindata with default
- self.assert_bind_fail(test_preamble + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + textwrap.dedent("""
structs:
foo:
description: foo
@@ -755,7 +759,8 @@ class TestBinder(testcase.IDLTestcase):
"""), idl.errors.ERROR_ID_BAD_BINDATA_DEFAULT)
# Test default and optional for the same field
- self.assert_bind_fail(test_preamble + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + textwrap.dedent("""
structs:
foo:
description: foo
@@ -768,7 +773,8 @@ class TestBinder(testcase.IDLTestcase):
"""), idl.errors.ERROR_ID_ILLEGAL_FIELD_DEFAULT_AND_OPTIONAL)
# Test duplicate comparison order
- self.assert_bind_fail(test_preamble + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + textwrap.dedent("""
structs:
foo:
description: foo
@@ -861,7 +867,8 @@ class TestBinder(testcase.IDLTestcase):
""")
# Chaining with strict struct
- self.assert_bind_fail(test_preamble + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + textwrap.dedent("""
structs:
bar1:
description: foo
@@ -871,7 +878,8 @@ class TestBinder(testcase.IDLTestcase):
"""), idl.errors.ERROR_ID_CHAINED_NO_TYPE_STRICT)
# Non-'any' type as chained type
- self.assert_bind_fail(test_preamble + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + textwrap.dedent("""
structs:
bar1:
description: foo
@@ -881,7 +889,8 @@ class TestBinder(testcase.IDLTestcase):
"""), idl.errors.ERROR_ID_CHAINED_TYPE_WRONG_BSON_TYPE)
# Chaining and fields only with same name
- self.assert_bind_fail(test_preamble + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + textwrap.dedent("""
structs:
bar1:
description: foo
@@ -893,7 +902,8 @@ class TestBinder(testcase.IDLTestcase):
"""), idl.errors.ERROR_ID_CHAINED_DUPLICATE_FIELD)
# Non-existent chained type
- self.assert_bind_fail(test_preamble + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + textwrap.dedent("""
structs:
bar1:
description: foo
@@ -905,7 +915,8 @@ class TestBinder(testcase.IDLTestcase):
"""), idl.errors.ERROR_ID_UNKNOWN_TYPE)
# A regular field as a chained type
- self.assert_bind_fail(test_preamble + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + textwrap.dedent("""
structs:
bar1:
description: foo
@@ -916,7 +927,8 @@ class TestBinder(testcase.IDLTestcase):
"""), idl.errors.ERROR_ID_UNKNOWN_TYPE)
# Array of chained types
- self.assert_bind_fail(test_preamble + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + textwrap.dedent("""
structs:
bar1:
description: foo
@@ -963,8 +975,9 @@ class TestBinder(testcase.IDLTestcase):
""")
# A struct with only chaining
- self.assert_bind(test_preamble + indent_text(1,
- textwrap.dedent("""
+ self.assert_bind(test_preamble + indent_text(
+ 1,
+ textwrap.dedent("""
bar1:
description: foo
strict: true
@@ -973,8 +986,9 @@ class TestBinder(testcase.IDLTestcase):
""")))
# Chaining struct's fields and explicit fields
- self.assert_bind(test_preamble + indent_text(1,
- textwrap.dedent("""
+ self.assert_bind(test_preamble + indent_text(
+ 1,
+ textwrap.dedent("""
bar1:
description: foo
strict: true
@@ -985,8 +999,9 @@ class TestBinder(testcase.IDLTestcase):
""")))
# Chained types and structs
- self.assert_bind(test_preamble + indent_text(1,
- textwrap.dedent("""
+ self.assert_bind(test_preamble + indent_text(
+ 1,
+ textwrap.dedent("""
bar1:
description: foo
strict: false
@@ -999,8 +1014,9 @@ class TestBinder(testcase.IDLTestcase):
""")))
# Non-strict chained struct
- self.assert_bind(test_preamble + indent_text(1,
- textwrap.dedent("""
+ self.assert_bind(test_preamble + indent_text(
+ 1,
+ textwrap.dedent("""
bar1:
description: foo
strict: false
@@ -1011,8 +1027,9 @@ class TestBinder(testcase.IDLTestcase):
""")))
# Inline Chained struct with strict true
- self.assert_bind(test_preamble + indent_text(1,
- textwrap.dedent("""
+ self.assert_bind(test_preamble + indent_text(
+ 1,
+ textwrap.dedent("""
bar1:
description: foo
strict: true
@@ -1031,8 +1048,9 @@ class TestBinder(testcase.IDLTestcase):
""")))
# Inline Chained struct with strict true and inline_chained_structs defaulted
- self.assert_bind(test_preamble + indent_text(1,
- textwrap.dedent("""
+ self.assert_bind(test_preamble + indent_text(
+ 1,
+ textwrap.dedent("""
bar1:
description: foo
strict: true
@@ -1086,8 +1104,10 @@ class TestBinder(testcase.IDLTestcase):
""")
# Non-existing chained struct
- self.assert_bind_fail(test_preamble + indent_text(1,
- textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + indent_text(
+ 1,
+ textwrap.dedent("""
bar1:
description: foo
strict: true
@@ -1096,8 +1116,10 @@ class TestBinder(testcase.IDLTestcase):
""")), idl.errors.ERROR_ID_UNKNOWN_TYPE)
# Type as chained struct
- self.assert_bind_fail(test_preamble + indent_text(1,
- textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + indent_text(
+ 1,
+ textwrap.dedent("""
bar1:
description: foo
strict: true
@@ -1106,8 +1128,10 @@ class TestBinder(testcase.IDLTestcase):
""")), idl.errors.ERROR_ID_CHAINED_STRUCT_NOT_FOUND)
# Struct as chained type
- self.assert_bind_fail(test_preamble + indent_text(1,
- textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + indent_text(
+ 1,
+ textwrap.dedent("""
bar1:
description: foo
strict: false
@@ -1116,8 +1140,10 @@ class TestBinder(testcase.IDLTestcase):
""")), idl.errors.ERROR_ID_CHAINED_TYPE_NOT_FOUND)
# Duplicated field names across chained struct's fields and fields
- self.assert_bind_fail(test_preamble + indent_text(1,
- textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + indent_text(
+ 1,
+ textwrap.dedent("""
bar1:
description: foo
strict: false
@@ -1128,8 +1154,10 @@ class TestBinder(testcase.IDLTestcase):
""")), idl.errors.ERROR_ID_CHAINED_DUPLICATE_FIELD)
# Duplicated field names across chained structs
- self.assert_bind_fail(test_preamble + indent_text(1,
- textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + indent_text(
+ 1,
+ textwrap.dedent("""
bar1:
description: foo
strict: false
@@ -1139,8 +1167,10 @@ class TestBinder(testcase.IDLTestcase):
""")), idl.errors.ERROR_ID_CHAINED_DUPLICATE_FIELD)
# Chained struct with strict true
- self.assert_bind_fail(test_preamble + indent_text(1,
- textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + indent_text(
+ 1,
+ textwrap.dedent("""
bar1:
description: foo
strict: true
@@ -1159,8 +1189,10 @@ class TestBinder(testcase.IDLTestcase):
""")), idl.errors.ERROR_ID_CHAINED_NO_NESTED_STRUCT_STRICT)
# Chained struct with nested chained struct
- self.assert_bind_fail(test_preamble + indent_text(1,
- textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + indent_text(
+ 1,
+ textwrap.dedent("""
bar1:
description: foo
strict: false
@@ -1178,8 +1210,10 @@ class TestBinder(testcase.IDLTestcase):
""")), idl.errors.ERROR_ID_CHAINED_NO_NESTED_CHAINED)
# Chained struct with nested chained type
- self.assert_bind_fail(test_preamble + indent_text(1,
- textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + indent_text(
+ 1,
+ textwrap.dedent("""
bar1:
description: foo
strict: false
@@ -1304,7 +1338,8 @@ class TestBinder(testcase.IDLTestcase):
""")
# Test array of enums
- self.assert_bind_fail(test_preamble + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + textwrap.dedent("""
structs:
foo1:
description: foo
@@ -1355,7 +1390,8 @@ class TestBinder(testcase.IDLTestcase):
""")
# Commands cannot be fields in other commands
- self.assert_bind_fail(test_preamble + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + textwrap.dedent("""
commands:
foo:
description: foo
@@ -1371,7 +1407,8 @@ class TestBinder(testcase.IDLTestcase):
"""), idl.errors.ERROR_ID_FIELD_NO_COMMAND)
# Commands cannot be fields in structs
- self.assert_bind_fail(test_preamble + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + textwrap.dedent("""
commands:
foo:
description: foo
@@ -1387,7 +1424,8 @@ class TestBinder(testcase.IDLTestcase):
"""), idl.errors.ERROR_ID_FIELD_NO_COMMAND)
# Commands cannot have a field as the same name
- self.assert_bind_fail(test_preamble + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + textwrap.dedent("""
commands:
foo:
description: foo
@@ -1488,7 +1526,8 @@ class TestBinder(testcase.IDLTestcase):
""")
# A struct
- self.assert_bind_fail(test_preamble + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + textwrap.dedent("""
structs:
foo:
description: foo
@@ -1499,7 +1538,8 @@ class TestBinder(testcase.IDLTestcase):
"""), idl.errors.ERROR_ID_STRUCT_NO_DOC_SEQUENCE)
# A non-array type
- self.assert_bind_fail(test_preamble + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + textwrap.dedent("""
commands:
foo:
description: foo
@@ -1511,7 +1551,8 @@ class TestBinder(testcase.IDLTestcase):
"""), idl.errors.ERROR_ID_NO_DOC_SEQUENCE_FOR_NON_ARRAY)
# An array of a scalar
- self.assert_bind_fail(test_preamble2 + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble2 + textwrap.dedent("""
commands:
foo:
description: foo
@@ -1523,7 +1564,8 @@ class TestBinder(testcase.IDLTestcase):
"""), idl.errors.ERROR_ID_NO_DOC_SEQUENCE_FOR_NON_OBJECT)
# An array of 'any'
- self.assert_bind_fail(test_preamble2 + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble2 + textwrap.dedent("""
commands:
foo:
description: foo
@@ -1585,7 +1627,8 @@ class TestBinder(testcase.IDLTestcase):
""")
# supports_doc_sequence must be a bool
- self.assert_bind_fail(test_preamble + textwrap.dedent("""
+ self.assert_bind_fail(
+ test_preamble + textwrap.dedent("""
commands:
foo:
description: foo
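
Besides the yapf reflow of the assert_bind_fail calls, the test changes above swap assertEquals for assertEqual. A minimal sketch of the distinction:

    import unittest

    class AliasDemo(unittest.TestCase):
        def test_spelling(self):
            # assertEquals was a deprecated alias of assertEqual; the alias
            # is dropped in newer Python releases, so the patch standardizes
            # on assertEqual.
            self.assertEqual('something', 'something')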
diff --git a/buildscripts/idl/tests/test_generator.py b/buildscripts/idl/tests/test_generator.py
index 83d0a1a7db5..393c9d7754d 100644
--- a/buildscripts/idl/tests/test_generator.py
+++ b/buildscripts/idl/tests/test_generator.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python
#
# Copyright (C) 2018-present MongoDB, Inc.
#
@@ -36,8 +36,6 @@ idl base directory:
$ coverage run run_tests.py && coverage html
"""
-from __future__ import absolute_import, print_function, unicode_literals
-
import os
import unittest
@@ -72,8 +70,8 @@ class TestGenerator(testcase.IDLTestcase):
unittest_idl_file = os.path.join(idl_dir, 'unittest.idl')
if not os.path.exists(unittest_idl_file):
- unittest.skip("Skipping IDL Generator testing since %s could not be found." %
- (unittest_idl_file))
+ unittest.skip(
+ "Skipping IDL Generator testing since %s could not be found." % (unittest_idl_file))
return
args.input_file = os.path.join(idl_dir, 'unittest_import.idl')
diff --git a/buildscripts/idl/tests/test_import.py b/buildscripts/idl/tests/test_import.py
index a9aa64d2f40..89026dd9162 100644
--- a/buildscripts/idl/tests/test_import.py
+++ b/buildscripts/idl/tests/test_import.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python
#
# Copyright (C) 2018-present MongoDB, Inc.
#
@@ -29,8 +29,6 @@
#
"""Test cases for IDL binder."""
-from __future__ import absolute_import, print_function, unicode_literals
-
import io
import textwrap
import unittest
@@ -67,7 +65,7 @@ class DictionaryImportResolver(idl.parser.ImportResolverBase):
return "imported_%s" % (imported_file_name)
def open(self, resolved_file_name):
- # type: (unicode) -> Any
+ # type: (str) -> Any
"""Return an io.Stream for the requested file."""
assert resolved_file_name.startswith("imported_")
imported_file_name = resolved_file_name.replace("imported_", "")
diff --git a/buildscripts/idl/tests/test_parser.py b/buildscripts/idl/tests/test_parser.py
index 3e7c37acb5a..b1531cc12b8 100644
--- a/buildscripts/idl/tests/test_parser.py
+++ b/buildscripts/idl/tests/test_parser.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python
#
# Copyright (C) 2018-present MongoDB, Inc.
#
@@ -30,8 +30,6 @@
"""Test cases for IDL parser."""
# pylint: disable=too-many-lines
-from __future__ import absolute_import, print_function, unicode_literals
-
import textwrap
import unittest
@@ -964,7 +962,8 @@ class TestParser(testcase.IDLTestcase):
""")
# Commands and structs with same name
- self.assert_parse_fail(test_preamble + textwrap.dedent("""
+ self.assert_parse_fail(
+ test_preamble + textwrap.dedent("""
commands:
foo:
description: foo
@@ -980,7 +979,8 @@ class TestParser(testcase.IDLTestcase):
"""), idl.errors.ERROR_ID_DUPLICATE_SYMBOL)
# Commands and types with same name
- self.assert_parse_fail(test_preamble + textwrap.dedent("""
+ self.assert_parse_fail(
+ test_preamble + textwrap.dedent("""
commands:
string:
description: foo
diff --git a/buildscripts/idl/tests/testcase.py b/buildscripts/idl/tests/testcase.py
index 132a6190318..ed1eb748b51 100644
--- a/buildscripts/idl/tests/testcase.py
+++ b/buildscripts/idl/tests/testcase.py
@@ -27,8 +27,6 @@
#
"""Utility methods and classes for testing IDL passes."""
-from __future__ import absolute_import, print_function, unicode_literals
-
import unittest
from typing import Any, Tuple
@@ -53,12 +51,12 @@ class NothingImportResolver(idl.parser.ImportResolverBase):
"""An import resolver that does nothing."""
def resolve(self, base_file, imported_file_name):
- # type: (unicode, unicode) -> unicode
+ # type: (str, str) -> str
"""Return the complete path to an imported file name."""
raise NotImplementedError()
def open(self, imported_file_name):
- # type: (unicode) -> Any
+ # type: (str) -> Any
"""Return an io.Stream for the requested file."""
raise NotImplementedError()
@@ -67,7 +65,7 @@ class IDLTestcase(unittest.TestCase):
"""IDL Test case base class."""
def _parse(self, doc_str, resolver):
- # type: (unicode, idl.parser.ImportResolverBase) -> idl.syntax.IDLParsedSpec
+ # type: (str, idl.parser.ImportResolverBase) -> idl.syntax.IDLParsedSpec
"""Parse a document and throw a unittest failure if it fails to parse as a valid YAML document."""
try:
@@ -76,22 +74,23 @@ class IDLTestcase(unittest.TestCase):
self.fail("Failed to parse document:\n%s" % (doc_str))
def _assert_parse(self, doc_str, parsed_doc):
- # type: (unicode, idl.syntax.IDLParsedSpec) -> None
+ # type: (str, idl.syntax.IDLParsedSpec) -> None
"""Assert a document parsed correctly by the IDL compiler and returned no errors."""
- self.assertIsNone(parsed_doc.errors,
- "Expected no parser errors\nFor document:\n%s\nReceived errors:\n\n%s" %
- (doc_str, errors_to_str(parsed_doc.errors)))
+ self.assertIsNone(
+ parsed_doc.errors,
+ "Expected no parser errors\nFor document:\n%s\nReceived errors:\n\n%s" %
+ (doc_str, errors_to_str(parsed_doc.errors)))
self.assertIsNotNone(parsed_doc.spec, "Expected a parsed doc")
def assert_parse(self, doc_str, resolver=NothingImportResolver()):
- # type: (unicode, idl.parser.ImportResolverBase) -> None
+ # type: (str, idl.parser.ImportResolverBase) -> None
"""Assert a document parsed correctly by the IDL compiler and returned no errors."""
parsed_doc = self._parse(doc_str, resolver)
self._assert_parse(doc_str, parsed_doc)
def assert_parse_fail(self, doc_str, error_id, multiple=False,
resolver=NothingImportResolver()):
- # type: (unicode, unicode, bool, idl.parser.ImportResolverBase) -> None
+ # type: (str, str, bool, idl.parser.ImportResolverBase) -> None
"""
Assert a document parsed correctly by the YAML parser, but not by the IDL compiler.
@@ -115,22 +114,22 @@ class IDLTestcase(unittest.TestCase):
(doc_str, error_id, errors_to_str(parsed_doc.errors)))
def assert_bind(self, doc_str, resolver=NothingImportResolver()):
- # type: (unicode, idl.parser.ImportResolverBase) -> idl.ast.IDLBoundSpec
+ # type: (str, idl.parser.ImportResolverBase) -> idl.ast.IDLBoundSpec
"""Assert a document parsed and bound correctly by the IDL compiler and returned no errors."""
parsed_doc = self._parse(doc_str, resolver)
self._assert_parse(doc_str, parsed_doc)
bound_doc = idl.binder.bind(parsed_doc.spec)
- self.assertIsNone(bound_doc.errors,
- "Expected no binder errors\nFor document:\n%s\nReceived errors:\n\n%s" %
- (doc_str, errors_to_str(bound_doc.errors)))
+ self.assertIsNone(
+ bound_doc.errors, "Expected no binder errors\nFor document:\n%s\nReceived errors:\n\n%s"
+ % (doc_str, errors_to_str(bound_doc.errors)))
self.assertIsNotNone(bound_doc.spec, "Expected a bound doc")
return bound_doc.spec
def assert_bind_fail(self, doc_str, error_id, resolver=NothingImportResolver()):
- # type: (unicode, unicode, idl.parser.ImportResolverBase) -> None
+ # type: (str, str, idl.parser.ImportResolverBase) -> None
"""
Assert a document parsed correctly by the YAML parser and IDL parser, but not bound by the IDL binder.
@@ -156,7 +155,7 @@ class IDLTestcase(unittest.TestCase):
(doc_str, error_id, errors_to_str(bound_doc.errors)))
def assert_generate(self, doc_str, resolver=NothingImportResolver()):
- # type: (unicode, idl.parser.ImportResolverBase) -> Tuple[unicode,unicode]
+ # type: (str, idl.parser.ImportResolverBase) -> Tuple[str,str]
"""Assert a document parsed, bound, and generated correctly by the IDL compiler."""
spec = self.assert_bind(doc_str, resolver)
diff --git a/buildscripts/jiraclient.py b/buildscripts/jiraclient.py
index 86240865ad8..fd0a3fc43b6 100644
--- a/buildscripts/jiraclient.py
+++ b/buildscripts/jiraclient.py
@@ -1,7 +1,5 @@
"""Module to access a JIRA server."""
-from __future__ import absolute_import
-
import jira
diff --git a/buildscripts/lint.py b/buildscripts/lint.py
index 11bae7fea4f..ece3a08841b 100644
--- a/buildscripts/lint.py
+++ b/buildscripts/lint.py
@@ -1,8 +1,5 @@
"""Lint module."""
-from __future__ import absolute_import
-from __future__ import print_function
-
import codecs
import os
import sys
@@ -35,7 +32,7 @@ class CheckForConfigH(object):
'MONGO_CONFIG define used without prior inclusion of config.h.')
-def run_lint(paths, nudge_on=False):
+def run_lint(paths, nudge_on=False): # pylint: disable=too-many-statements
"""Run lint."""
# errors are as of 10/14
# idea is not to let in any new type of error
diff --git a/buildscripts/linter/base.py b/buildscripts/linter/base.py
index f22f59e4f01..bb22ea7cf80 100644
--- a/buildscripts/linter/base.py
+++ b/buildscripts/linter/base.py
@@ -1,16 +1,12 @@
"""Base class and support functions for linters."""
-from __future__ import absolute_import
-from __future__ import print_function
from abc import ABCMeta, abstractmethod
from typing import Dict, List, Optional
-class LinterBase(object):
+class LinterBase(object, metaclass=ABCMeta):
"""Base Class for all linters."""
- __metaclass__ = ABCMeta
-
def __init__(self, cmd_name, required_version, cmd_location=None):
# type: (str, str, Optional[str]) -> None
"""
diff --git a/buildscripts/linter/git.py b/buildscripts/linter/git.py
index 9b5f55481d4..4fa15f65907 100644
--- a/buildscripts/linter/git.py
+++ b/buildscripts/linter/git.py
@@ -1,6 +1,4 @@
"""Git Utility functions."""
-from __future__ import absolute_import
-from __future__ import print_function
import itertools
import os
@@ -196,7 +194,7 @@ def get_files_to_check_from_patch(patches, filter_function):
lines = [] # type: List[str]
for patch in patches:
- with open(patch, "rb") as infile:
+ with open(patch, "r") as infile:
lines += infile.readlines()
candidates = [check.match(line).group(1) for line in lines if check.match(line)]
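
The `open(patch, "rb")` → `open(patch, "r")` change matters because the lines feed str-based regexes: under Python 3, binary mode would yield bytes and the match would raise TypeError. A self-contained sketch of that assumption (the pattern and lines below are illustrative, not the module's actual regex):

    import re

    check = re.compile(r'^\+\+\+ b/(.*)')
    lines = ['+++ b/buildscripts/lint.py\n', ' unchanged context\n']  # stand-in for infile.readlines()
    candidates = [check.match(line).group(1) for line in lines if check.match(line)]
    print(candidates)  # -> ['buildscripts/lint.py']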
diff --git a/buildscripts/linter/mypy.py b/buildscripts/linter/mypy.py
index c720ae8f870..1189093dd2c 100644
--- a/buildscripts/linter/mypy.py
+++ b/buildscripts/linter/mypy.py
@@ -1,6 +1,4 @@
"""Mypy linter support module."""
-from __future__ import absolute_import
-from __future__ import print_function
import os
from typing import List
@@ -26,7 +24,12 @@ class MypyLinter(base.LinterBase):
def get_lint_cmd_args(self, file_name):
# type: (str) -> List[str]
"""Get the command to run a linter."""
- return [file_name]
+ # Only idl and linter should be type checked by mypy. Other
+ # files return errors under python 3 type checking. If we
+ # return an empty list the runner will skip this file.
+ if 'idl' in file_name or 'linter' in file_name:
+ return [file_name]
+ return []
def ignore_interpreter(self):
# type: () -> bool
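
The effect of returning [] here is cooperative: the runner (see the runner.py hunk below) treats empty lint args as "skip this file." A sketch of the filter on its own, assuming plain path strings:

    def get_lint_cmd_args(file_name):
        # type: (str) -> list
        # Mirrors the filter above: only idl/ and linter/ sources are type
        # checked; everything else yields no mypy arguments.
        if 'idl' in file_name or 'linter' in file_name:
            return [file_name]
        return []

    assert get_lint_cmd_args('buildscripts/idl/idl/parser.py') == ['buildscripts/idl/idl/parser.py']
    assert get_lint_cmd_args('buildscripts/eslint.py') == []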
diff --git a/buildscripts/linter/parallel.py b/buildscripts/linter/parallel.py
index 0648bfb16e7..b80ec7f2c1b 100644
--- a/buildscripts/linter/parallel.py
+++ b/buildscripts/linter/parallel.py
@@ -1,8 +1,6 @@
"""Utility code to execute code in parallel."""
-from __future__ import absolute_import
-from __future__ import print_function
-import Queue
+import queue
import threading
import time
from multiprocessing import cpu_count
@@ -17,7 +15,7 @@ def parallel_process(items, func):
except NotImplementedError:
cpus = 1
- task_queue = Queue.Queue() # type: Queue.Queue
+    task_queue = queue.Queue()  # type: queue.Queue
# Use a list so that worker function will capture this variable
pp_event = threading.Event()
@@ -30,7 +28,7 @@ def parallel_process(items, func):
while not pp_event.is_set():
try:
item = task_queue.get_nowait()
- except Queue.Empty:
+ except queue.Empty:
# if the queue is empty, exit the worker thread
pp_event.set()
return
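
The Queue → queue change is the Python 3 module rename (Queue.Queue becomes queue.Queue, Queue.Empty becomes queue.Empty). A minimal runnable sketch of the drain-until-empty worker idiom used here:

    import queue
    import threading

    task_queue = queue.Queue()  # renamed from Queue.Queue in Python 3
    for item in range(4):
        task_queue.put(item)

    def worker():
        while True:
            try:
                task_queue.get_nowait()
            except queue.Empty:  # renamed from Queue.Empty
                return

    threads = [threading.Thread(target=worker) for _ in range(2)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()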
diff --git a/buildscripts/linter/pydocstyle.py b/buildscripts/linter/pydocstyle.py
index b259becfd1c..8d6b7dde0c7 100644
--- a/buildscripts/linter/pydocstyle.py
+++ b/buildscripts/linter/pydocstyle.py
@@ -1,6 +1,4 @@
"""PyDocStyle linter support module."""
-from __future__ import absolute_import
-from __future__ import print_function
from typing import List
diff --git a/buildscripts/linter/pylint.py b/buildscripts/linter/pylint.py
index 71a062f9076..58f452d09b6 100644
--- a/buildscripts/linter/pylint.py
+++ b/buildscripts/linter/pylint.py
@@ -1,6 +1,4 @@
"""PyLint linter support module."""
-from __future__ import absolute_import
-from __future__ import print_function
import os
from typing import List
@@ -15,7 +13,7 @@ class PyLintLinter(base.LinterBase):
def __init__(self):
# type: () -> None
"""Create a pylint linter."""
- super(PyLintLinter, self).__init__("pylint", "pylint 1.9.3")
+ super(PyLintLinter, self).__init__("pylint", "pylint 2.3.1")
def get_lint_version_cmd_args(self):
# type: () -> List[str]
diff --git a/buildscripts/linter/runner.py b/buildscripts/linter/runner.py
index 83b58dd9884..af2a83de62d 100644
--- a/buildscripts/linter/runner.py
+++ b/buildscripts/linter/runner.py
@@ -1,6 +1,4 @@
"""Class to support running various linters in a common framework."""
-from __future__ import absolute_import
-from __future__ import print_function
import difflib
import logging
@@ -23,19 +21,22 @@ def _check_version(linter, cmd_path, args):
logging.info(str(cmd))
process_handle = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, stderr = process_handle.communicate()
+ output = output.decode('utf-8')
if process_handle.returncode:
- logging.info("Version check failed for [%s], return code '%d'." +
- "Standard Output:\n%s\nStandard Error:\n%s", cmd,
- process_handle.returncode, output, stderr)
+ logging.info(
+ "Version check failed for [%s], return code '%d'."
+ "Standard Output:\n%s\nStandard Error:\n%s", cmd, process_handle.returncode, output,
+ stderr)
required_version = re.escape(linter.required_version)
pattern = r"\b%s\b" % (required_version)
if not re.search(pattern, output):
- logging.info("Linter %s has wrong version for '%s'. Expected '%s'," +
- "Standard Output:\n'%s'\nStandard Error:\n%s", linter.cmd_name, cmd,
- required_version, output, stderr)
+ logging.info(
+ "Linter %s has wrong version for '%s'. Expected '%s',"
+ "Standard Output:\n'%s'\nStandard Error:\n%s", linter.cmd_name, cmd,
+ required_version, output, stderr)
return False
except OSError as os_error:
@@ -126,7 +127,8 @@ def find_linters(linter_list, config_dict):
for linter in linter_list:
linter_instance = _find_linter(linter, config_dict)
if not linter_instance:
- logging.error("""\
+ logging.error(
+ """\
Could not find the correct version of linter '%s', expected '%s'. Check your
PATH environment variable or re-run with --verbose for more information.
@@ -166,16 +168,25 @@ class LintRunner(object):
# type: (base.LinterInstance, str) -> bool
"""Run the specified linter for the file."""
- cmd = linter.cmd_path + linter.linter.get_lint_cmd_args(file_name)
+        # Copy cmd_path so += cannot mutate it in place (which would also
+        # defeat the equality check below).
+        cmd = list(linter.cmd_path)
+        cmd += linter.linter.get_lint_cmd_args(file_name)
+ if cmd == linter.cmd_path:
+ # If args is empty it means we didn't get a valid command
+ # to run and so should skip this file.
+ #
+ # For example the MyPy linter class will return empty args
+ # for non-idl files since they shouldn't be type checked.
+ return True
+
logging.debug(str(cmd))
try:
if linter.linter.needs_file_diff():
# Need a file diff
with open(file_name, 'rb') as original_text:
- original_file = original_text.read()
+ original_file = original_text.read().decode('utf-8')
- formatted_file = subprocess.check_output(cmd)
+ formatted_file = subprocess.check_output(cmd).decode('utf-8')
if original_file != formatted_file:
original_lines = original_file.splitlines()
formatted_lines = formatted_file.splitlines()
@@ -196,7 +207,7 @@ class LintRunner(object):
return False
else:
- output = subprocess.check_output(cmd)
+ output = subprocess.check_output(cmd).decode('utf-8')
# On Windows, mypy.bat returns 0 even if there are length failures so we need to
# check if there was any output
@@ -205,7 +216,7 @@ class LintRunner(object):
return False
except subprocess.CalledProcessError as cpe:
- self._safe_print("CMD [%s] failed:\n%s" % (cmd, cpe.output))
+ self._safe_print("CMD [%s] failed:\n%s" % (cmd, cpe.output.decode('utf-8')))
return False
return True
@@ -217,7 +228,7 @@ class LintRunner(object):
logging.debug(str(cmd))
try:
- subprocess.check_output(cmd)
+ subprocess.check_output(cmd).decode('utf-8')
except subprocess.CalledProcessError as cpe:
self._safe_print("CMD [%s] failed:\n%s" % (cmd, cpe.output))
return False
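
Most of the runner.py changes are the same Python 3 adjustment: subprocess output is bytes and must be decoded before string comparison, regex search, or printing. A minimal sketch (the command is illustrative):

    import re
    import subprocess
    import sys

    # check_output returns bytes under Python 3; decode before str operations.
    output = subprocess.check_output([sys.executable, '--version']).decode('utf-8')
    print(bool(re.search(r'\b\d+\.\d+\.\d+\b', output)))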
diff --git a/buildscripts/linter/yapf.py b/buildscripts/linter/yapf.py
index 1ea3da7bae3..d787810a0da 100644
--- a/buildscripts/linter/yapf.py
+++ b/buildscripts/linter/yapf.py
@@ -1,6 +1,4 @@
"""YAPF linter support module."""
-from __future__ import absolute_import
-from __future__ import print_function
from typing import List
@@ -13,7 +11,7 @@ class YapfLinter(base.LinterBase):
def __init__(self):
# type: () -> None
"""Create a yapf linter."""
- super(YapfLinter, self).__init__("yapf", "yapf 0.21.0")
+ super(YapfLinter, self).__init__("yapf", "yapf 0.26.0")
def get_lint_version_cmd_args(self):
# type: () -> List[str]
diff --git a/buildscripts/make_archive.py b/buildscripts/make_archive.py
index a2681a0efdc..ad0fa9e93ae 100755
--- a/buildscripts/make_archive.py
+++ b/buildscripts/make_archive.py
@@ -96,14 +96,14 @@ def make_tar_archive(opts):
enclosing_file_directory = os.path.dirname(temp_file_location)
if not os.path.exists(enclosing_file_directory):
os.makedirs(enclosing_file_directory)
- print "copying %s => %s" % (input_filename, temp_file_location)
+ print("copying %s => %s" % (input_filename, temp_file_location))
if os.path.isdir(input_filename):
shutil.copytree(input_filename, temp_file_location)
else:
shutil.copy2(input_filename, temp_file_location)
tar_command.append(preferred_filename)
- print " ".join(tar_command)
+ print(" ".join(tar_command))
# execute the full tar command
run_directory = os.path.join(os.getcwd(), enclosing_archive_directory)
proc = Popen(tar_command, stdout=PIPE, stderr=STDOUT, bufsize=0, cwd=run_directory)
@@ -137,10 +137,10 @@ def parse_options(args):
parser = optparse.OptionParser()
parser.add_option('-o', dest='output_filename', default=None,
help='Name of the archive to output.', metavar='FILE')
- parser.add_option('--format', dest='archive_format', default=None, choices=('zip', 'tar',
- 'tgz'),
- help=('Format of archive to create. '
- 'If omitted, use the suffix of the output filename to decide.'))
+ parser.add_option(
+ '--format', dest='archive_format', default=None, choices=('zip', 'tar', 'tgz'),
+ help=('Format of archive to create. '
+ 'If omitted, use the suffix of the output filename to decide.'))
parser.add_option('--transform', action='append', dest='transformations', default=[])
(opts, input_filenames) = parser.parse_args(args)
@@ -171,7 +171,7 @@ def parse_options(args):
xform.replace(os.path.altsep or os.path.sep, os.path.sep).split('=', 1)
for xform in opts.transformations
]
- except Exception, err: # pylint: disable=broad-except
+ except Exception as err: # pylint: disable=broad-except
parser.error(err)
return opts
diff --git a/buildscripts/make_vcxproj.py b/buildscripts/make_vcxproj.py
index 48c8e73f17f..6b8fc04730a 100644
--- a/buildscripts/make_vcxproj.py
+++ b/buildscripts/make_vcxproj.py
@@ -12,13 +12,12 @@ To build mongodb, you must use scons. You can use this project to navigate code
where FILE_NAME is the name of the file to generate, e.g., "mongod"
"""
-from __future__ import absolute_import, print_function
import io
import json
import os
import re
-import StringIO
import sys
import uuid
import xml.etree.ElementTree as ET
@@ -111,7 +110,7 @@ def _replace_vcxproj(file_name, restore_elements):
saved_value = restore_elements[(parent.tag, child.tag, cond)]
child.text = saved_value
- stream = StringIO.StringIO()
+ stream = io.StringIO()
tree.write(stream)
@@ -173,10 +172,10 @@ class ProjFileGenerator(object): # pylint: disable=too-many-instance-attributes
for command in self.compiles:
defines = command["defines"].difference(common_defines)
if defines:
- self.vcxproj.write(
- " <ClCompile Include=\"" + command["file"] + "\"><PreprocessorDefinitions>" +
- ';'.join(defines) + ";%(PreprocessorDefinitions)" +
- "</PreprocessorDefinitions></ClCompile>\n")
+ self.vcxproj.write(" <ClCompile Include=\"" + command["file"] +
+ "\"><PreprocessorDefinitions>" + ';'.join(defines) +
+ ";%(PreprocessorDefinitions)" +
+ "</PreprocessorDefinitions></ClCompile>\n")
else:
self.vcxproj.write(" <ClCompile Include=\"" + command["file"] + "\" />\n")
self.vcxproj.write(" </ItemGroup>\n")
diff --git a/buildscripts/mobile/adb_monitor.py b/buildscripts/mobile/adb_monitor.py
index d33a37e08d8..1f7c350f309 100644
--- a/buildscripts/mobile/adb_monitor.py
+++ b/buildscripts/mobile/adb_monitor.py
@@ -132,14 +132,14 @@ class Adb(object):
def systrace_stop(self, output_file=None):
"""Stop the systrace.py script."""
- self._cmd.send_to_process("bye")
+ self._cmd.send_to_process(b"bye")
with open(self._tempfile) as fh:
buff = fh.read()
os.remove(self._tempfile)
self.logger.debug("systrace_stop: %s", buff)
if "Wrote trace" not in buff:
self.logger.error("CPU file not saved: %s", buff)
- if os.path.isfile(output_file):
+ if output_file and os.path.isfile(output_file):
os.remove(output_file)
@@ -410,8 +410,9 @@ def main(): #pylint: disable=too-many-statements
output_files[options.cpu_file] = fileops.getmtime(options.cpu_file)
LOGGER.setLevel(options.log_level.upper())
- LOGGER.info("This program can be cleanly terminated by issuing the following command:"
- "\n\t\t'kill -INT %d'", os.getpid())
+ LOGGER.info(
+ "This program can be cleanly terminated by issuing the following command:"
+ "\n\t\t'kill -INT %d'", os.getpid())
adb = Adb(options.adb_binary)
LOGGER.info("Detected devices by adb:\n%s%s", adb.devices(), adb.device_available())
diff --git a/buildscripts/mobile/benchrun_embedded_setup_android.py b/buildscripts/mobile/benchrun_embedded_setup_android.py
index 92f18d334a1..94a08077f93 100644
--- a/buildscripts/mobile/benchrun_embedded_setup_android.py
+++ b/buildscripts/mobile/benchrun_embedded_setup_android.py
@@ -1,8 +1,6 @@
#!/usr/bin/env python
"""Setup an Android device to run the benchrun_embedded test suite."""
-from __future__ import print_function
-
import glob
import logging
import optparse
@@ -13,7 +11,7 @@ import sys
import tarfile
import tempfile
import time
-import urllib
+import urllib.request, urllib.parse, urllib.error
# pylint: disable=wrong-import-position
# Get relative imports to work when the package is not installed on the PYTHONPATH.
@@ -30,7 +28,7 @@ def download_and_untar(url, root_dir):
"""Download url and untar into root_dir."""
temp_file = tempfile.NamedTemporaryFile(delete=False, suffix=".tgz").name
LOGGER.info("Downloading %s", url)
- urllib.urlretrieve(url, temp_file)
+ urllib.request.urlretrieve(url, temp_file)
with tarfile.open(temp_file, "r:gz") as tar:
tar.extractall(root_dir)
os.remove(temp_file)
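
The urllib change reflects the Python 3 split of urllib into urllib.request, urllib.parse, and urllib.error; urlretrieve now lives in urllib.request. A sketch (the URL and paths are placeholders, not ones used by this script, and fetching requires network access):

    import urllib.request

    # urllib.urlretrieve (Py2) is urllib.request.urlretrieve (Py3).
    path, headers = urllib.request.urlretrieve(
        'https://example.com/embedded-sdk.tgz', '/tmp/embedded-sdk.tgz')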
@@ -130,10 +128,10 @@ def main():
help="The remote directory to store the embedded SDK files. Defaults to '%default'.",
default=posixpath.join(benchrun_root, "sdk"))
- device_options.add_option("--benchrunJsonRemoteDir", dest="json_remote_dir",
- help="The remote directory to store the benchrun JSON files."
- " Defaults to '%default'.", default=posixpath.join(
- benchrun_root, "testcases"))
+ device_options.add_option(
+ "--benchrunJsonRemoteDir", dest="json_remote_dir",
+ help="The remote directory to store the benchrun JSON files."
+ " Defaults to '%default'.", default=posixpath.join(benchrun_root, "testcases"))
sdk_url = "https://s3.amazonaws.com/mciuploads/mongodb-mongo-master/embedded-sdk-test/embedded-sdk-android-arm64-latest.tgz"
sdk_options.add_option(
@@ -142,9 +140,10 @@ def main():
" any required shared object (.so) libraries. Defaults to '%default'."),
default=sdk_url)
- sdk_options.add_option("--sdkLocalDir", dest="sdk_local_dir",
- help="The local directory of embedded SDK files to be copied."
- "If specified, overrides --sdkUrl.", default=None)
+ sdk_options.add_option(
+ "--sdkLocalDir", dest="sdk_local_dir",
+ help="The local directory of embedded SDK files to be copied."
+ "If specified, overrides --sdkUrl.", default=None)
sdk_options.add_option(
"--sdkSaveLocalDir", dest="sdk_save_local_dir",
@@ -159,9 +158,10 @@ def main():
" files to be used in the benchrun embedded test."
" Defaults to '%default'."), default=json_url)
- json_options.add_option("--benchrunJsonLocalDir", dest="json_local_dir",
- help="The local directory of benchrun JSON files to be copied."
- "If specified, overrides --benchrunJsonUrl.", default=None)
+ json_options.add_option(
+ "--benchrunJsonLocalDir", dest="json_local_dir",
+ help="The local directory of benchrun JSON files to be copied."
+ "If specified, overrides --benchrunJsonUrl.", default=None)
json_options.add_option(
"--benchrunJsonSaveLocalDir", dest="json_save_local_dir",
diff --git a/buildscripts/moduleconfig.py b/buildscripts/moduleconfig.py
index c1a9ae27f87..95554237339 100644
--- a/buildscripts/moduleconfig.py
+++ b/buildscripts/moduleconfig.py
@@ -23,7 +23,6 @@ alter those programs' behavior.
MongoDB module SConscript files can describe libraries, programs and unit tests, just as other
MongoDB SConscript files do.
"""
-from __future__ import print_function
__all__ = ('discover_modules', 'discover_module_directories', 'configure_modules',
'register_module_test') # pylint: disable=undefined-all-variable
diff --git a/buildscripts/mongosymb.py b/buildscripts/mongosymb.py
index bf11ff872c5..cfd6c4f3fbd 100755
--- a/buildscripts/mongosymb.py
+++ b/buildscripts/mongosymb.py
@@ -38,7 +38,7 @@ def symbolize_frames( # pylint: disable=too-many-locals
The somap_list is a list of dictionaries describing individual loaded libraries.
"""
- return {so_entry["b"]: so_entry for so_entry in somap_list if so_entry.has_key("b")}
+ return {so_entry["b"]: so_entry for so_entry in somap_list if "b" in so_entry}
base_addr_map = make_base_addr_map(trace_doc["processInfo"]["somap"])
@@ -52,7 +52,7 @@ def symbolize_frames( # pylint: disable=too-many-locals
addr_base = frame["b"]
else:
addr_base = soinfo.get("vmaddr", "0")
- addr = long(addr_base, 16) + long(frame["o"], 16)
+ addr = int(addr_base, 16) + int(frame["o"], 16)
# addr currently points to the return address which is the one *after* the call. x86 is
# variable length so going backwards is difficult. However llvm-symbolizer seems to do the
# right thing if we just subtract 1 byte here. This has the downside of also adjusting the
@@ -152,8 +152,8 @@ class S3BuildidDbgFileResolver(object):
"""Download debug symbols from S3."""
subprocess.check_call(
['wget',
- 'https://s3.amazonaws.com/%s/%s.debug.gz' %
- (self._s3_bucket, build_id)], cwd=self._cache_dir)
+ 'https://s3.amazonaws.com/%s/%s.debug.gz' % (self._s3_bucket, build_id)],
+ cwd=self._cache_dir)
subprocess.check_call(['gunzip', build_id + ".debug.gz"], cwd=self._cache_dir)
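
Two Python 2-only idioms disappear in mongosymb.py: dict.has_key() (removed in Python 3; use the in operator) and the long builtin (folded into int). A sketch with made-up somap data:

    somap_list = [{'b': '0x7f5e3c000000', 'path': 'libfoo.so'}, {'path': 'vdso'}]
    # dict.has_key() was removed in Python 3; membership uses `in`.
    base_addr_map = {so['b']: so for so in somap_list if 'b' in so}
    # long() was folded into int(); base-16 parsing is unchanged.
    addr = int('0x7f5e3c000000', 16) + int('0x1a0', 16)
    print(hex(addr), sorted(base_addr_map))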
diff --git a/buildscripts/msitrim.py b/buildscripts/msitrim.py
index 2e6473f535d..8e06fd0e395 100644
--- a/buildscripts/msitrim.py
+++ b/buildscripts/msitrim.py
@@ -1,7 +1,5 @@
"""Script to fix up our MSI files."""
-from __future__ import print_function
-
import argparse
import shutil
diff --git a/buildscripts/packager.py b/buildscripts/packager.py
index 5a753f6e1c1..ee2d62dc11d 100755
--- a/buildscripts/packager.py
+++ b/buildscripts/packager.py
@@ -249,14 +249,11 @@ class Distro(object):
self.dname, self.repo_os_version(build_os), repo_directory, self.repo_component(),
self.archname(arch))
elif re.search("(redhat|fedora|centos|amazon)", self.dname):
- return "repo/yum/%s/%s/mongodb-org/%s/%s/RPMS/" % (self.dname,
- self.repo_os_version(build_os),
- repo_directory, self.archname(arch))
+ return "repo/yum/%s/%s/mongodb-org/%s/%s/RPMS/" % (
+ self.dname, self.repo_os_version(build_os), repo_directory, self.archname(arch))
elif re.search("(suse)", self.dname):
- return "repo/zypper/%s/%s/mongodb-org/%s/%s/RPMS/" % (self.dname,
- self.repo_os_version(build_os),
- repo_directory,
- self.archname(arch))
+ return "repo/zypper/%s/%s/mongodb-org/%s/%s/RPMS/" % (
+ self.dname, self.repo_os_version(build_os), repo_directory, self.archname(arch))
else:
raise Exception("BUG: unsupported platform?")
@@ -409,7 +406,7 @@ def main():
prefix = args.prefix
if prefix is None:
prefix = tempfile.mkdtemp()
- print "Working in directory %s" % prefix
+ print("Working in directory %s" % prefix)
os.chdir(prefix)
try:
@@ -449,7 +446,7 @@ def crossproduct(*seqs):
def sysassert(argv):
"""Run argv and assert that it exited with status 0."""
- print "In %s, running %s" % (os.getcwd(), " ".join(argv))
+ print("In %s, running %s" % (os.getcwd(), " ".join(argv)))
sys.stdout.flush()
sys.stderr.flush()
assert subprocess.Popen(argv).wait() == 0
@@ -457,7 +454,7 @@ def sysassert(argv):
def backtick(argv):
"""Run argv and return its output string."""
- print "In %s, running %s" % (os.getcwd(), " ".join(argv))
+ print("In %s, running %s" % (os.getcwd(), " ".join(argv)))
sys.stdout.flush()
sys.stderr.flush()
return subprocess.Popen(argv, stdout=subprocess.PIPE).communicate()[0]
@@ -493,11 +490,11 @@ def unpack_binaries_into(build_os, arch, spec, where):
sysassert(["tar", "xvzf", rootdir + "/" + tarfile(build_os, arch, spec)])
release_dir = glob('mongodb-linux-*')[0]
for releasefile in "bin", "LICENSE-Community.txt", "README", "THIRD-PARTY-NOTICES", "THIRD-PARTY-NOTICES.gotools", "MPL-2":
- print "moving file: %s/%s" % (release_dir, releasefile)
+ print("moving file: %s/%s" % (release_dir, releasefile))
os.rename("%s/%s" % (release_dir, releasefile), releasefile)
os.rmdir(release_dir)
except Exception:
- exc = sys.exc_value
+ exc = sys.exc_info()[1]
os.chdir(rootdir)
raise exc
os.chdir(rootdir)
@@ -515,7 +512,7 @@ def make_package(distro, build_os, arch, spec, srcdir):
# directory, so the debian directory is needed in all cases (and
# innocuous in the debianoids' sdirs).
for pkgdir in ["debian", "rpm"]:
- print "Copying packaging files from %s to %s" % ("%s/%s" % (srcdir, pkgdir), sdir)
+ print("Copying packaging files from %s to %s" % ("%s/%s" % (srcdir, pkgdir), sdir))
# FIXME: sh-dash-cee is bad. See if tarfile can do this.
sysassert([
"sh", "-c",
@@ -609,11 +606,13 @@ def make_deb_repo(repo, distro, build_os):
oldpwd = os.getcwd()
os.chdir(repo + "../../../../../../")
try:
- dirs = set(
- [os.path.dirname(deb)[2:] for deb in backtick(["find", ".", "-name", "*.deb"]).split()])
+ dirs = set([
+ os.path.dirname(deb)[2:]
+ for deb in backtick(["find", ".", "-name", "*.deb"]).decode('utf-8').split()
+ ])
for directory in dirs:
st = backtick(["dpkg-scanpackages", directory, "/dev/null"])
- with open(directory + "/Packages", "w") as fh:
+ with open(directory + "/Packages", "wb") as fh:
fh.write(st)
bt = backtick(["gzip", "-9c", directory + "/Packages"])
with open(directory + "/Packages.gz", "wb") as fh:
@@ -639,8 +638,8 @@ Description: MongoDB packages
os.chdir(repo + "../../")
s2 = backtick(["apt-ftparchive", "release", "."])
try:
- with open("Release", 'w') as fh:
- fh.write(s1)
+ with open("Release", 'wb') as fh:
+ fh.write(s1.encode('utf-8'))
fh.write(s2)
finally:
os.chdir(oldpwd)
@@ -662,7 +661,7 @@ def move_repos_into_place(src, dst): # pylint: disable=too-many-branches
os.mkdir(dname)
break
except OSError:
- exc = sys.exc_value
+ exc = sys.exc_info()[1]
if exc.errno == errno.EEXIST:
pass
else:
@@ -682,7 +681,7 @@ def move_repos_into_place(src, dst): # pylint: disable=too-many-branches
os.symlink(dname, tmpnam)
break
except OSError: # as exc: # Python >2.5
- exc = sys.exc_value
+ exc = sys.exc_info()[1]
if exc.errno == errno.EEXIST:
pass
else:
@@ -700,7 +699,7 @@ def move_repos_into_place(src, dst): # pylint: disable=too-many-branches
os.symlink(os.readlink(dst), oldnam)
break
except OSError: # as exc: # Python >2.5
- exc = sys.exc_value
+ exc = sys.exc_info()[1]
if exc.errno == errno.EEXIST:
pass
else:
@@ -717,9 +716,10 @@ def write_debian_changelog(path, spec, srcdir):
os.chdir(srcdir)
preamble = ""
try:
- sb = preamble + backtick(
- ["sh", "-c",
- "git archive %s debian/changelog | tar xOf -" % spec.metadata_gitspec()])
+ sb = preamble + backtick([
+ "sh", "-c",
+ "git archive %s debian/changelog | tar xOf -" % spec.metadata_gitspec()
+ ]).decode('utf-8')
finally:
os.chdir(oldcwd)
lines = sb.split("\n")
@@ -789,7 +789,8 @@ def make_rpm(distro, build_os, arch, spec, srcdir): # pylint: disable=too-many-
# --macros will be used in Ubuntu.
#
macrofiles = [
- l for l in backtick(["rpm", "--showrc"]).split("\n") if l.startswith("macrofiles")
+ l for l in backtick(["rpm", "--showrc"]).decode('utf-8').split("\n")
+ if l.startswith("macrofiles")
]
flags = []
macropath = os.getcwd() + "/macros"
@@ -877,7 +878,7 @@ def ensure_dir(filename):
try:
os.makedirs(dirpart)
except OSError: # as exc: # Python >2.5
- exc = sys.exc_value
+ exc = sys.exc_info()[1]
if exc.errno == errno.EEXIST:
pass
else:
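
sys.exc_value was a Python 2-only attribute; sys.exc_info()[1] is the portable replacement used throughout packager.py (a plain `except OSError as exc:` would be the fully idiomatic Python 3 form). A sketch of the EEXIST-tolerant pattern these hunks preserve:

    import errno
    import os
    import sys
    import tempfile

    dirpart = tempfile.gettempdir()  # an existing directory, to trigger EEXIST
    try:
        os.makedirs(dirpart)
    except OSError:
        exc = sys.exc_info()[1]  # Py2's sys.exc_value equivalent
        if exc.errno != errno.EEXIST:
            raise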
diff --git a/buildscripts/packager_enterprise.py b/buildscripts/packager_enterprise.py
index ea7b65052f6..acc7524ed7d 100755
--- a/buildscripts/packager_enterprise.py
+++ b/buildscripts/packager_enterprise.py
@@ -36,7 +36,9 @@ import sys
import tempfile
import time
-import packager # pylint: disable=relative-import
+sys.path.append(os.getcwd())
+
+import packager
# The MongoDB names for the architectures we support.
ARCH_CHOICES = ["x86_64", "ppc64le", "s390x", "arm64"]
@@ -161,7 +163,7 @@ def main():
if prefix is None:
prefix = tempfile.mkdtemp()
- print "Working in directory %s" % prefix
+ print("Working in directory %s" % prefix)
os.chdir(prefix)
try:
@@ -222,7 +224,7 @@ def unpack_binaries_into(build_os, arch, spec, where):
os.rename("%s/%s" % (release_dir, releasefile), releasefile)
os.rmdir(release_dir)
except Exception:
- exc = sys.exc_value
+ exc = sys.exc_info()[1]
os.chdir(rootdir)
raise exc
os.chdir(rootdir)
@@ -240,7 +242,7 @@ def make_package(distro, build_os, arch, spec, srcdir):
# directory, so the debian directory is needed in all cases (and
# innocuous in the debianoids' sdirs).
for pkgdir in ["debian", "rpm"]:
- print "Copying packaging files from %s to %s" % ("%s/%s" % (srcdir, pkgdir), sdir)
+ print("Copying packaging files from %s to %s" % ("%s/%s" % (srcdir, pkgdir), sdir))
# FIXME: sh-dash-cee is bad. See if tarfile can do this.
packager.sysassert([
"sh", "-c",
@@ -277,11 +279,11 @@ def make_deb_repo(repo, distro, build_os):
try:
dirs = set([
os.path.dirname(deb)[2:]
- for deb in packager.backtick(["find", ".", "-name", "*.deb"]).split()
+ for deb in packager.backtick(["find", ".", "-name", "*.deb"]).decode('utf-8').split()
])
for directory in dirs:
st = packager.backtick(["dpkg-scanpackages", directory, "/dev/null"])
- with open(directory + "/Packages", "w") as fh:
+ with open(directory + "/Packages", "wb") as fh:
fh.write(st)
bt = packager.backtick(["gzip", "-9c", directory + "/Packages"])
with open(directory + "/Packages.gz", "wb") as fh:
@@ -307,8 +309,8 @@ Description: MongoDB packages
os.chdir(repo + "../../")
s2 = packager.backtick(["apt-ftparchive", "release", "."])
try:
- with open("Release", 'w') as fh:
- fh.write(s1)
+ with open("Release", 'wb') as fh:
+ fh.write(s1.encode('utf-8'))
fh.write(s2)
finally:
os.chdir(oldpwd)
@@ -330,7 +332,7 @@ def move_repos_into_place(src, dst): # pylint: disable=too-many-branches
os.mkdir(dname)
break
except OSError:
- exc = sys.exc_value
+ exc = sys.exc_info()[1]
if exc.errno == errno.EEXIST:
pass
else:
@@ -350,7 +352,7 @@ def move_repos_into_place(src, dst): # pylint: disable=too-many-branches
os.symlink(dname, tmpnam)
break
except OSError: # as exc: # Python >2.5
- exc = sys.exc_value
+ exc = sys.exc_info()[1]
if exc.errno == errno.EEXIST:
pass
else:
@@ -368,7 +370,7 @@ def move_repos_into_place(src, dst): # pylint: disable=too-many-branches
os.symlink(os.readlink(dst), oldnam)
break
except OSError: # as exc: # Python >2.5
- exc = sys.exc_value
+ exc = sys.exc_info()[1]
if exc.errno == errno.EEXIST:
pass
else:
diff --git a/buildscripts/promote_silent_failures.py b/buildscripts/promote_silent_failures.py
index cf1ddab7c7a..b089e0901f9 100644
--- a/buildscripts/promote_silent_failures.py
+++ b/buildscripts/promote_silent_failures.py
@@ -5,9 +5,6 @@ Any test files with at least 2 executions in the report.json file that have a "s
this script will change the outputted report to have a "fail" status instead.
"""
-from __future__ import absolute_import
-from __future__ import print_function
-
import collections
import json
import optparse
@@ -31,10 +28,11 @@ def main():
usage = "usage: %prog [options] report.json"
parser = optparse.OptionParser(usage=usage)
- parser.add_option("-o", "--output-file", dest="outfile", default="-",
- help=("If '-', then the report file is written to stdout."
- " Any other value is treated as the output file name. By default,"
- " output is written to stdout."))
+ parser.add_option(
+ "-o", "--output-file", dest="outfile", default="-",
+ help=("If '-', then the report file is written to stdout."
+ " Any other value is treated as the output file name. By default,"
+ " output is written to stdout."))
(options, args) = parser.parse_args()
diff --git a/buildscripts/pylinters.py b/buildscripts/pylinters.py
index 16e987f0da8..80629ea84d8 100755
--- a/buildscripts/pylinters.py
+++ b/buildscripts/pylinters.py
@@ -1,7 +1,5 @@
#!/usr/bin/env python2
"""Extensible script to run one or more Python Linters across a subset of files in parallel."""
-from __future__ import absolute_import
-from __future__ import print_function
import argparse
import logging
@@ -135,7 +133,8 @@ def _fix_files(linters, config_dict, file_names):
sys.exit(1)
for linter in linter_instances:
- run_linter = lambda param1: lint_runner.run(linter.cmd_path + linter.linter.get_fix_cmd_args(param1)) # pylint: disable=cell-var-from-loop
+        run_linter = lambda param1: lint_runner.run(  # pylint: disable=cell-var-from-loop
+            linter.cmd_path + linter.linter.get_fix_cmd_args(param1))
lint_clean = parallel.parallel_process([os.path.abspath(f) for f in file_names], run_linter)
diff --git a/buildscripts/remote_operations.py b/buildscripts/remote_operations.py
index 93798ae7ac1..29b1ae778df 100755
--- a/buildscripts/remote_operations.py
+++ b/buildscripts/remote_operations.py
@@ -1,8 +1,6 @@
#!/usr/bin/env python
"""Remote access utilities, via ssh & scp."""
-from __future__ import print_function
-
import optparse
import os
import posixpath
@@ -10,20 +8,7 @@ import re
import shlex
import sys
import time
-
-# The subprocess32 module is untested on Windows and thus isn't recommended for use, even when it's
-# installed. See https://github.com/google/python-subprocess32/blob/3.2.7/README.md#usage.
-if os.name == "posix" and sys.version_info[0] == 2:
- try:
- import subprocess32 as subprocess
- except ImportError:
- import warnings
- warnings.warn(("Falling back to using the subprocess module because subprocess32 isn't"
- " available. When using the subprocess module, a child process may trigger"
- " an invalid free(). See SERVER-22219 for more details."), RuntimeWarning)
- import subprocess # type: ignore
-else:
- import subprocess
+import subprocess
# Get relative imports to work when the package is not installed on the PYTHONPATH.
if __name__ == "__main__" and __package__ is None:
@@ -231,44 +216,50 @@ def main(): # pylint: disable=too-many-branches,too-many-statements
shell_options = optparse.OptionGroup(parser, "Shell options")
copy_options = optparse.OptionGroup(parser, "Copy options")
- parser.add_option("--userHost", dest="user_host", default=None,
- help=("User and remote host to execute commands on [REQUIRED]."
- " Examples, 'user@1.2.3.4' or 'user@myhost.com'."))
-
- parser.add_option("--operation", dest="operation", default="shell", choices=_OPERATIONS,
- help=("Remote operation to perform, choose one of '{}',"
- " defaults to '%default'.".format(", ".join(_OPERATIONS))))
-
- control_options.add_option("--sshConnectionOptions", dest="ssh_connection_options",
- default=None, action="append",
- help=("SSH connection options which are common to ssh and scp."
- " More than one option can be specified either"
- " in one quoted string or by specifying"
- " this option more than once. Example options:"
- " '-i $HOME/.ssh/access.pem -o ConnectTimeout=10"
- " -o ConnectionAttempts=10'"))
-
- control_options.add_option("--sshOptions", dest="ssh_options", default=None, action="append",
- help=("SSH specific options."
- " More than one option can be specified either"
- " in one quoted string or by specifying"
- " this option more than once. Example options:"
- " '-t' or '-T'"))
-
- control_options.add_option("--scpOptions", dest="scp_options", default=None, action="append",
- help=("SCP specific options."
- " More than one option can be specified either"
- " in one quoted string or by specifying"
- " this option more than once. Example options:"
- " '-l 5000'"))
-
- control_options.add_option("--retries", dest="retries", type=int, default=0,
- help=("Number of retries to attempt for operation,"
- " defaults to '%default'."))
-
- control_options.add_option("--retrySleep", dest="retry_sleep", type=int, default=10,
- help=("Number of seconds to wait between retries,"
- " defaults to '%default'."))
+ parser.add_option(
+ "--userHost", dest="user_host", default=None,
+ help=("User and remote host to execute commands on [REQUIRED]."
+ " Examples, 'user@1.2.3.4' or 'user@myhost.com'."))
+
+ parser.add_option(
+ "--operation", dest="operation", default="shell", choices=_OPERATIONS,
+ help=("Remote operation to perform, choose one of '{}',"
+ " defaults to '%default'.".format(", ".join(_OPERATIONS))))
+
+ control_options.add_option(
+ "--sshConnectionOptions", dest="ssh_connection_options", default=None, action="append",
+ help=("SSH connection options which are common to ssh and scp."
+ " More than one option can be specified either"
+ " in one quoted string or by specifying"
+ " this option more than once. Example options:"
+ " '-i $HOME/.ssh/access.pem -o ConnectTimeout=10"
+ " -o ConnectionAttempts=10'"))
+
+ control_options.add_option(
+ "--sshOptions", dest="ssh_options", default=None, action="append",
+ help=("SSH specific options."
+ " More than one option can be specified either"
+ " in one quoted string or by specifying"
+ " this option more than once. Example options:"
+ " '-t' or '-T'"))
+
+ control_options.add_option(
+ "--scpOptions", dest="scp_options", default=None, action="append",
+ help=("SCP specific options."
+ " More than one option can be specified either"
+ " in one quoted string or by specifying"
+ " this option more than once. Example options:"
+ " '-l 5000'"))
+
+ control_options.add_option(
+ "--retries", dest="retries", type=int, default=0,
+ help=("Number of retries to attempt for operation,"
+ " defaults to '%default'."))
+
+ control_options.add_option(
+ "--retrySleep", dest="retry_sleep", type=int, default=10,
+ help=("Number of seconds to wait between retries,"
+ " defaults to '%default'."))
control_options.add_option("--debug", dest="debug", action="store_true", default=False,
help="Provides debug output.")
@@ -276,32 +267,37 @@ def main(): # pylint: disable=too-many-branches,too-many-statements
control_options.add_option("--verbose", dest="verbose", action="store_true", default=False,
help="Print exit status and output at end.")
- shell_options.add_option("--commands", dest="remote_commands", default=None, action="append",
- help=("Commands to excute on the remote host. The"
- " commands must be separated by a ';' and can either"
- " be specifed in a quoted string or by specifying"
- " this option more than once. A ';' will be added"
- " between commands when this option is specifed"
- " more than once."))
-
- shell_options.add_option("--commandDir", dest="command_dir", default=None,
- help=("Working directory on remote to execute commands"
- " form. Defaults to remote login directory."))
-
- copy_options.add_option("--file", dest="files", default=None, action="append",
- help=("The file to copy to/from remote host. To"
- " support spaces in the file, each file must be"
- " specified using this option more than once."))
-
- copy_options.add_option("--remoteDir", dest="remote_dir", default=None,
- help=("Remote directory to copy to, only applies when"
- " operation is 'copy_to'. Defaults to the login"
- " directory on the remote host."))
-
- copy_options.add_option("--localDir", dest="local_dir", default=".",
- help=("Local directory to copy to, only applies when"
- " operation is 'copy_from'. Defaults to the"
- " current directory, '%default'."))
+ shell_options.add_option(
+ "--commands", dest="remote_commands", default=None, action="append",
+ help=("Commands to excute on the remote host. The"
+ " commands must be separated by a ';' and can either"
+ " be specifed in a quoted string or by specifying"
+ " this option more than once. A ';' will be added"
+ " between commands when this option is specifed"
+ " more than once."))
+
+ shell_options.add_option(
+ "--commandDir", dest="command_dir", default=None,
+ help=("Working directory on remote to execute commands"
+ " form. Defaults to remote login directory."))
+
+ copy_options.add_option(
+ "--file", dest="files", default=None, action="append",
+ help=("The file to copy to/from remote host. To"
+ " support spaces in the file, each file must be"
+ " specified using this option more than once."))
+
+ copy_options.add_option(
+ "--remoteDir", dest="remote_dir", default=None,
+ help=("Remote directory to copy to, only applies when"
+ " operation is 'copy_to'. Defaults to the login"
+ " directory on the remote host."))
+
+ copy_options.add_option(
+ "--localDir", dest="local_dir", default=".",
+ help=("Local directory to copy to, only applies when"
+ " operation is 'copy_from'. Defaults to the"
+ " current directory, '%default'."))
parser.add_option_group(control_options)
parser.add_option_group(shell_options)
diff --git a/buildscripts/resmoke.py b/buildscripts/resmoke.py
index b8afbeec861..25ab3c88ab1 100755
--- a/buildscripts/resmoke.py
+++ b/buildscripts/resmoke.py
@@ -1,8 +1,6 @@
#!/usr/bin/env python
"""Command line utility for executing MongoDB tests of all kinds."""
-from __future__ import absolute_import
-
import os.path
import platform
import random
@@ -331,7 +329,8 @@ class Resmoke(object): # pylint: disable=too-many-instance-attributes
curator_exists = os.path.isfile(curator_path)
curator_same_version = False
if curator_exists:
- curator_version = subprocess.check_output([curator_path, "--version"]).split()
+ curator_version = subprocess.check_output([curator_path,
+ "--version"]).decode('utf-8').split()
curator_same_version = git_hash in curator_version
if curator_exists and not curator_same_version:
diff --git a/buildscripts/resmokeconfig/__init__.py b/buildscripts/resmokeconfig/__init__.py
index b3462adcc2c..03aa978b803 100644
--- a/buildscripts/resmokeconfig/__init__.py
+++ b/buildscripts/resmokeconfig/__init__.py
@@ -1,5 +1,4 @@
"""Resmokeconfig module."""
-from __future__ import absolute_import
from .suites import NAMED_SUITES
from .loggers import NAMED_LOGGERS
diff --git a/buildscripts/resmokeconfig/loggers/__init__.py b/buildscripts/resmokeconfig/loggers/__init__.py
index 5342639c567..753d7892023 100644
--- a/buildscripts/resmokeconfig/loggers/__init__.py
+++ b/buildscripts/resmokeconfig/loggers/__init__.py
@@ -1,7 +1,5 @@
"""Defines a mapping of shortened names for logger configuration files to their full path."""
-from __future__ import absolute_import
-
import os
import os.path
@@ -16,7 +14,7 @@ def _get_named_loggers():
named_loggers = {}
try:
- (root, _dirs, files) = os.walk(dirname).next()
+ (root, _dirs, files) = next(os.walk(dirname))
for filename in files:
(short_name, ext) = os.path.splitext(filename)
if ext in (".yml", ".yaml"):
diff --git a/buildscripts/resmokeconfig/suites/__init__.py b/buildscripts/resmokeconfig/suites/__init__.py
index 87d378616bc..cff4c39218a 100644
--- a/buildscripts/resmokeconfig/suites/__init__.py
+++ b/buildscripts/resmokeconfig/suites/__init__.py
@@ -1,7 +1,5 @@
"""Defines a mapping of shortened names for suite configuration files to their full path."""
-from __future__ import absolute_import
-
import os
import os.path
@@ -16,7 +14,7 @@ def _get_named_suites():
named_suites = {}
try:
- (root, _dirs, files) = os.walk(dirname).next()
+ (root, _dirs, files) = next(os.walk(dirname))
for filename in files:
(short_name, ext) = os.path.splitext(filename)
if ext in (".yml", ".yaml"):
diff --git a/buildscripts/resmokelib/__init__.py b/buildscripts/resmokelib/__init__.py
index c6a0bc9d079..e24f7425280 100644
--- a/buildscripts/resmokelib/__init__.py
+++ b/buildscripts/resmokelib/__init__.py
@@ -1,7 +1,5 @@
"""Empty."""
-from __future__ import absolute_import
-
from . import config
from . import errors
from . import logging
diff --git a/buildscripts/resmokelib/config.py b/buildscripts/resmokelib/config.py
index 50af5c5ccdd..7705682cce1 100644
--- a/buildscripts/resmokelib/config.py
+++ b/buildscripts/resmokelib/config.py
@@ -1,7 +1,5 @@
"""Configuration options for resmoke.py."""
-from __future__ import absolute_import
-
import collections
import datetime
import itertools
@@ -75,7 +73,7 @@ DEFAULTS = {
"repeat_tests_secs": None,
"report_failure_status": "fail",
"report_file": None,
- "seed": long(time.time() * 256), # Taken from random.py code in Python 2.7.
+ "seed": int(time.time() * 256), # Taken from random.py code in Python 2.7.
"service_executor": None,
"shell_conn_string": None,
"shell_port": None,
@@ -181,18 +179,19 @@ class SuiteOptions(_SuiteOptions):
description = None
include_tags = None
parent = dict(
- zip(SuiteOptions._fields, [
- description,
- FAIL_FAST,
- include_tags,
- JOBS,
- REPEAT_SUITES,
- REPEAT_TESTS,
- REPEAT_TESTS_MAX,
- REPEAT_TESTS_MIN,
- REPEAT_TESTS_SECS,
- REPORT_FAILURE_STATUS,
- ]))
+ list(
+ zip(SuiteOptions._fields, [
+ description,
+ FAIL_FAST,
+ include_tags,
+ JOBS,
+ REPEAT_SUITES,
+ REPEAT_TESTS,
+ REPEAT_TESTS_MAX,
+ REPEAT_TESTS_MIN,
+ REPEAT_TESTS_SECS,
+ REPORT_FAILURE_STATUS,
+ ])))
options = self._asdict()
for field in SuiteOptions._fields:
@@ -203,7 +202,7 @@ class SuiteOptions(_SuiteOptions):
SuiteOptions.ALL_INHERITED = SuiteOptions( # type: ignore
- **dict(zip(SuiteOptions._fields, itertools.repeat(SuiteOptions.INHERIT))))
+ **dict(list(zip(SuiteOptions._fields, itertools.repeat(SuiteOptions.INHERIT)))))
##
# Variables that are set by the user at the command line or with --options.
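
Note: zip() returns a lazy iterator on Python 3 rather than a list, so call sites that index or re-consume the result wrap it in list(). Strictly, dict(zip(...)) needs no wrapper because dict() consumes the iterator itself, but the extra list() is harmless. A sketch:

    fields = ("fail_fast", "jobs")
    values = (False, 4)

    pairs = zip(fields, values)          # lazy iterator on Python 3
    as_dict = dict(zip(fields, values))  # dict() consumes the iterator directly
    as_list = list(zip(fields, values))  # materialize for reuse or indexing
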
diff --git a/buildscripts/resmokelib/core/__init__.py b/buildscripts/resmokelib/core/__init__.py
index 78a8b4924b8..5f780a34e06 100644
--- a/buildscripts/resmokelib/core/__init__.py
+++ b/buildscripts/resmokelib/core/__init__.py
@@ -1,5 +1,4 @@
"""Resmokelib core module."""
-from __future__ import absolute_import
from . import process
from . import programs
diff --git a/buildscripts/resmokelib/core/jasper_process.py b/buildscripts/resmokelib/core/jasper_process.py
index 60c349743ff..fc4ef79fa9c 100644
--- a/buildscripts/resmokelib/core/jasper_process.py
+++ b/buildscripts/resmokelib/core/jasper_process.py
@@ -3,8 +3,6 @@
Serves as an alternative to process.py.
"""
-from __future__ import absolute_import
-
import sys
try:
diff --git a/buildscripts/resmokelib/core/network.py b/buildscripts/resmokelib/core/network.py
index b48a2221f01..e76e73bbb08 100644
--- a/buildscripts/resmokelib/core/network.py
+++ b/buildscripts/resmokelib/core/network.py
@@ -1,7 +1,5 @@
"""Class used to allocate ports for mongod and mongos processes involved in running the tests."""
-from __future__ import absolute_import
-
import collections
import functools
import threading
diff --git a/buildscripts/resmokelib/core/pipe.py b/buildscripts/resmokelib/core/pipe.py
index 578b934bb7d..c52dbc1235a 100644
--- a/buildscripts/resmokelib/core/pipe.py
+++ b/buildscripts/resmokelib/core/pipe.py
@@ -5,8 +5,6 @@ Used to avoid deadlocks from the pipe buffer filling up and blocking the subproc
being waited on.
"""
-from __future__ import absolute_import
-
import threading
diff --git a/buildscripts/resmokelib/core/process.py b/buildscripts/resmokelib/core/process.py
index 84c067d8e3b..4b8e0f25d0a 100644
--- a/buildscripts/resmokelib/core/process.py
+++ b/buildscripts/resmokelib/core/process.py
@@ -4,34 +4,13 @@ Uses job objects when running on Windows to ensure that all created
processes are terminated.
"""
-from __future__ import absolute_import
-
import atexit
import logging
import os
import os.path
import sys
import threading
-
-# The subprocess32 module resolves the thread-safety issues of the subprocess module in Python 2.x
-# when the _posixsubprocess C extension module is also available. Additionally, the _posixsubprocess
-# C extension module avoids triggering invalid free() calls on Python's internal data structure for
-# thread-local storage by skipping the PyOS_AfterFork() call when the 'preexec_fn' parameter isn't
-# specified to subprocess.Popen(). See SERVER-22219 for more details.
-#
-# The subprocess32 module is untested on Windows and thus isn't recommended for use, even when it's
-# installed. See https://github.com/google/python-subprocess32/blob/3.2.7/README.md#usage.
-if os.name == "posix" and sys.version_info[0] == 2:
- try:
- import subprocess32 as subprocess
- except ImportError:
- import warnings
- warnings.warn(("Falling back to using the subprocess module because subprocess32 isn't"
- " available. When using the subprocess module, a child process may trigger"
- " an invalid free(). See SERVER-22219 for more details."), RuntimeWarning)
- import subprocess # type: ignore
-else:
- import subprocess
+import subprocess
from . import pipe # pylint: disable=wrong-import-position
from .. import utils # pylint: disable=wrong-import-position
@@ -182,8 +161,8 @@ class Process(object):
finally:
win32api.CloseHandle(mongo_signal_handle)
- print "Failed to cleanly exit the program, calling TerminateProcess() on PID: " +\
- str(self._process.pid)
+ print("Failed to cleanly exit the program, calling TerminateProcess() on PID: " +\
+ str(self._process.pid))
# Adapted from implementation of Popen.terminate() in subprocess.py of Python 2.7
# because earlier versions do not catch exceptions.
diff --git a/buildscripts/resmokelib/core/programs.py b/buildscripts/resmokelib/core/programs.py
index d1c0b4ca03e..15db85721eb 100644
--- a/buildscripts/resmokelib/core/programs.py
+++ b/buildscripts/resmokelib/core/programs.py
@@ -3,8 +3,6 @@
Handles all the nitty-gritty parameter conversion.
"""
-from __future__ import absolute_import
-
import json
import os
import os.path
diff --git a/buildscripts/resmokelib/logging/__init__.py b/buildscripts/resmokelib/logging/__init__.py
index d0b4a48ac57..2d527b91434 100644
--- a/buildscripts/resmokelib/logging/__init__.py
+++ b/buildscripts/resmokelib/logging/__init__.py
@@ -1,7 +1,5 @@
"""Extension to the logging package to support buildlogger."""
-from __future__ import absolute_import
-
# Alias the built-in logging.Logger class for type checking arguments. Those interested in
# constructing a new Logger instance should use the loggers.new_logger() function instead.
from logging import Logger
diff --git a/buildscripts/resmokelib/logging/buildlogger.py b/buildscripts/resmokelib/logging/buildlogger.py
index a6aec6af20e..3c7901dc6fe 100644
--- a/buildscripts/resmokelib/logging/buildlogger.py
+++ b/buildscripts/resmokelib/logging/buildlogger.py
@@ -1,7 +1,5 @@
"""Define handlers for communicating with a buildlogger server."""
-from __future__ import absolute_import
-
import functools
import json
import os
@@ -85,7 +83,7 @@ class _LogsSplitter(object):
2 is added to each string size to account for the array representation of the logs,
as each line is preceded by a '[' or a space and followed by a ',' or a ']'.
"""
- return len(json.dumps(line, encoding="utf-8")) + 2
+ return len(json.dumps(line)) + 2
curr_logs = []
curr_logs_size = 0
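
Note: json.dumps() no longer accepts an encoding keyword on Python 3 (it always returns str), so the argument is simply dropped rather than translated:

    import json

    payload = json.dumps({"msg": "tëst"})  # always returns str on Python 3
    # +2: each line is preceded by '[' or ' ' and followed by ',' or ']'
    size = len(payload) + 2
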
@@ -200,8 +198,8 @@ class _BaseBuildloggerHandler(handlers.BufferedHandler):
# writing the messages to the fallback logkeeper to avoid putting additional pressure on
# the Evergreen database.
BUILDLOGGER_FALLBACK.warning(
- "Failed to flush all log output (%d messages) to logkeeper.", len(
- self.retry_buffer))
+ "Failed to flush all log output (%d messages) to logkeeper.",
+ len(self.retry_buffer))
# We set a flag to indicate that we failed to flush all log output to logkeeper so
# resmoke.py can exit with a special return code.
@@ -226,10 +224,11 @@ class BuildloggerTestHandler(_BaseBuildloggerHandler):
@_log_on_error
def _finish_test(self, failed=False):
"""Send a POST request to the APPEND_TEST_LOGS_ENDPOINT with the test status."""
- self.post(self.endpoint, headers={
- "X-Sendlogs-Test-Done": "true",
- "X-Sendlogs-Test-Failed": "true" if failed else "false",
- })
+ self.post(
+ self.endpoint, headers={
+ "X-Sendlogs-Test-Done": "true",
+ "X-Sendlogs-Test-Failed": "true" if failed else "false",
+ })
def close(self):
"""Close the buildlogger handler."""
@@ -262,7 +261,9 @@ class BuildloggerServer(object):
"""Initialize BuildloggerServer."""
tmp_globals = {}
self.config = {}
- execfile(_BUILDLOGGER_CONFIG, tmp_globals, self.config)
+ exec(
+ compile(open(_BUILDLOGGER_CONFIG, "rb").read(), _BUILDLOGGER_CONFIG, 'exec'),
+ tmp_globals, self.config)
# Rename "slavename" to "username" if present.
if "slavename" in self.config and "username" not in self.config:
@@ -285,11 +286,12 @@ class BuildloggerServer(object):
handler = handlers.HTTPHandler(url_root=_config.BUILDLOGGER_URL, username=username,
password=password, should_retry=True)
- response = handler.post(CREATE_BUILD_ENDPOINT, data={
- "builder": builder,
- "buildnum": build_num,
- "task_id": _config.EVERGREEN_TASK_ID,
- })
+ response = handler.post(
+ CREATE_BUILD_ENDPOINT, data={
+ "builder": builder,
+ "buildnum": build_num,
+ "task_id": _config.EVERGREEN_TASK_ID,
+ })
return response["id"]
diff --git a/buildscripts/resmokelib/logging/flush.py b/buildscripts/resmokelib/logging/flush.py
index f5c2b356468..b812fc2b606 100644
--- a/buildscripts/resmokelib/logging/flush.py
+++ b/buildscripts/resmokelib/logging/flush.py
@@ -3,14 +3,11 @@
These instances are used to send logs to buildlogger.
"""
-from __future__ import absolute_import
-
import logging
+import sched
import threading
import time
-from ..utils import scheduler
-
_FLUSH_THREAD_LOCK = threading.Lock()
_FLUSH_THREAD = None
@@ -96,7 +93,7 @@ class _FlushThread(threading.Thread):
self.__schedule_updated.wait(secs)
self.__schedule_updated.clear()
- self.__scheduler = scheduler.Scheduler(time.time, interruptible_sleep)
+ self.__scheduler = sched.scheduler(time.time, interruptible_sleep)
self.__schedule_updated = threading.Event()
self.__should_stop = threading.Event()
self.__terminated = threading.Event()
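
Note: the custom utils.scheduler wrapper is retired in favor of the stdlib sched.scheduler, which takes the same (timefunc, delayfunc) pair; passing the interruptible sleep as delayfunc preserves the wake-on-update behavior. Minimal usage sketch:

    import sched
    import time

    # delayfunc is invoked to wait between events, so an interruptible
    # sleep can be substituted for time.sleep.
    scheduler = sched.scheduler(time.time, time.sleep)
    scheduler.enter(0.1, 1, lambda: print("flush"))
    scheduler.run()
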
diff --git a/buildscripts/resmokelib/logging/formatters.py b/buildscripts/resmokelib/logging/formatters.py
index 450d5d29cd8..b08e736ddcc 100644
--- a/buildscripts/resmokelib/logging/formatters.py
+++ b/buildscripts/resmokelib/logging/formatters.py
@@ -1,7 +1,5 @@
"""Custom formatters for the logging handlers."""
-from __future__ import absolute_import
-
import logging
import time
diff --git a/buildscripts/resmokelib/logging/handlers.py b/buildscripts/resmokelib/logging/handlers.py
index d67bf13f724..fecbde906fa 100644
--- a/buildscripts/resmokelib/logging/handlers.py
+++ b/buildscripts/resmokelib/logging/handlers.py
@@ -1,7 +1,5 @@
"""Additional handlers that are used as the base classes of the buildlogger handler."""
-from __future__ import absolute_import
-
import json
import logging
import sys
@@ -193,18 +191,15 @@ class HTTPHandler(object):
"""
data = utils.default_if_none(data, [])
- data = json.dumps(data, encoding="utf-8")
+ data = json.dumps(data)
headers = utils.default_if_none(headers, {})
headers["Content-Type"] = "application/json; charset=utf-8"
url = self._make_url(endpoint)
- # Versions of Python earlier than 2.7.9 do not support certificate validation. So we
- # disable certificate validation for older Python versions.
- should_validate_certificates = sys.version_info >= (2, 7, 9)
with warnings.catch_warnings():
- if urllib3_exceptions is not None and not should_validate_certificates:
+ if urllib3_exceptions is not None:
try:
warnings.simplefilter("ignore", urllib3_exceptions.InsecurePlatformWarning)
except AttributeError:
@@ -222,8 +217,7 @@ class HTTPHandler(object):
pass
response = self.session.post(url, data=data, headers=headers, timeout=timeout_secs,
- auth=self.auth_handler,
- verify=should_validate_certificates)
+ auth=self.auth_handler, verify=True)
response.raise_for_status()
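
Note: the version probe disappears because every Python 3 release validates TLS certificates, so verify=True can be passed unconditionally (it is also the requests default). A sketch against a hypothetical endpoint:

    import requests

    # Hypothetical URL; verify=True is the requests default on Python 3.
    response = requests.post("https://logkeeper.example.com/build", json={},
                             timeout=10, verify=True)
    response.raise_for_status()
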
diff --git a/buildscripts/resmokelib/logging/loggers.py b/buildscripts/resmokelib/logging/loggers.py
index deb3f6c38a2..4028925af5a 100644
--- a/buildscripts/resmokelib/logging/loggers.py
+++ b/buildscripts/resmokelib/logging/loggers.py
@@ -1,7 +1,5 @@
"""Module to hold the logger instances themselves."""
-from __future__ import absolute_import
-
import logging
import sys
@@ -304,8 +302,8 @@ class FixtureNodeLogger(BaseLogger):
def new_fixture_node_logger(self, node_name):
"""Create a new child FixtureNodeLogger."""
- return FixtureNodeLogger(self.fixture_class, self.job_num, "%s:%s" % (self.node_name,
- node_name), self)
+ return FixtureNodeLogger(self.fixture_class, self.job_num,
+ "%s:%s" % (self.node_name, node_name), self)
class TestsRootLogger(RootLogger):
diff --git a/buildscripts/resmokelib/parser.py b/buildscripts/resmokelib/parser.py
index db30a407b8c..80e3b36e516 100644
--- a/buildscripts/resmokelib/parser.py
+++ b/buildscripts/resmokelib/parser.py
@@ -1,7 +1,5 @@
"""Parser for command line arguments."""
-from __future__ import absolute_import
-
import collections
import os
import os.path
@@ -26,40 +24,45 @@ def _make_parser(): # pylint: disable=too-many-statements
"""Create and return the command line arguments parser."""
parser = optparse.OptionParser()
- parser.add_option("--suites", dest="suite_files", metavar="SUITE1,SUITE2",
- help=("Comma separated list of YAML files that each specify the configuration"
- " of a suite. If the file is located in the resmokeconfig/suites/"
- " directory, then the basename without the .yml extension can be"
- " specified, e.g. 'core'. If a list of files is passed in as"
- " positional arguments, they will be run using the suites'"
- " configurations"))
-
- parser.add_option("--log", dest="logger_file", metavar="LOGGER",
- help=("A YAML file that specifies the logging configuration. If the file is"
- " located in the resmokeconfig/suites/ directory, then the basename"
- " without the .yml extension can be specified, e.g. 'console'."))
-
- parser.add_option("--archiveFile", dest="archive_file", metavar="ARCHIVE_FILE",
- help=("Sets the archive file name for the Evergreen task running the tests."
- " The archive file is JSON format containing a list of tests that were"
- " successfully archived to S3. If unspecified, no data files from tests"
- " will be archived in S3. Tests can be designated for archival in the"
- " task suite configuration file."))
-
- parser.add_option("--archiveLimitMb", type="int", dest="archive_limit_mb",
- metavar="ARCHIVE_LIMIT_MB",
- help=("Sets the limit (in MB) for archived files to S3. A value of 0"
- " indicates there is no limit."))
-
- parser.add_option("--archiveLimitTests", type="int", dest="archive_limit_tests",
- metavar="ARCHIVE_LIMIT_TESTS",
- help=("Sets the maximum number of tests to archive to S3. A value"
- " of 0 indicates there is no limit."))
-
- parser.add_option("--basePort", dest="base_port", metavar="PORT",
- help=("The starting port number to use for mongod and mongos processes"
- " spawned by resmoke.py or the tests themselves. Each fixture and Job"
- " allocates a contiguous range of ports."))
+ parser.add_option(
+ "--suites", dest="suite_files", metavar="SUITE1,SUITE2",
+ help=("Comma separated list of YAML files that each specify the configuration"
+ " of a suite. If the file is located in the resmokeconfig/suites/"
+ " directory, then the basename without the .yml extension can be"
+ " specified, e.g. 'core'. If a list of files is passed in as"
+ " positional arguments, they will be run using the suites'"
+ " configurations"))
+
+ parser.add_option(
+ "--log", dest="logger_file", metavar="LOGGER",
+ help=("A YAML file that specifies the logging configuration. If the file is"
+ " located in the resmokeconfig/suites/ directory, then the basename"
+ " without the .yml extension can be specified, e.g. 'console'."))
+
+ parser.add_option(
+ "--archiveFile", dest="archive_file", metavar="ARCHIVE_FILE",
+ help=("Sets the archive file name for the Evergreen task running the tests."
+ " The archive file is JSON format containing a list of tests that were"
+ " successfully archived to S3. If unspecified, no data files from tests"
+ " will be archived in S3. Tests can be designated for archival in the"
+ " task suite configuration file."))
+
+ parser.add_option(
+ "--archiveLimitMb", type="int", dest="archive_limit_mb", metavar="ARCHIVE_LIMIT_MB",
+ help=("Sets the limit (in MB) for archived files to S3. A value of 0"
+ " indicates there is no limit."))
+
+ parser.add_option(
+ "--archiveLimitTests", type="int", dest="archive_limit_tests",
+ metavar="ARCHIVE_LIMIT_TESTS",
+ help=("Sets the maximum number of tests to archive to S3. A value"
+ " of 0 indicates there is no limit."))
+
+ parser.add_option(
+ "--basePort", dest="base_port", metavar="PORT",
+ help=("The starting port number to use for mongod and mongos processes"
+ " spawned by resmoke.py or the tests themselves. Each fixture and Job"
+ " allocates a contiguous range of ports."))
parser.add_option("--buildloggerUrl", action="store", dest="buildlogger_url", metavar="URL",
help="The root url of the buildlogger server.")
@@ -67,19 +70,19 @@ def _make_parser(): # pylint: disable=too-many-statements
parser.add_option("--continueOnFailure", action="store_true", dest="continue_on_failure",
help="Executes all tests in all suites, even if some of them fail.")
- parser.add_option("--dbpathPrefix", dest="dbpath_prefix", metavar="PATH",
- help=("The directory which will contain the dbpaths of any mongod's started"
- " by resmoke.py or the tests themselves."))
+ parser.add_option(
+ "--dbpathPrefix", dest="dbpath_prefix", metavar="PATH",
+ help=("The directory which will contain the dbpaths of any mongod's started"
+ " by resmoke.py or the tests themselves."))
parser.add_option("--dbtest", dest="dbtest_executable", metavar="PATH",
help="The path to the dbtest executable for resmoke to use.")
- parser.add_option("--excludeWithAnyTags", action="append", dest="exclude_with_any_tags",
- metavar="TAG1,TAG2",
- help=("Comma separated list of tags. Any jstest that contains any of the"
- " specified tags will be excluded from any suites that are run."
- " The tag '{}' is implicitly part of this list.".format(
- _config.EXCLUDED_TAG)))
+ parser.add_option(
+ "--excludeWithAnyTags", action="append", dest="exclude_with_any_tags", metavar="TAG1,TAG2",
+ help=("Comma separated list of tags. Any jstest that contains any of the"
+ " specified tags will be excluded from any suites that are run."
+ " The tag '{}' is implicitly part of this list.".format(_config.EXCLUDED_TAG)))
parser.add_option("-f", "--findSuites", action="store_true", dest="find_suites",
help="Lists the names of the suites that will execute the specified tests.")
@@ -87,29 +90,30 @@ def _make_parser(): # pylint: disable=too-many-statements
parser.add_option("--genny", dest="genny_executable", metavar="PATH",
help="The path to the genny executable for resmoke to use.")
- parser.add_option("--spawnUsing", type="choice", dest="spawn_using", choices=("python",
- "jasper"),
- help=("Allows you to spawn resmoke processes using python or Jasper."
- "Defaults to python. Options are 'python' or 'jasper'."))
+ parser.add_option(
+ "--spawnUsing", type="choice", dest="spawn_using", choices=("python", "jasper"),
+ help=("Allows you to spawn resmoke processes using python or Jasper."
+ "Defaults to python. Options are 'python' or 'jasper'."))
- parser.add_option("--includeWithAnyTags", action="append", dest="include_with_any_tags",
- metavar="TAG1,TAG2",
- help=("Comma separated list of tags. For the jstest portion of the suite(s),"
- " only tests which have at least one of the specified tags will be"
- " run."))
+ parser.add_option(
+ "--includeWithAnyTags", action="append", dest="include_with_any_tags", metavar="TAG1,TAG2",
+ help=("Comma separated list of tags. For the jstest portion of the suite(s),"
+ " only tests which have at least one of the specified tags will be"
+ " run."))
parser.add_option("-n", action="store_const", const="tests", dest="dry_run",
help="Outputs the tests that would be run.")
# TODO: add support for --dryRun=commands
- parser.add_option("--dryRun", type="choice", action="store", dest="dry_run",
- choices=("off", "tests"), metavar="MODE",
- help=("Instead of running the tests, outputs the tests that would be run"
- " (if MODE=tests). Defaults to MODE=%default."))
+ parser.add_option(
+ "--dryRun", type="choice", action="store", dest="dry_run", choices=("off", "tests"),
+ metavar="MODE", help=("Instead of running the tests, outputs the tests that would be run"
+ " (if MODE=tests). Defaults to MODE=%default."))
- parser.add_option("-j", "--jobs", type="int", dest="jobs", metavar="JOBS",
- help=("The number of Job instances to use. Each instance will receive its"
- " own MongoDB deployment to dispatch tests to."))
+ parser.add_option(
+ "-j", "--jobs", type="int", dest="jobs", metavar="JOBS",
+ help=("The number of Job instances to use. Each instance will receive its"
+ " own MongoDB deployment to dispatch tests to."))
parser.add_option("-l", "--listSuites", action="store_true", dest="list_suites",
help="Lists the names of the suites available to execute.")
@@ -120,24 +124,27 @@ def _make_parser(): # pylint: disable=too-many-statements
parser.add_option("--mongod", dest="mongod_executable", metavar="PATH",
help="The path to the mongod executable for resmoke.py to use.")
- parser.add_option("--mongodSetParameters", dest="mongod_set_parameters",
- metavar="{key1: value1, key2: value2, ..., keyN: valueN}",
- help=("Passes one or more --setParameter options to all mongod processes"
- " started by resmoke.py. The argument is specified as bracketed YAML -"
- " i.e. JSON with support for single quoted and unquoted keys."))
+ parser.add_option(
+ "--mongodSetParameters", dest="mongod_set_parameters",
+ metavar="{key1: value1, key2: value2, ..., keyN: valueN}",
+ help=("Passes one or more --setParameter options to all mongod processes"
+ " started by resmoke.py. The argument is specified as bracketed YAML -"
+ " i.e. JSON with support for single quoted and unquoted keys."))
- parser.add_option("--mongoebench", dest="mongoebench_executable", metavar="PATH",
- help=("The path to the mongoebench (benchrun embedded) executable for"
- " resmoke.py to use."))
+ parser.add_option(
+ "--mongoebench", dest="mongoebench_executable", metavar="PATH",
+ help=("The path to the mongoebench (benchrun embedded) executable for"
+ " resmoke.py to use."))
parser.add_option("--mongos", dest="mongos_executable", metavar="PATH",
help="The path to the mongos executable for resmoke.py to use.")
- parser.add_option("--mongosSetParameters", dest="mongos_set_parameters",
- metavar="{key1: value1, key2: value2, ..., keyN: valueN}",
- help=("Passes one or more --setParameter options to all mongos processes"
- " started by resmoke.py. The argument is specified as bracketed YAML -"
- " i.e. JSON with support for single quoted and unquoted keys."))
+ parser.add_option(
+ "--mongosSetParameters", dest="mongos_set_parameters",
+ metavar="{key1: value1, key2: value2, ..., keyN: valueN}",
+ help=("Passes one or more --setParameter options to all mongos processes"
+ " started by resmoke.py. The argument is specified as bracketed YAML -"
+ " i.e. JSON with support for single quoted and unquoted keys."))
parser.add_option("--nojournal", action="store_true", dest="no_journal",
help="Disables journaling for all mongod's.")
@@ -148,55 +155,61 @@ def _make_parser(): # pylint: disable=too-many-statements
parser.add_option("--perfReportFile", dest="perf_report_file", metavar="PERF_REPORT",
help="Writes a JSON file with performance test results.")
- parser.add_option("--shellConnString", dest="shell_conn_string", metavar="CONN_STRING",
- help="Overrides the default fixture and connects with a mongodb:// connection"
- " string to an existing MongoDB cluster instead. This is useful for"
- " connecting to a MongoDB deployment started outside of resmoke.py including"
- " one running in a debugger.")
+ parser.add_option(
+ "--shellConnString", dest="shell_conn_string", metavar="CONN_STRING",
+ help="Overrides the default fixture and connects with a mongodb:// connection"
+ " string to an existing MongoDB cluster instead. This is useful for"
+ " connecting to a MongoDB deployment started outside of resmoke.py including"
+ " one running in a debugger.")
- parser.add_option("--shellPort", dest="shell_port", metavar="PORT",
- help="Convenience form of --shellConnString for connecting to an"
- " existing MongoDB cluster with the URL mongodb://localhost:[PORT]."
- " This is useful for connecting to a server running in a debugger.")
+ parser.add_option(
+ "--shellPort", dest="shell_port", metavar="PORT",
+ help="Convenience form of --shellConnString for connecting to an"
+ " existing MongoDB cluster with the URL mongodb://localhost:[PORT]."
+ " This is useful for connecting to a server running in a debugger.")
parser.add_option("--repeat", "--repeatSuites", type="int", dest="repeat_suites", metavar="N",
help="Repeats the given suite(s) N times, or until one fails.")
- parser.add_option("--repeatTests", type="int", dest="repeat_tests", metavar="N",
- help="Repeats the tests inside each suite N times. This applies to tests"
- " defined in the suite configuration as well as tests defined on the command"
- " line.")
-
- parser.add_option("--repeatTestsMax", type="int", dest="repeat_tests_max", metavar="N",
- help="Repeats the tests inside each suite no more than N time when"
- " --repeatTestsSecs is specified. This applies to tests defined in the suite"
- " configuration as well as tests defined on the command line.")
-
- parser.add_option("--repeatTestsMin", type="int", dest="repeat_tests_min", metavar="N",
- help="Repeats the tests inside each suite at least N times when"
- " --repeatTestsSecs is specified. This applies to tests defined in the suite"
- " configuration as well as tests defined on the command line.")
-
- parser.add_option("--repeatTestsSecs", type="float", dest="repeat_tests_secs",
- metavar="SECONDS",
- help="Repeats the tests inside each suite this amount of time. Note that"
- " this option is mutually exclusive with --repeatTests. This applies to"
- " tests defined in the suite configuration as well as tests defined on the"
- " command line.")
-
- parser.add_option("--reportFailureStatus", type="choice", action="store",
- dest="report_failure_status", choices=("fail",
- "silentfail"), metavar="STATUS",
- help="Controls if the test failure status should be reported as failed"
- " or be silently ignored (STATUS=silentfail). Dynamic test failures will"
- " never be silently ignored. Defaults to STATUS=%default.")
+ parser.add_option(
+ "--repeatTests", type="int", dest="repeat_tests", metavar="N",
+ help="Repeats the tests inside each suite N times. This applies to tests"
+ " defined in the suite configuration as well as tests defined on the command"
+ " line.")
+
+ parser.add_option(
+ "--repeatTestsMax", type="int", dest="repeat_tests_max", metavar="N",
+ help="Repeats the tests inside each suite no more than N time when"
+ " --repeatTestsSecs is specified. This applies to tests defined in the suite"
+ " configuration as well as tests defined on the command line.")
+
+ parser.add_option(
+ "--repeatTestsMin", type="int", dest="repeat_tests_min", metavar="N",
+ help="Repeats the tests inside each suite at least N times when"
+ " --repeatTestsSecs is specified. This applies to tests defined in the suite"
+ " configuration as well as tests defined on the command line.")
+
+ parser.add_option(
+ "--repeatTestsSecs", type="float", dest="repeat_tests_secs", metavar="SECONDS",
+ help="Repeats the tests inside each suite this amount of time. Note that"
+ " this option is mutually exclusive with --repeatTests. This applies to"
+ " tests defined in the suite configuration as well as tests defined on the"
+ " command line.")
+
+ parser.add_option(
+ "--reportFailureStatus", type="choice", action="store", dest="report_failure_status",
+ choices=("fail", "silentfail"), metavar="STATUS",
+ help="Controls if the test failure status should be reported as failed"
+ " or be silently ignored (STATUS=silentfail). Dynamic test failures will"
+ " never be silently ignored. Defaults to STATUS=%default.")
parser.add_option("--reportFile", dest="report_file", metavar="REPORT",
help="Writes a JSON file with test status and timing information.")
- parser.add_option("--seed", type="int", dest="seed", metavar="SEED",
- help=("Seed for the random number generator. Useful in combination with the"
- " --shuffle option for producing a consistent test execution order."))
+ parser.add_option(
+ "--seed", type="int", dest="seed", metavar="SEED",
+ help=("Seed for the random number generator. Useful in combination with the"
+ " --shuffle option for producing a consistent test execution order."))
parser.add_option("--serviceExecutor", dest="service_executor", metavar="EXECUTOR",
help="The service executor used by jstests")
@@ -212,32 +225,36 @@ def _make_parser(): # pylint: disable=too-many-statements
choices=("commands", "compatibility", "legacy"), metavar="WRITE_MODE",
help="The write mode used by the mongo shell.")
- parser.add_option("--shuffle", action="store_const", const="on", dest="shuffle",
- help=("Randomizes the order in which tests are executed. This is equivalent"
- " to specifying --shuffleMode=on."))
-
- parser.add_option("--shuffleMode", type="choice", action="store", dest="shuffle",
- choices=("on", "off", "auto"), metavar="ON|OFF|AUTO",
- help=("Controls whether to randomize the order in which tests are executed."
- " Defaults to auto when not supplied. auto enables randomization in"
- " all cases except when the number of jobs requested is 1."))
-
- parser.add_option("--staggerJobs", type="choice", action="store", dest="stagger_jobs",
- choices=("on", "off"), metavar="ON|OFF",
- help=("Enables or disables the stagger of launching resmoke jobs."
- " Defaults to %default."))
-
- parser.add_option("--majorityReadConcern", type="choice", action="store",
- dest="majority_read_concern", choices=("on", "off"), metavar="ON|OFF",
- help=("Enable or disable majority read concern support."
- " Defaults to %default."))
+ parser.add_option(
+ "--shuffle", action="store_const", const="on", dest="shuffle",
+ help=("Randomizes the order in which tests are executed. This is equivalent"
+ " to specifying --shuffleMode=on."))
+
+ parser.add_option(
+ "--shuffleMode", type="choice", action="store", dest="shuffle",
+ choices=("on", "off", "auto"), metavar="ON|OFF|AUTO",
+ help=("Controls whether to randomize the order in which tests are executed."
+ " Defaults to auto when not supplied. auto enables randomization in"
+ " all cases except when the number of jobs requested is 1."))
+
+ parser.add_option(
+ "--staggerJobs", type="choice", action="store", dest="stagger_jobs", choices=("on", "off"),
+ metavar="ON|OFF", help=("Enables or disables the stagger of launching resmoke jobs."
+ " Defaults to %default."))
+
+ parser.add_option(
+ "--majorityReadConcern", type="choice", action="store", dest="majority_read_concern",
+ choices=("on",
+ "off"), metavar="ON|OFF", help=("Enable or disable majority read concern support."
+ " Defaults to %default."))
parser.add_option("--storageEngine", dest="storage_engine", metavar="ENGINE",
help="The storage engine used by dbtests and jstests.")
- parser.add_option("--storageEngineCacheSizeGB", dest="storage_engine_cache_size_gb",
- metavar="CONFIG", help="Sets the storage engine cache size configuration"
- " setting for all mongod's.")
+ parser.add_option(
+ "--storageEngineCacheSizeGB", dest="storage_engine_cache_size_gb", metavar="CONFIG",
+ help="Sets the storage engine cache size configuration"
+ " setting for all mongod's.")
parser.add_option("--tagFile", dest="tag_file", metavar="OPTIONS",
help="A YAML file that associates tests and tags.")
@@ -251,9 +268,10 @@ def _make_parser(): # pylint: disable=too-many-statements
parser.add_option("--wiredTigerIndexConfigString", dest="wt_index_config", metavar="CONFIG",
help="Sets the WiredTiger index configuration setting for all mongod's.")
- parser.add_option("--executor", dest="executor_file",
- help="OBSOLETE: Superceded by --suites; specify --suites=SUITE path/to/test"
- " to run a particular test under a particular suite configuration.")
+ parser.add_option(
+ "--executor", dest="executor_file",
+ help="OBSOLETE: Superceded by --suites; specify --suites=SUITE path/to/test"
+ " to run a particular test under a particular suite configuration.")
evergreen_options = optparse.OptionGroup(
parser, title=_EVERGREEN_OPTIONS_TITLE,
@@ -264,29 +282,33 @@ def _make_parser(): # pylint: disable=too-many-statements
evergreen_options.add_option("--buildId", dest="build_id", metavar="BUILD_ID",
help="Sets the build ID of the task.")
- evergreen_options.add_option("--distroId", dest="distro_id", metavar="DISTRO_ID",
- help=("Sets the identifier for the Evergreen distro running the"
- " tests."))
+ evergreen_options.add_option(
+ "--distroId", dest="distro_id", metavar="DISTRO_ID",
+ help=("Sets the identifier for the Evergreen distro running the"
+ " tests."))
- evergreen_options.add_option("--executionNumber", type="int", dest="execution_number",
- metavar="EXECUTION_NUMBER",
- help=("Sets the number for the Evergreen execution running the"
- " tests."))
+ evergreen_options.add_option(
+ "--executionNumber", type="int", dest="execution_number", metavar="EXECUTION_NUMBER",
+ help=("Sets the number for the Evergreen execution running the"
+ " tests."))
- evergreen_options.add_option("--gitRevision", dest="git_revision", metavar="GIT_REVISION",
- help=("Sets the git revision for the Evergreen task running the"
- " tests."))
+ evergreen_options.add_option(
+ "--gitRevision", dest="git_revision", metavar="GIT_REVISION",
+ help=("Sets the git revision for the Evergreen task running the"
+ " tests."))
# We intentionally avoid adding a new command line option that starts with --suite so it doesn't
# become ambiguous with the --suites option and break how engineers run resmoke.py locally.
- evergreen_options.add_option("--originSuite", dest="origin_suite", metavar="SUITE",
- help=("Indicates the name of the test suite prior to the"
- " evergreen_generate_resmoke_tasks.py script splitting it"
- " up."))
+ evergreen_options.add_option(
+ "--originSuite", dest="origin_suite", metavar="SUITE",
+ help=("Indicates the name of the test suite prior to the"
+ " evergreen_generate_resmoke_tasks.py script splitting it"
+ " up."))
- evergreen_options.add_option("--patchBuild", action="store_true", dest="patch_build",
- help=("Indicates that the Evergreen task running the tests is a"
- " patch build."))
+ evergreen_options.add_option(
+ "--patchBuild", action="store_true", dest="patch_build",
+ help=("Indicates that the Evergreen task running the tests is a"
+ " patch build."))
evergreen_options.add_option("--projectName", dest="project_name", metavar="PROJECT_NAME",
help=("Sets the name of the Evergreen project running the tests."))
@@ -301,9 +323,10 @@ def _make_parser(): # pylint: disable=too-many-statements
evergreen_options.add_option("--taskId", dest="task_id", metavar="TASK_ID",
help="Sets the Id of the Evergreen task running the tests.")
- evergreen_options.add_option("--variantName", dest="variant_name", metavar="VARIANT_NAME",
- help=("Sets the name of the Evergreen build variant running the"
- " tests."))
+ evergreen_options.add_option(
+ "--variantName", dest="variant_name", metavar="VARIANT_NAME",
+ help=("Sets the name of the Evergreen build variant running the"
+ " tests."))
evergreen_options.add_option("--versionId", dest="version_id", metavar="VERSION_ID",
help="Sets the version ID of the task.")
@@ -318,10 +341,11 @@ def _make_parser(): # pylint: disable=too-many-statements
metavar="BENCHMARK_FILTER",
help="Regex to filter Google benchmark tests to run.")
- benchmark_options.add_option("--benchmarkListTests", dest="benchmark_list_tests",
- action="store_true", metavar="BENCHMARK_LIST_TESTS",
- help=("Lists all Google benchmark test configurations in each"
- " test file."))
+ benchmark_options.add_option(
+ "--benchmarkListTests", dest="benchmark_list_tests", action="store_true",
+ metavar="BENCHMARK_LIST_TESTS",
+ help=("Lists all Google benchmark test configurations in each"
+ " test file."))
benchmark_min_time_help = (
"Minimum time to run each benchmark/benchrun test for. Use this option instead of "
@@ -339,10 +363,10 @@ def _make_parser(): # pylint: disable=too-many-statements
metavar="BENCHMARK_REPETITIONS", help=benchmark_repetitions_help)
benchrun_devices = ["Android", "Desktop"]
- benchmark_options.add_option("--benchrunDevice", dest="benchrun_device", metavar="DEVICE",
- type="choice", action="store", choices=benchrun_devices,
- help=("The device to run the benchrun test on, choose from {}."
- " Defaults to DEVICE='%default'.".format(benchrun_devices)))
+ benchmark_options.add_option(
+ "--benchrunDevice", dest="benchrun_device", metavar="DEVICE", type="choice", action="store",
+ choices=benchrun_devices, help=("The device to run the benchrun test on, choose from {}."
+ " Defaults to DEVICE='%default'.".format(benchrun_devices)))
benchmark_options.add_option("--benchrunReportRoot", dest="benchrun_report_root",
metavar="PATH", help="The root path for benchrun test report.")
@@ -436,8 +460,8 @@ def to_local_args(args=None): # pylint: disable=too-many-locals
else:
other_local_args.append(option_name)
- return [arg for arg in (suites_arg, storage_engine_arg)
- if arg is not None] + other_local_args + extra_args
+ return [arg for arg in (suites_arg, storage_engine_arg) if arg is not None
+ ] + other_local_args + extra_args
def parse_command_line():
@@ -613,7 +637,7 @@ def _update_config_vars(values): # pylint: disable=too-many-statements
_config.SHELL_CONN_STRING = conn_string
if config:
- raise optparse.OptionValueError("Unknown option(s): %s" % (config.keys()))
+ raise optparse.OptionValueError("Unknown option(s): %s" % (list(config.keys())))
def _get_logging_config(pathname):
diff --git a/buildscripts/resmokelib/reportfile.py b/buildscripts/resmokelib/reportfile.py
index 00841de2bc9..d4346b349f1 100644
--- a/buildscripts/resmokelib/reportfile.py
+++ b/buildscripts/resmokelib/reportfile.py
@@ -1,7 +1,5 @@
"""Manage interactions with the report.json file."""
-from __future__ import absolute_import
-
import json
from . import config
diff --git a/buildscripts/resmokelib/selector.py b/buildscripts/resmokelib/selector.py
index b9f5686df8b..36f2e608889 100644
--- a/buildscripts/resmokelib/selector.py
+++ b/buildscripts/resmokelib/selector.py
@@ -4,8 +4,6 @@ Defines filtering rules for what tests to include in a suite depending
on whether they apply to C++ unit tests, dbtests, or JS tests.
"""
-from __future__ import absolute_import
-
import collections
import errno
import fnmatch
@@ -71,7 +69,7 @@ class TestFileExplorer(object):
A list of paths as a list(str).
"""
tests = []
- with open(root_file_path, "rb") as filep:
+ with open(root_file_path, "r") as filep:
for test_path in filep:
test_path = test_path.strip()
tests.append(test_path)
@@ -114,7 +112,7 @@ class TestFileExplorer(object):
program = subprocess.Popen(command, stdout=subprocess.PIPE)
stdout = program.communicate()[0]
- return program.returncode, stdout
+ return program.returncode, stdout.decode("utf-8")
@staticmethod
def parse_tag_file(test_kind):
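
Note: Popen.communicate() yields bytes on Python 3; decoding at this boundary keeps callers working in str. Alternatively the process can be opened in text mode so subprocess does the decoding (universal_newlines=True, also spelled text=True on 3.7+). A sketch:

    import subprocess

    program = subprocess.Popen(["echo", "jstests/core/a.js"], stdout=subprocess.PIPE)
    stdout = program.communicate()[0].decode("utf-8")

    # Equivalent: text mode makes communicate() return str directly.
    program = subprocess.Popen(["echo", "jstests/core/a.js"],
                               stdout=subprocess.PIPE, universal_newlines=True)
    stdout = program.communicate()[0]
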
@@ -313,7 +311,7 @@ def make_expression(conf):
elif isinstance(conf, dict):
if len(conf) != 1:
raise ValueError("Tag matching expressions should only contain one key")
- key = conf.keys()[0]
+ key = list(conf.keys())[0]
value = conf[key]
if key == "$allOf":
return _AllOfExpression(_make_expression_list(value))
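
Note: dict.keys() returns a non-indexable view on Python 3, hence the list() wrapper; for a single-key mapping, next(iter(conf)) gets the key without the copy. Sketch:

    conf = {"$allOf": ["tag_a", "tag_b"]}

    key = list(conf.keys())[0]  # views are not indexable on Python 3
    key = next(iter(conf))      # equivalent, no intermediate list
    value = conf[key]
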
diff --git a/buildscripts/resmokelib/sighandler.py b/buildscripts/resmokelib/sighandler.py
index c67d44eb759..5591e325a11 100644
--- a/buildscripts/resmokelib/sighandler.py
+++ b/buildscripts/resmokelib/sighandler.py
@@ -1,7 +1,5 @@
"""Utility to support asynchronously signaling the current process."""
-from __future__ import absolute_import
-
import atexit
import os
import signal
diff --git a/buildscripts/resmokelib/suitesconfig.py b/buildscripts/resmokelib/suitesconfig.py
index 156f15e1413..d55ce7782dd 100644
--- a/buildscripts/resmokelib/suitesconfig.py
+++ b/buildscripts/resmokelib/suitesconfig.py
@@ -1,7 +1,5 @@
"""Module for retrieving the configuration of resmoke.py test suites."""
-from __future__ import absolute_import
-
import collections
import optparse
import os
@@ -33,7 +31,7 @@ def create_test_membership_map(fail_on_missing_selector=False, test_kind=None):
"""
if test_kind is not None:
- if isinstance(test_kind, basestring):
+ if isinstance(test_kind, str):
test_kind = [test_kind]
test_kind = frozenset(test_kind)
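
Note: basestring is gone in Python 3; with unicode folded into str, isinstance(x, str) is the remaining text-type check:

    def normalize_kinds(test_kind):
        if isinstance(test_kind, str):  # basestring no longer exists
            test_kind = [test_kind]
        return frozenset(test_kind)

    assert normalize_kinds("js_test") == frozenset(["js_test"])
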
@@ -117,6 +115,6 @@ def _get_yaml_config(kind, pathname):
pathname = resmokeconfig.NAMED_SUITES[pathname] # Expand 'pathname' to full path.
if not utils.is_yaml_file(pathname) or not os.path.isfile(pathname):
- raise optparse.OptionValueError("Expected a %s YAML config, but got '%s'" % (kind,
- pathname))
+ raise optparse.OptionValueError(
+ "Expected a %s YAML config, but got '%s'" % (kind, pathname))
return utils.load_yaml_file(pathname)
diff --git a/buildscripts/resmokelib/testing/__init__.py b/buildscripts/resmokelib/testing/__init__.py
index eb58f41f7fe..2a276f6bf85 100644
--- a/buildscripts/resmokelib/testing/__init__.py
+++ b/buildscripts/resmokelib/testing/__init__.py
@@ -1,6 +1,4 @@
"""Extension to the unittest package to support buildlogger and parallel test execution."""
-from __future__ import absolute_import
-
from . import executor
from . import suite
diff --git a/buildscripts/resmokelib/testing/executor.py b/buildscripts/resmokelib/testing/executor.py
index 024cce166a4..71fcd75f6d1 100644
--- a/buildscripts/resmokelib/testing/executor.py
+++ b/buildscripts/resmokelib/testing/executor.py
@@ -1,7 +1,5 @@
"""Driver of the test execution framework."""
-from __future__ import absolute_import
-
import threading
import time
@@ -68,7 +66,7 @@ class TestSuiteExecutor(object): # pylint: disable=too-many-instance-attributes
jobs_to_start = self.num_tests
# Must be done after getting buildlogger configuration.
- self._jobs = [self._make_job(job_num) for job_num in xrange(jobs_to_start)]
+ self._jobs = [self._make_job(job_num) for job_num in range(jobs_to_start)]
def run(self):
"""Execute the test suite.
@@ -130,8 +128,9 @@ class TestSuiteExecutor(object): # pylint: disable=too-many-instance-attributes
test_results_num = len(test_report["results"])
# There should be at least as many tests results as expected number of tests.
if test_results_num < self.num_tests:
- raise errors.ResmokeError("{} reported tests is less than {} expected tests"
- .format(test_results_num, self.num_tests))
+ raise errors.ResmokeError(
+ "{} reported tests is less than {} expected tests".format(
+ test_results_num, self.num_tests))
# Clear the report so it can be reused for the next execution.
for job in self._jobs:
@@ -157,8 +156,9 @@ class TestSuiteExecutor(object): # pylint: disable=too-many-instance-attributes
try:
# Run each Job instance in its own thread.
for job in self._jobs:
- thr = threading.Thread(target=job, args=(test_queue, interrupt_flag), kwargs=dict(
- setup_flag=setup_flag, teardown_flag=teardown_flag))
+ thr = threading.Thread(
+ target=job, args=(test_queue, interrupt_flag), kwargs=dict(
+ setup_flag=setup_flag, teardown_flag=teardown_flag))
# Do not wait for tests to finish executing if interrupted by the user.
thr.daemon = True
thr.start()
diff --git a/buildscripts/resmokelib/testing/fixtures/__init__.py b/buildscripts/resmokelib/testing/fixtures/__init__.py
index 1c592c8f6c7..e70ad8259ee 100644
--- a/buildscripts/resmokelib/testing/fixtures/__init__.py
+++ b/buildscripts/resmokelib/testing/fixtures/__init__.py
@@ -1,7 +1,5 @@
"""Fixture for executing JSTests against."""
-from __future__ import absolute_import
-
from .external import ExternalFixture as _ExternalFixture
from .interface import NoOpFixture as _NoOpFixture
from .interface import make_fixture
diff --git a/buildscripts/resmokelib/testing/fixtures/external.py b/buildscripts/resmokelib/testing/fixtures/external.py
index ff5eba3d652..091059f68bd 100644
--- a/buildscripts/resmokelib/testing/fixtures/external.py
+++ b/buildscripts/resmokelib/testing/fixtures/external.py
@@ -1,7 +1,5 @@
"""External fixture for executing JSTests against."""
-from __future__ import absolute_import
-
from . import interface
diff --git a/buildscripts/resmokelib/testing/fixtures/interface.py b/buildscripts/resmokelib/testing/fixtures/interface.py
index 4a38051fffd..b6b028ceb45 100644
--- a/buildscripts/resmokelib/testing/fixtures/interface.py
+++ b/buildscripts/resmokelib/testing/fixtures/interface.py
@@ -1,7 +1,5 @@
"""Interface of the different fixtures for executing JSTests against."""
-from __future__ import absolute_import
-
import os.path
import time
@@ -25,11 +23,9 @@ def make_fixture(class_name, *args, **kwargs):
return _FIXTURES[class_name](*args, **kwargs)
-class Fixture(object):
+class Fixture(object, metaclass=registry.make_registry_metaclass(_FIXTURES)):
"""Base class for all fixtures."""
- __metaclass__ = registry.make_registry_metaclass(_FIXTURES) # type: ignore
-
# We explicitly set the 'REGISTERED_NAME' attribute so that PyLint realizes that the attribute
# is defined for all subclasses of Fixture.
REGISTERED_NAME = "Fixture"
diff --git a/buildscripts/resmokelib/testing/fixtures/replicaset.py b/buildscripts/resmokelib/testing/fixtures/replicaset.py
index 88d57cde26a..9c449dbc432 100644
--- a/buildscripts/resmokelib/testing/fixtures/replicaset.py
+++ b/buildscripts/resmokelib/testing/fixtures/replicaset.py
@@ -1,10 +1,9 @@
"""Replica set fixture for executing JSTests against."""
-from __future__ import absolute_import
-
import os.path
import time
+import bson.errors
import pymongo
import pymongo.errors
import pymongo.write_concern
@@ -77,11 +76,11 @@ class ReplicaSetFixture(interface.ReplFixture): # pylint: disable=too-many-inst
self.replset_name = self.mongod_options.get("replSet", "rs")
if not self.nodes:
- for i in xrange(self.num_nodes):
+ for i in range(self.num_nodes):
node = self._new_mongod(i, self.replset_name)
self.nodes.append(node)
- for i in xrange(self.num_nodes):
+ for i in range(self.num_nodes):
if self.linear_chain and i > 0:
self.nodes[i].mongod_options["set_parameters"][
"failpoint.forceSyncSourceCandidate"] = {
@@ -207,10 +206,17 @@ class ReplicaSetFixture(interface.ReplFixture): # pylint: disable=too-many-inst
def check_rcmaj_optime(client, node):
"""Return True if all nodes have caught up with the primary."""
- res = client.admin.command({"replSetGetStatus": 1})
+ # TODO SERVER-40078: The server is reporting invalid
+ # dates in its response to the replSetGetStatus
+ # command
+ try:
+ res = client.admin.command({"replSetGetStatus": 1})
+ except bson.errors.InvalidBSON:
+ return False
read_concern_majority_optime = res["optimes"]["readConcernMajorityOpTime"]
- if read_concern_majority_optime >= primary_optime:
+ if (read_concern_majority_optime["t"] == primary_optime["t"]
+ and read_concern_majority_optime["ts"] >= primary_optime["ts"]):
up_to_date_nodes.add(node.port)
return len(up_to_date_nodes) == len(self.nodes)
@@ -303,7 +309,15 @@ class ReplicaSetFixture(interface.ReplFixture): # pylint: disable=too-many-inst
client_admin = client["admin"]
while True:
- status = client_admin.command("replSetGetStatus")
+ # TODO SERVER-40078: The server is reporting invalid
+ # dates in its response to the replSetGetStatus
+ # command
+ try:
+ status = client_admin.command("replSetGetStatus")
+ except bson.errors.InvalidBSON:
+ time.sleep(0.1)
+ continue
+
# The `lastStableRecoveryTimestamp` field contains a stable timestamp guaranteed to
# exist on storage engine recovery to a stable timestamp.
last_stable_recovery_timestamp = status.get("lastStableRecoveryTimestamp", None)
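
Note: both retry sites above share one shape: treat bson.errors.InvalidBSON raised while decoding the replSetGetStatus response as transient (per the SERVER-40078 TODO) and poll again. A hedged sketch of a helper this could be factored into; run_replset_get_status is a hypothetical name, and the bson package ships with pymongo:

    import time

    import bson.errors

    def run_replset_get_status(client, interval=0.1):
        # Retry until the server returns a response that decodes cleanly.
        while True:
            try:
                return client.admin.command({"replSetGetStatus": 1})
            except bson.errors.InvalidBSON:
                # TODO SERVER-40078: the server may report invalid dates here.
                time.sleep(interval)
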
diff --git a/buildscripts/resmokelib/testing/fixtures/shardedcluster.py b/buildscripts/resmokelib/testing/fixtures/shardedcluster.py
index f646910a3bb..b3b744095ef 100644
--- a/buildscripts/resmokelib/testing/fixtures/shardedcluster.py
+++ b/buildscripts/resmokelib/testing/fixtures/shardedcluster.py
@@ -1,7 +1,5 @@
"""Sharded cluster fixture for executing JSTests against."""
-from __future__ import absolute_import
-
import os.path
import time
@@ -69,7 +67,7 @@ class ShardedClusterFixture(interface.Fixture): # pylint: disable=too-many-inst
self.configsvr.setup()
if not self.shards:
- for i in xrange(self.num_shards):
+ for i in range(self.num_shards):
if self.num_rs_nodes_per_shard is None:
shard = self._new_standalone_shard(i)
elif isinstance(self.num_rs_nodes_per_shard, int):
diff --git a/buildscripts/resmokelib/testing/fixtures/standalone.py b/buildscripts/resmokelib/testing/fixtures/standalone.py
index 4174462c07e..0265832dbcb 100644
--- a/buildscripts/resmokelib/testing/fixtures/standalone.py
+++ b/buildscripts/resmokelib/testing/fixtures/standalone.py
@@ -1,7 +1,5 @@
"""Standalone mongod fixture for executing JSTests against."""
-from __future__ import absolute_import
-
import os
import os.path
import time
diff --git a/buildscripts/resmokelib/testing/fixtures/yesfixture.py b/buildscripts/resmokelib/testing/fixtures/yesfixture.py
index 618ba8a48cc..eaaf0a1be56 100644
--- a/buildscripts/resmokelib/testing/fixtures/yesfixture.py
+++ b/buildscripts/resmokelib/testing/fixtures/yesfixture.py
@@ -1,7 +1,5 @@
"""Fixture for generating lots of log messages."""
-from __future__ import absolute_import
-
import signal
from . import interface
diff --git a/buildscripts/resmokelib/testing/hook_test_archival.py b/buildscripts/resmokelib/testing/hook_test_archival.py
index b0ef3725eb2..dd3ec6fbf9b 100644
--- a/buildscripts/resmokelib/testing/hook_test_archival.py
+++ b/buildscripts/resmokelib/testing/hook_test_archival.py
@@ -1,7 +1,5 @@
"""Enable support for archiving tests or hooks."""
-from __future__ import absolute_import
-
import os
import threading
diff --git a/buildscripts/resmokelib/testing/hooks/__init__.py b/buildscripts/resmokelib/testing/hooks/__init__.py
index 82772aa25da..e24cf9eae77 100644
--- a/buildscripts/resmokelib/testing/hooks/__init__.py
+++ b/buildscripts/resmokelib/testing/hooks/__init__.py
@@ -5,8 +5,6 @@ by allowing special code to be executed before or after each test, and
before or after each suite.
"""
-from __future__ import absolute_import
-
from .interface import make_hook
from ...utils import autoloader as _autoloader
diff --git a/buildscripts/resmokelib/testing/hooks/cleanup.py b/buildscripts/resmokelib/testing/hooks/cleanup.py
index ebbda2f1edb..0abf025f588 100644
--- a/buildscripts/resmokelib/testing/hooks/cleanup.py
+++ b/buildscripts/resmokelib/testing/hooks/cleanup.py
@@ -1,7 +1,5 @@
"""Test hook for cleaning up data files created by the fixture."""
-from __future__ import absolute_import
-
import os
from . import interface
@@ -22,8 +20,9 @@ class CleanEveryN(interface.Hook):
# Try to isolate what test triggers the leak by restarting the fixture each time.
if "detect_leaks=1" in os.getenv("ASAN_OPTIONS", ""):
- self.logger.info("ASAN_OPTIONS environment variable set to detect leaks, so restarting"
- " the fixture after each test instead of after every %d.", n)
+ self.logger.info(
+ "ASAN_OPTIONS environment variable set to detect leaks, so restarting"
+ " the fixture after each test instead of after every %d.", n)
n = 1
self.n = n # pylint: disable=invalid-name
diff --git a/buildscripts/resmokelib/testing/hooks/cleanup_concurrency_workloads.py b/buildscripts/resmokelib/testing/hooks/cleanup_concurrency_workloads.py
index 4719b806cf2..9f5e0957514 100644
--- a/buildscripts/resmokelib/testing/hooks/cleanup_concurrency_workloads.py
+++ b/buildscripts/resmokelib/testing/hooks/cleanup_concurrency_workloads.py
@@ -1,7 +1,5 @@
"""Test hook for dropping databases created by the fixture."""
-from __future__ import absolute_import
-
import copy
from buildscripts.resmokelib import utils
diff --git a/buildscripts/resmokelib/testing/hooks/collect_embedded_resources.py b/buildscripts/resmokelib/testing/hooks/collect_embedded_resources.py
index f6ddffc3963..f39399d6dc8 100644
--- a/buildscripts/resmokelib/testing/hooks/collect_embedded_resources.py
+++ b/buildscripts/resmokelib/testing/hooks/collect_embedded_resources.py
@@ -1,7 +1,5 @@
"""Module for generating and collecting embedded resource results."""
-from __future__ import absolute_import
-
import os
from buildscripts.mobile import adb_monitor
diff --git a/buildscripts/resmokelib/testing/hooks/combine_benchmark_results.py b/buildscripts/resmokelib/testing/hooks/combine_benchmark_results.py
index 77f0263b2cc..ec4b6d53235 100644
--- a/buildscripts/resmokelib/testing/hooks/combine_benchmark_results.py
+++ b/buildscripts/resmokelib/testing/hooks/combine_benchmark_results.py
@@ -1,8 +1,5 @@
"""Module for generating the test results file fed into the perf plugin."""
-from __future__ import absolute_import
-from __future__ import division
-
import collections
import datetime
import json
@@ -71,7 +68,7 @@ class CombineBenchmarkResults(interface.Hook):
"results": []
}
- for name, report in self.benchmark_reports.items():
+ for name, report in list(self.benchmark_reports.items()):
test_report = {
"name": name, "context": report.context._asdict(),
"results": report.generate_perf_plugin_dict()
@@ -168,7 +165,7 @@ class _BenchmarkThreadsReport(object):
"""
res = {}
- for thread_count, reports in self.thread_benchmark_map.items():
+ for thread_count, reports in list(self.thread_benchmark_map.items()):
thread_report = {
"error_values": [0 for _ in range(len(reports))],
"ops_per_sec_values": [] # This is actually storing latency per op, not ops/s
diff --git a/buildscripts/resmokelib/testing/hooks/combine_benchrun_embedded_results.py b/buildscripts/resmokelib/testing/hooks/combine_benchrun_embedded_results.py
index b70e57d8741..62f442dba32 100644
--- a/buildscripts/resmokelib/testing/hooks/combine_benchrun_embedded_results.py
+++ b/buildscripts/resmokelib/testing/hooks/combine_benchrun_embedded_results.py
@@ -1,8 +1,5 @@
"""Module for generating the test results file fed into the perf plugin."""
-from __future__ import absolute_import
-from __future__ import division
-
import collections
import datetime
import glob
@@ -62,7 +59,7 @@ class CombineBenchrunEmbeddedResults(cbr.CombineBenchmarkResults):
"results": []
}
- for name, report in self.benchmark_reports.items():
+ for name, report in list(self.benchmark_reports.items()):
test_report = {"name": name, "results": report.generate_perf_plugin_dict()}
perf_report["results"].append(test_report)
@@ -143,7 +140,7 @@ class _BenchrunEmbeddedThreadsReport(object):
"""
res = {}
- for thread_count, reports in self.thread_benchmark_map.items():
+ for thread_count, reports in list(self.thread_benchmark_map.items()):
thread_report = {"error_values": [], "ops_per_sec_values": []}
for report in reports:
diff --git a/buildscripts/resmokelib/testing/hooks/dbhash.py b/buildscripts/resmokelib/testing/hooks/dbhash.py
index 40caa5149c6..557c32bb29d 100644
--- a/buildscripts/resmokelib/testing/hooks/dbhash.py
+++ b/buildscripts/resmokelib/testing/hooks/dbhash.py
@@ -1,7 +1,5 @@
"""Test hook for verifying data consistency across a replica set."""
-from __future__ import absolute_import
-
import os.path
from . import jsfile
diff --git a/buildscripts/resmokelib/testing/hooks/dbhash_background.py b/buildscripts/resmokelib/testing/hooks/dbhash_background.py
index b8122f458b8..9cea6d6957e 100644
--- a/buildscripts/resmokelib/testing/hooks/dbhash_background.py
+++ b/buildscripts/resmokelib/testing/hooks/dbhash_background.py
@@ -4,8 +4,6 @@ Unlike dbhash.py, this version of the hook runs continuously in a background thre
running.
"""
-from __future__ import absolute_import
-
import os.path
import sys
import threading
@@ -32,13 +30,14 @@ class CheckReplDBHashInBackground(jsfile.JSHook):
client = self.fixture.mongo_client()
server_status = client.admin.command("serverStatus")
if not server_status["storageEngine"].get("supportsSnapshotReadConcern", False):
- self.logger.info("Not enabling the background thread because '%s' storage engine"
- " doesn't support snapshot reads.",
- server_status["storageEngine"]["name"])
+ self.logger.info(
+ "Not enabling the background thread because '%s' storage engine"
+ " doesn't support snapshot reads.", server_status["storageEngine"]["name"])
return
if not server_status["storageEngine"].get("persistent", False):
- self.logger.info("Not enabling the background thread because '%s' storage engine"
- " is not persistent.", server_status["storageEngine"]["name"])
+ self.logger.info(
+ "Not enabling the background thread because '%s' storage engine"
+ " is not persistent.", server_status["storageEngine"]["name"])
return
self._background_job = _BackgroundJob()
diff --git a/buildscripts/resmokelib/testing/hooks/drop_sharded_collections.py b/buildscripts/resmokelib/testing/hooks/drop_sharded_collections.py
index 90752571909..9a5f15715b1 100644
--- a/buildscripts/resmokelib/testing/hooks/drop_sharded_collections.py
+++ b/buildscripts/resmokelib/testing/hooks/drop_sharded_collections.py
@@ -1,7 +1,5 @@
"""Hook for cleaning up sharded collections created during tests."""
-from __future__ import absolute_import
-
import os.path
from . import jsfile
diff --git a/buildscripts/resmokelib/testing/hooks/initialsync.py b/buildscripts/resmokelib/testing/hooks/initialsync.py
index 36ada61ab00..02156068859 100644
--- a/buildscripts/resmokelib/testing/hooks/initialsync.py
+++ b/buildscripts/resmokelib/testing/hooks/initialsync.py
@@ -1,11 +1,10 @@
"""Test hook for verifying correctness of initial sync."""
-from __future__ import absolute_import
-
import os.path
import random
import bson
+import bson.errors
import pymongo.errors
from . import cleanup
@@ -73,28 +72,38 @@ class BackgroundInitialSyncTestCase(jsfile.DynamicJSTestCase):
# If it's been 'n' tests so far, wait for the initial sync node to finish syncing.
if self._hook.tests_run >= self._hook.n:
- self.logger.info("%d tests have been run against the fixture, waiting for initial sync"
- " node to go into SECONDARY state", self._hook.tests_run)
+ self.logger.info(
+ "%d tests have been run against the fixture, waiting for initial sync"
+ " node to go into SECONDARY state", self._hook.tests_run)
self._hook.tests_run = 0
- cmd = bson.SON([("replSetTest", 1), ("waitForMemberState", 2), ("timeoutMillis",
- 20 * 60 * 1000)])
+ cmd = bson.SON([("replSetTest", 1), ("waitForMemberState", 2),
+ ("timeoutMillis", 20 * 60 * 1000)])
sync_node_conn.admin.command(cmd)
# Check if the initial sync node is in SECONDARY state. If it's been 'n' tests, then it
# should have waited to be in SECONDARY state and the test should be marked as a failure.
# Otherwise, we just skip the hook and will check again after the next test.
try:
- state = sync_node_conn.admin.command("replSetGetStatus").get("myState")
+ while True:
+ # TODO SERVER-40078: The server is reporting invalid
+ # dates in its response to the replSetGetStatus
+ # command
+ try:
+ state = sync_node_conn.admin.command("replSetGetStatus").get("myState")
+ break
+ except bson.errors.InvalidBSON:
+ continue
+
if state != 2:
if self._hook.tests_run == 0:
msg = "Initial sync node did not catch up after waiting 20 minutes"
self.logger.exception("{0} failed: {1}".format(self._hook.description, msg))
raise errors.TestFailure(msg)
- self.logger.info("Initial sync node is in state %d, not state SECONDARY (2)."
- " Skipping BackgroundInitialSync hook for %s", state,
- self._base_test_name)
+ self.logger.info(
+ "Initial sync node is in state %d, not state SECONDARY (2)."
+ " Skipping BackgroundInitialSync hook for %s", state, self._base_test_name)
# If we have not restarted initial sync since the last time we ran the data
# validation, restart initial sync with a 20% probability.
@@ -197,8 +206,8 @@ class IntermediateInitialSyncTestCase(jsfile.DynamicJSTestCase):
# Do initial sync round.
self.logger.info("Waiting for initial sync node to go into SECONDARY state")
- cmd = bson.SON([("replSetTest", 1), ("waitForMemberState", 2), ("timeoutMillis",
- 20 * 60 * 1000)])
+ cmd = bson.SON([("replSetTest", 1), ("waitForMemberState", 2),
+ ("timeoutMillis", 20 * 60 * 1000)])
sync_node_conn.admin.command(cmd)
# Run data validation and dbhash checking.
diff --git a/buildscripts/resmokelib/testing/hooks/interface.py b/buildscripts/resmokelib/testing/hooks/interface.py
index fe53f820fc5..d8ac37e159d 100644
--- a/buildscripts/resmokelib/testing/hooks/interface.py
+++ b/buildscripts/resmokelib/testing/hooks/interface.py
@@ -1,7 +1,5 @@
"""Interface for customizing the behavior of a test fixture."""
-from __future__ import absolute_import
-
import sys
from ..testcases import interface as testcase
@@ -21,11 +19,9 @@ def make_hook(class_name, *args, **kwargs):
return _HOOKS[class_name](*args, **kwargs)
-class Hook(object):
+class Hook(object, metaclass=registry.make_registry_metaclass(_HOOKS)):
"""Common interface all Hooks will inherit from."""
- __metaclass__ = registry.make_registry_metaclass(_HOOKS) # type: ignore
-
REGISTERED_NAME = registry.LEAVE_UNREGISTERED
def __init__(self, hook_logger, fixture, description):
diff --git a/buildscripts/resmokelib/testing/hooks/jsfile.py b/buildscripts/resmokelib/testing/hooks/jsfile.py
index e95d3d6d780..6eedb3f87c4 100644
--- a/buildscripts/resmokelib/testing/hooks/jsfile.py
+++ b/buildscripts/resmokelib/testing/hooks/jsfile.py
@@ -1,7 +1,5 @@
"""Interface for customizing the behavior of a test fixture by executing a JavaScript file."""
-from __future__ import absolute_import
-
from . import interface
from ..testcases import jstest
from ... import errors
diff --git a/buildscripts/resmokelib/testing/hooks/oplog.py b/buildscripts/resmokelib/testing/hooks/oplog.py
index ceb81bb8fd6..6822eecabf0 100644
--- a/buildscripts/resmokelib/testing/hooks/oplog.py
+++ b/buildscripts/resmokelib/testing/hooks/oplog.py
@@ -1,7 +1,5 @@
"""Test hook for verifying members of a replica set have matching oplogs."""
-from __future__ import absolute_import
-
import os.path
from . import jsfile
diff --git a/buildscripts/resmokelib/testing/hooks/periodic_kill_secondaries.py b/buildscripts/resmokelib/testing/hooks/periodic_kill_secondaries.py
index a6f44278140..daf18ad7671 100644
--- a/buildscripts/resmokelib/testing/hooks/periodic_kill_secondaries.py
+++ b/buildscripts/resmokelib/testing/hooks/periodic_kill_secondaries.py
@@ -1,7 +1,5 @@
"""Test hook for verifying correctness of secondary's behavior during an unclean shutdown."""
-from __future__ import absolute_import
-
import time
import bson
@@ -189,8 +187,9 @@ class PeriodicKillSecondariesTestCase(interface.DynamicTestCase):
for secondary in self.fixture.get_secondaries():
self._check_invariants_as_standalone(secondary)
- self.logger.info("Restarting the secondary on port %d as a replica set node with"
- " its data files intact...", secondary.port)
+ self.logger.info(
+ "Restarting the secondary on port %d as a replica set node with"
+ " its data files intact...", secondary.port)
# Start the 'secondary' mongod back up as part of the replica set and wait for it to
# reach state SECONDARY.
secondary.setup()
@@ -253,12 +252,13 @@ class PeriodicKillSecondariesTestCase(interface.DynamicTestCase):
self.fixture.setup()
self.fixture.await_ready()
- def _check_invariants_as_standalone(self, secondary):
+ def _check_invariants_as_standalone(self, secondary): # pylint: disable=too-many-locals
# pylint: disable=too-many-branches,too-many-statements
# We remove the --replSet option in order to start the node as a standalone.
replset_name = secondary.mongod_options.pop("replSet")
- self.logger.info("Restarting the secondary on port %d as a standalone node with"
- " its data files intact...", secondary.port)
+ self.logger.info(
+ "Restarting the secondary on port %d as a standalone node with"
+ " its data files intact...", secondary.port)
try:
secondary.setup()
diff --git a/buildscripts/resmokelib/testing/hooks/stepdown.py b/buildscripts/resmokelib/testing/hooks/stepdown.py
index fbda653a324..f218fffe49e 100644
--- a/buildscripts/resmokelib/testing/hooks/stepdown.py
+++ b/buildscripts/resmokelib/testing/hooks/stepdown.py
@@ -1,5 +1,4 @@
"""Test hook that periodically makes the primary of a replica set step down."""
-from __future__ import absolute_import
import collections
import os.path
@@ -179,7 +178,7 @@ class _StepdownThread(threading.Thread): # pylint: disable=too-many-instance-at
try:
while True:
- if self._is_stopped():
+ if self.__is_stopped():
break
self._wait_for_permission_or_resume()
now = time.time()
@@ -210,7 +209,7 @@ class _StepdownThread(threading.Thread): # pylint: disable=too-many-instance-at
self.resume()
self.join()
- def _is_stopped(self):
+ def __is_stopped(self):
return self._is_stopped_evt.is_set()
def pause(self):
@@ -234,7 +233,7 @@ class _StepdownThread(threading.Thread): # pylint: disable=too-many-instance-at
def _wait_for_permission_or_resume(self):
# Wait until stop, _stepdown_permitted_file or resume.
if self._stepdown_permitted_file:
- while not os.path.isfile(self._stepdown_permitted_file) and not self._is_stopped():
+ while not os.path.isfile(self._stepdown_permitted_file) and not self.__is_stopped():
# Set a short sleep during busy wait time for self._stepdown_permitted_file.
self._wait(0.1)
else:
diff --git a/buildscripts/resmokelib/testing/hooks/validate.py b/buildscripts/resmokelib/testing/hooks/validate.py
index 3239ddbdc06..81bd42ba6d9 100644
--- a/buildscripts/resmokelib/testing/hooks/validate.py
+++ b/buildscripts/resmokelib/testing/hooks/validate.py
@@ -1,7 +1,5 @@
"""Test hook for verifying the consistency and integrity of collection and index data."""
-from __future__ import absolute_import
-
import os.path
from . import jsfile
diff --git a/buildscripts/resmokelib/testing/hooks/wait_for_replication.py b/buildscripts/resmokelib/testing/hooks/wait_for_replication.py
index fbd786a170c..8720f9456e6 100644
--- a/buildscripts/resmokelib/testing/hooks/wait_for_replication.py
+++ b/buildscripts/resmokelib/testing/hooks/wait_for_replication.py
@@ -1,7 +1,5 @@
"""Test hook to wait for replication to complete on a replica set."""
-from __future__ import absolute_import
-
import time
from buildscripts.resmokelib import core
diff --git a/buildscripts/resmokelib/testing/job.py b/buildscripts/resmokelib/testing/job.py
index 908ae85832a..9b5023d0656 100644
--- a/buildscripts/resmokelib/testing/job.py
+++ b/buildscripts/resmokelib/testing/job.py
@@ -1,7 +1,5 @@
"""Enable running tests simultaneously by processing them from a multi-consumer queue."""
-from __future__ import absolute_import
-
import sys
import time
@@ -175,8 +173,8 @@ class Job(object): # pylint: disable=too-many-instance-attributes
test.short_description())
self.report.setFailure(test, return_code=2)
# Always fail fast if the fixture fails.
- raise errors.StopExecution("%s not running after %s" % (self.fixture,
- test.short_description()))
+ raise errors.StopExecution(
+ "%s not running after %s" % (self.fixture, test.short_description()))
finally:
success = self.report.find_test_info(test).status == "pass"
if self.archival:
diff --git a/buildscripts/resmokelib/testing/report.py b/buildscripts/resmokelib/testing/report.py
index fad0d20a0a2..3365a17075a 100644
--- a/buildscripts/resmokelib/testing/report.py
+++ b/buildscripts/resmokelib/testing/report.py
@@ -3,8 +3,6 @@
This is used to support additional test status and timing information for the report.json file.
"""
-from __future__ import absolute_import
-
import copy
import threading
import time
diff --git a/buildscripts/resmokelib/testing/suite.py b/buildscripts/resmokelib/testing/suite.py
index e0f2dda5151..b369261e147 100644
--- a/buildscripts/resmokelib/testing/suite.py
+++ b/buildscripts/resmokelib/testing/suite.py
@@ -1,7 +1,5 @@
"""Holder for the (test kind, list of tests) pair with additional metadata their execution."""
-from __future__ import absolute_import
-
import itertools
import threading
import time
@@ -269,7 +267,7 @@ class Suite(object): # pylint: disable=too-many-instance-attributes
sb.append("Executed %d times in %0.2f seconds:" % (num_iterations, total_time_taken))
combined_summary = _summary.Summary(0, 0.0, 0, 0, 0, 0)
- for iteration in xrange(num_iterations):
+ for iteration in range(num_iterations):
# Summarize each execution as a bulleted list of results.
bulleter_sb = []
summary = self._summarize_report(reports[iteration], start_times[iteration],
@@ -341,8 +339,8 @@ class Suite(object): # pylint: disable=too-many-instance-attributes
def log_summaries(logger, suites, time_taken):
"""Log summary of all suites."""
sb = []
- sb.append("Summary of all suites: %d suites ran in %0.2f seconds" % (len(suites),
- time_taken))
+ sb.append(
+ "Summary of all suites: %d suites ran in %0.2f seconds" % (len(suites), time_taken))
for suite in suites:
suite_sb = []
suite.summarize(suite_sb)
diff --git a/buildscripts/resmokelib/testing/summary.py b/buildscripts/resmokelib/testing/summary.py
index dc92e0b5b34..a5d439b64b0 100644
--- a/buildscripts/resmokelib/testing/summary.py
+++ b/buildscripts/resmokelib/testing/summary.py
@@ -1,7 +1,5 @@
"""Holder for summary information about a test suite."""
-from __future__ import absolute_import
-
import collections
Summary = collections.namedtuple(
@@ -12,6 +10,6 @@ Summary = collections.namedtuple(
def combine(summary1, summary2):
"""Return a summary representing the sum of 'summary1' and 'summary2'."""
args = []
- for i in xrange(len(Summary._fields)):
+ for i in range(len(Summary._fields)):
args.append(summary1[i] + summary2[i])
return Summary._make(args)
diff --git a/buildscripts/resmokelib/testing/testcases/__init__.py b/buildscripts/resmokelib/testing/testcases/__init__.py
index 52869d99de8..d828d97a0b3 100644
--- a/buildscripts/resmokelib/testing/testcases/__init__.py
+++ b/buildscripts/resmokelib/testing/testcases/__init__.py
@@ -1,7 +1,5 @@
"""Package containing subclasses of unittest.TestCase."""
-from __future__ import absolute_import
-
from .interface import make_test_case
from ...utils import autoloader as _autoloader
diff --git a/buildscripts/resmokelib/testing/testcases/benchmark_test.py b/buildscripts/resmokelib/testing/testcases/benchmark_test.py
index ea506c4f7e5..e7760799e42 100644
--- a/buildscripts/resmokelib/testing/testcases/benchmark_test.py
+++ b/buildscripts/resmokelib/testing/testcases/benchmark_test.py
@@ -1,7 +1,5 @@
"""The unittest.TestCase for tests using a MongoDB vendored version of Google Benchmark."""
-from __future__ import absolute_import
-
from buildscripts.resmokelib import config as _config
from buildscripts.resmokelib import core
from buildscripts.resmokelib import parser
@@ -52,7 +50,7 @@ class BenchmarkTestCase(interface.ProcessTestCase):
"benchmark_repetitions": _config.BENCHMARK_REPETITIONS
}
- for key, value in resmoke_bm_options.items():
+ for key, value in list(resmoke_bm_options.items()):
if value is not None:
# 4. sanitize options before passing them to Benchmark's command line.
if key == "benchmark_min_time":
diff --git a/buildscripts/resmokelib/testing/testcases/benchrun_embedded_test.py b/buildscripts/resmokelib/testing/testcases/benchrun_embedded_test.py
index 89b95424cdb..d4666c94074 100644
--- a/buildscripts/resmokelib/testing/testcases/benchrun_embedded_test.py
+++ b/buildscripts/resmokelib/testing/testcases/benchrun_embedded_test.py
@@ -1,7 +1,5 @@
"""The unittest.TestCase for tests using benchrun embedded (mongoebench)."""
-from __future__ import absolute_import
-
import os
import posixpath
@@ -73,7 +71,7 @@ class BenchrunEmbeddedTestCase( # pylint: disable=too-many-instance-attributes
# 3. Override Benchmark options with options set through resmoke's command line.
resmoke_benchrun_options = {"dbpath": self.dbpath, "time": _config.BENCHMARK_MIN_TIME}
- for key, value in resmoke_benchrun_options.items():
+ for key, value in list(resmoke_benchrun_options.items()):
if value is not None:
# 4. sanitize options before passing them to Benchmark's command line.
if key == "time":
@@ -104,7 +102,7 @@ class BenchrunEmbeddedTestCase( # pylint: disable=too-many-instance-attributes
def run_test(self):
"""Run the test for specified number of iterations."""
- for iter_num in xrange(self.benchrun_repetitions):
+ for iter_num in range(self.benchrun_repetitions):
# Set the output file for each iteration.
local_report_path = self._report_path(iter_num)
device_report_path = self._device_report_path(iter_num)
@@ -134,8 +132,8 @@ class BenchrunEmbeddedTestCase( # pylint: disable=too-many-instance-attributes
def _report_dir(self):
"""Return the report directory. Reports are stored in <report_root>/<testname>/<thread>."""
- return os.path.join(self.report_root, self.short_name(), "thread{}".format(
- self.benchrun_threads))
+ return os.path.join(self.report_root, self.short_name(),
+ "thread{}".format(self.benchrun_threads))
@staticmethod
def _report_name(iter_num):
diff --git a/buildscripts/resmokelib/testing/testcases/cpp_integration_test.py b/buildscripts/resmokelib/testing/testcases/cpp_integration_test.py
index df6d7c9fa41..c0313046a20 100644
--- a/buildscripts/resmokelib/testing/testcases/cpp_integration_test.py
+++ b/buildscripts/resmokelib/testing/testcases/cpp_integration_test.py
@@ -1,7 +1,5 @@
"""The unittest.TestCase for C++ integration tests."""
-from __future__ import absolute_import
-
from . import interface
from ... import core
from ... import utils
diff --git a/buildscripts/resmokelib/testing/testcases/cpp_unittest.py b/buildscripts/resmokelib/testing/testcases/cpp_unittest.py
index b9fb427d0da..c54b45a7792 100644
--- a/buildscripts/resmokelib/testing/testcases/cpp_unittest.py
+++ b/buildscripts/resmokelib/testing/testcases/cpp_unittest.py
@@ -1,7 +1,5 @@
"""The unittest.TestCase for C++ unit tests."""
-from __future__ import absolute_import
-
from . import interface
from ... import core
from ... import utils
diff --git a/buildscripts/resmokelib/testing/testcases/dbtest.py b/buildscripts/resmokelib/testing/testcases/dbtest.py
index 4cfbb8c6385..44ea8410c58 100644
--- a/buildscripts/resmokelib/testing/testcases/dbtest.py
+++ b/buildscripts/resmokelib/testing/testcases/dbtest.py
@@ -1,7 +1,5 @@
"""The unittest.TestCase for dbtests."""
-from __future__ import absolute_import
-
import os
import os.path
diff --git a/buildscripts/resmokelib/testing/testcases/fsm_workload_test.py b/buildscripts/resmokelib/testing/testcases/fsm_workload_test.py
index 6cbda33abe9..d184fe84fc5 100644
--- a/buildscripts/resmokelib/testing/testcases/fsm_workload_test.py
+++ b/buildscripts/resmokelib/testing/testcases/fsm_workload_test.py
@@ -1,7 +1,5 @@
"""The unittest.TestCase for FSM workloads."""
-from __future__ import absolute_import
-
import hashlib
import threading
@@ -97,5 +95,5 @@ class ParallelFSMWorkloadTestCase(FSMWorkloadTestCase):
"""Get an unique identifier for a workload group."""
uid = hashlib.md5()
for workload_name in sorted(selected_tests):
- uid.update(workload_name)
+ uid.update(workload_name.encode("utf-8"))
return uid.hexdigest()
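
Note: hashlib digests accept only bytes in Python 3, hence the explicit encode above. A minimal, self-contained version of the grouping helper; the logic mirrors the hunk, while the function name here is illustrative:

    import hashlib

    def workload_group_id(selected_tests):
        uid = hashlib.md5()
        for workload_name in sorted(selected_tests):
            uid.update(workload_name.encode("utf-8"))  # md5.update() requires bytes
        return uid.hexdigest()

    # Order-insensitive: sorting first makes the id stable across selections.
    assert workload_group_id(["b.js", "a.js"]) == workload_group_id(["a.js", "b.js"])
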
diff --git a/buildscripts/resmokelib/testing/testcases/gennylib_test.py b/buildscripts/resmokelib/testing/testcases/gennylib_test.py
index 91200b6c6c1..96eb08c6cb5 100644
--- a/buildscripts/resmokelib/testing/testcases/gennylib_test.py
+++ b/buildscripts/resmokelib/testing/testcases/gennylib_test.py
@@ -1,7 +1,5 @@
"""The unittest.TestCase for gennylib integration tests."""
-from __future__ import absolute_import
-
from . import interface
from ... import core
from ... import utils
diff --git a/buildscripts/resmokelib/testing/testcases/gennytest.py b/buildscripts/resmokelib/testing/testcases/gennytest.py
index 1db9ca48e94..11e51014803 100644
--- a/buildscripts/resmokelib/testing/testcases/gennytest.py
+++ b/buildscripts/resmokelib/testing/testcases/gennytest.py
@@ -1,7 +1,5 @@
"""The unittest.TestCase for genny."""
-from __future__ import absolute_import
-
import os
import os.path
diff --git a/buildscripts/resmokelib/testing/testcases/interface.py b/buildscripts/resmokelib/testing/testcases/interface.py
index 1f078828b1c..e1c1e2faa0c 100644
--- a/buildscripts/resmokelib/testing/testcases/interface.py
+++ b/buildscripts/resmokelib/testing/testcases/interface.py
@@ -3,8 +3,6 @@
This is used to perform the actual test case.
"""
-from __future__ import absolute_import
-
import os
import os.path
import unittest
@@ -23,11 +21,9 @@ def make_test_case(test_kind, *args, **kwargs):
return _TEST_CASES[test_kind](*args, **kwargs)
-class TestCase(unittest.TestCase): # pylint: disable=too-many-instance-attributes
+class TestCase(unittest.TestCase, metaclass=registry.make_registry_metaclass(_TEST_CASES)): # pylint: disable=too-many-instance-attributes
"""A test case to execute."""
- __metaclass__ = registry.make_registry_metaclass(_TEST_CASES) # type: ignore
-
REGISTERED_NAME = registry.LEAVE_UNREGISTERED
def __init__(self, logger, test_kind, test_name, dynamic=False):
@@ -37,10 +33,10 @@ class TestCase(unittest.TestCase): # pylint: disable=too-many-instance-attribut
if not isinstance(logger, logging.Logger):
raise TypeError("logger must be a Logger instance")
- if not isinstance(test_kind, basestring):
+ if not isinstance(test_kind, str):
raise TypeError("test_kind must be a string")
- if not isinstance(test_name, basestring):
+ if not isinstance(test_name, str):
raise TypeError("test_name must be a string")
self._id = uuid.uuid4()
diff --git a/buildscripts/resmokelib/testing/testcases/json_schema_test.py b/buildscripts/resmokelib/testing/testcases/json_schema_test.py
index 08e5a2d71a8..6f40b8af714 100644
--- a/buildscripts/resmokelib/testing/testcases/json_schema_test.py
+++ b/buildscripts/resmokelib/testing/testcases/json_schema_test.py
@@ -1,7 +1,5 @@
"""The unittest.TestCase for JSON Schema tests."""
-from __future__ import absolute_import
-
from buildscripts.resmokelib import config
from buildscripts.resmokelib import core
from buildscripts.resmokelib import utils
diff --git a/buildscripts/resmokelib/testing/testcases/jsrunnerfile.py b/buildscripts/resmokelib/testing/testcases/jsrunnerfile.py
index c2da41faf37..ece5e2ceb15 100644
--- a/buildscripts/resmokelib/testing/testcases/jsrunnerfile.py
+++ b/buildscripts/resmokelib/testing/testcases/jsrunnerfile.py
@@ -1,7 +1,5 @@
"""The unittest.TestCase for tests with a static JavaScript runner file."""
-from __future__ import absolute_import
-
from buildscripts.resmokelib import config
from buildscripts.resmokelib import core
from buildscripts.resmokelib import utils
diff --git a/buildscripts/resmokelib/testing/testcases/jstest.py b/buildscripts/resmokelib/testing/testcases/jstest.py
index 4d36ac965b0..6b6a5ee45e2 100644
--- a/buildscripts/resmokelib/testing/testcases/jstest.py
+++ b/buildscripts/resmokelib/testing/testcases/jstest.py
@@ -1,7 +1,5 @@
"""The unittest.TestCase for JavaScript tests."""
-from __future__ import absolute_import
-
import os
import os.path
import sys
@@ -201,7 +199,7 @@ class JSTestCase(interface.ProcessTestCase):
test_cases = []
try:
# If there are multiple clients, make a new thread for each client.
- for thread_id in xrange(self.num_clients):
+ for thread_id in range(self.num_clients):
logger = self.logger.new_test_thread_logger(self.test_kind, str(thread_id))
test_case = self._create_test_case_for_thread(logger, thread_id)
test_cases.append(test_case)
diff --git a/buildscripts/resmokelib/testing/testcases/mongos_test.py b/buildscripts/resmokelib/testing/testcases/mongos_test.py
index 9914ba8677f..f00a93a9bdb 100644
--- a/buildscripts/resmokelib/testing/testcases/mongos_test.py
+++ b/buildscripts/resmokelib/testing/testcases/mongos_test.py
@@ -1,7 +1,5 @@
"""The unittest.TestCase for mongos --test."""
-from __future__ import absolute_import
-
from . import interface
from ... import config
from ... import core
diff --git a/buildscripts/resmokelib/testing/testcases/mql_model_haskell_test.py b/buildscripts/resmokelib/testing/testcases/mql_model_haskell_test.py
index 7911aa1b7d6..fff521ddae5 100644
--- a/buildscripts/resmokelib/testing/testcases/mql_model_haskell_test.py
+++ b/buildscripts/resmokelib/testing/testcases/mql_model_haskell_test.py
@@ -1,7 +1,5 @@
"""The unittest.TestCase for MQL Haskell tests."""
-from __future__ import absolute_import
-
import os
import os.path
diff --git a/buildscripts/resmokelib/testing/testcases/mql_model_mongod_test.py b/buildscripts/resmokelib/testing/testcases/mql_model_mongod_test.py
index eddfef2b7b3..249b0f18fb2 100644
--- a/buildscripts/resmokelib/testing/testcases/mql_model_mongod_test.py
+++ b/buildscripts/resmokelib/testing/testcases/mql_model_mongod_test.py
@@ -1,7 +1,5 @@
"""The unittest.TestCase for MQL MongoD Model tests."""
-from __future__ import absolute_import
-
import os
import os.path
diff --git a/buildscripts/resmokelib/testing/testcases/multi_stmt_txn_test.py b/buildscripts/resmokelib/testing/testcases/multi_stmt_txn_test.py
index 1e790612153..81ddfa289ae 100644
--- a/buildscripts/resmokelib/testing/testcases/multi_stmt_txn_test.py
+++ b/buildscripts/resmokelib/testing/testcases/multi_stmt_txn_test.py
@@ -1,7 +1,5 @@
"""unittest.TestCase for multi-statement transaction passthrough tests."""
-from __future__ import absolute_import
-
from buildscripts.resmokelib import config
from buildscripts.resmokelib import core
from buildscripts.resmokelib import utils
diff --git a/buildscripts/resmokelib/testing/testcases/sleeptest.py b/buildscripts/resmokelib/testing/testcases/sleeptest.py
index 163100095d3..4f073ee6f25 100644
--- a/buildscripts/resmokelib/testing/testcases/sleeptest.py
+++ b/buildscripts/resmokelib/testing/testcases/sleeptest.py
@@ -1,7 +1,5 @@
"""The unittest.TestCase for sleeping a given amount of time."""
-from __future__ import absolute_import
-
import time
from . import interface
diff --git a/buildscripts/resmokelib/utils/__init__.py b/buildscripts/resmokelib/utils/__init__.py
index 5f60f162696..02681bca0b5 100644
--- a/buildscripts/resmokelib/utils/__init__.py
+++ b/buildscripts/resmokelib/utils/__init__.py
@@ -1,8 +1,5 @@
"""Helper functions."""
-from __future__ import absolute_import
-from __future__ import print_function
-
import contextlib
import os.path
import shutil
@@ -40,19 +37,7 @@ def default_if_none(value, default):
def rmtree(path, **kwargs):
- """Wrap shutil.rmtreee.
-
- Use a UTF-8 unicode path if Windows.
- See https://bugs.python.org/issue24672, where shutil.rmtree can fail with UTF-8.
- Use a bytes path to rmtree, otherwise.
- See https://github.com/pypa/setuptools/issues/706.
- """
- if is_windows():
- if not isinstance(path, unicode):
- path = unicode(path, "utf-8")
- else:
- if isinstance(path, unicode):
- path = path.encode("utf-8")
+ """Wrap shutil.rmtree."""
shutil.rmtree(path, **kwargs)
@@ -72,12 +57,12 @@ def remove_if_exists(path):
def is_string_list(lst):
"""Return true if 'lst' is a list of strings, and false otherwise."""
- return isinstance(lst, list) and all(isinstance(x, basestring) for x in lst)
+ return isinstance(lst, list) and all(isinstance(x, str) for x in lst)
def is_string_set(value):
"""Return true if 'value' is a set of strings, and false otherwise."""
- return isinstance(value, set) and all(isinstance(x, basestring) for x in value)
+ return isinstance(value, set) and all(isinstance(x, str) for x in value)
def is_js_file(filename):
diff --git a/buildscripts/resmokelib/utils/archival.py b/buildscripts/resmokelib/utils/archival.py
index 8ccb3127f47..a31f8fd2d31 100644
--- a/buildscripts/resmokelib/utils/archival.py
+++ b/buildscripts/resmokelib/utils/archival.py
@@ -1,8 +1,6 @@
"""Archival utility."""
-from __future__ import absolute_import
-
-import Queue
+import queue
import collections
import json
import math
@@ -45,7 +43,7 @@ def file_list_size(files):
def directory_size(directory):
"""Return size (in bytes) of files in 'directory' tree."""
dir_bytes = 0
- for root_dir, _, files in os.walk(unicode(directory)):
+ for root_dir, _, files in os.walk(str(directory)):
for name in files:
full_name = os.path.join(root_dir, name)
try:
@@ -103,7 +101,7 @@ class Archival(object): # pylint: disable=too-many-instance-attributes
self._lock = threading.Lock()
# Start the worker thread to update the 'archival_json_file'.
- self._archive_file_queue = Queue.Queue()
+ self._archive_file_queue = queue.Queue()
self._archive_file_worker = threading.Thread(target=self._update_archive_file_wkr,
args=(self._archive_file_queue,
logger), name="archive_file_worker")
@@ -115,10 +113,10 @@ class Archival(object): # pylint: disable=too-many-instance-attributes
self.s3_client = s3_client
# Start the worker thread which uploads the archive.
- self._upload_queue = Queue.Queue()
- self._upload_worker = threading.Thread(target=self._upload_to_s3_wkr,
- args=(self._upload_queue, self._archive_file_queue,
- logger, self.s3_client), name="upload_worker")
+ self._upload_queue = queue.Queue()
+ self._upload_worker = threading.Thread(
+ target=self._upload_to_s3_wkr, args=(self._upload_queue, self._archive_file_queue,
+ logger, self.s3_client), name="upload_worker")
self._upload_worker.setDaemon(True)
self._upload_worker.start()
@@ -161,14 +159,14 @@ class Archival(object): # pylint: disable=too-many-instance-attributes
return status, message
@staticmethod
- def _update_archive_file_wkr(queue, logger):
- """Worker thread: Update the archival JSON file from 'queue'."""
+ def _update_archive_file_wkr(work_queue, logger):
+ """Worker thread: Update the archival JSON file from 'work_queue'."""
archival_json = []
while True:
- archive_args = queue.get()
+ archive_args = work_queue.get()
# Exit worker thread when sentinel is received.
if archive_args is None:
- queue.task_done()
+ work_queue.task_done()
break
archival_record = {
"name": archive_args.display_name, "link": archive_args.remote_file,
@@ -179,17 +177,17 @@ class Archival(object): # pylint: disable=too-many-instance-attributes
archival_json.append(archival_record)
with open(archive_args.archival_file, "w") as archival_fh:
json.dump(archival_json, archival_fh)
- queue.task_done()
+ work_queue.task_done()
@staticmethod
- def _upload_to_s3_wkr(queue, archive_file_queue, logger, s3_client):
- """Worker thread: Upload to S3 from 'queue', dispatch to 'archive_file_queue'."""
+ def _upload_to_s3_wkr(work_queue, archive_file_work_queue, logger, s3_client):
+ """Worker thread: Upload to S3 from 'work_queue', dispatch to 'archive_file_work_queue'."""
while True:
- upload_args = queue.get()
+ upload_args = work_queue.get()
# Exit worker thread when sentinel is received.
if upload_args is None:
- queue.task_done()
- archive_file_queue.put(None)
+ work_queue.task_done()
+ archive_file_work_queue.put(None)
break
extra_args = {"ContentType": upload_args.content_type, "ACL": "public-read"}
logger.debug("Uploading to S3 %s to bucket %s path %s", upload_args.local_file,
@@ -212,10 +210,10 @@ class Archival(object): # pylint: disable=too-many-instance-attributes
remote_file = "https://s3.amazonaws.com/{}/{}".format(upload_args.s3_bucket,
upload_args.s3_path)
if upload_completed:
- archive_file_queue.put(
+ archive_file_work_queue.put(
ArchiveArgs(upload_args.archival_file, upload_args.display_name, remote_file))
- queue.task_done()
+ work_queue.task_done()
def _archive_files(self, display_name, input_files, s3_bucket, s3_path):
"""
diff --git a/buildscripts/resmokelib/utils/autoloader.py b/buildscripts/resmokelib/utils/autoloader.py
index 73b58563451..ab1ace84798 100644
--- a/buildscripts/resmokelib/utils/autoloader.py
+++ b/buildscripts/resmokelib/utils/autoloader.py
@@ -1,7 +1,5 @@
"""Utility for loading all modules within a package."""
-from __future__ import absolute_import
-
import importlib
import pkgutil
diff --git a/buildscripts/resmokelib/utils/globstar.py b/buildscripts/resmokelib/utils/globstar.py
index 1e016875f94..5857870e627 100644
--- a/buildscripts/resmokelib/utils/globstar.py
+++ b/buildscripts/resmokelib/utils/globstar.py
@@ -1,7 +1,5 @@
"""Filename globbing utility."""
-from __future__ import absolute_import
-
import glob as _glob
import os
import os.path
@@ -134,7 +132,7 @@ def _list_dir(pathname):
"""
try:
- (_root, dirs, files) = os.walk(pathname).next()
+ (_root, dirs, files) = next(os.walk(pathname))
return (dirs, files)
except StopIteration:
return None # 'pathname' directory does not exist
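
Note: iterator objects lost their .next() method in Python 3 (it became __next__), so the portable spelling is the next() built-in, as above. A self-contained version:

    import os

    def list_dir(pathname):
        # Return (dirs, files) for 'pathname', or None if it does not exist.
        try:
            _root, dirs, files = next(os.walk(pathname))
            return dirs, files
        except StopIteration:
            return None
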
diff --git a/buildscripts/resmokelib/utils/jscomment.py b/buildscripts/resmokelib/utils/jscomment.py
index 67758197c5c..21d1cfa783c 100644
--- a/buildscripts/resmokelib/utils/jscomment.py
+++ b/buildscripts/resmokelib/utils/jscomment.py
@@ -1,13 +1,11 @@
"""Utility for parsing JS comments."""
-from __future__ import absolute_import
-
import re
import yaml
# TODO: use a more robust regular expression for matching tags
-_JSTEST_TAGS_RE = re.compile(r".*@tags\s*:\s*(\[[^\]]*\])", re.DOTALL)
+_JSTEST_TAGS_RE = re.compile(rb".*@tags\s*:\s*(\[[^\]]*\])", re.DOTALL)
def get_tags(pathname):
@@ -29,19 +27,19 @@ def get_tags(pathname):
*/
"""
- with open(pathname) as fp:
+ with open(pathname, 'rb') as fp:
match = _JSTEST_TAGS_RE.match(fp.read())
if match:
try:
# TODO: it might be worth supporting the block (indented) style of YAML lists in
# addition to the flow (bracketed) style
tags = yaml.safe_load(_strip_jscomments(match.group(1)))
- if not isinstance(tags, list) and all(isinstance(tag, basestring) for tag in tags):
+                if not (isinstance(tags, list) and all(isinstance(tag, str) for tag in tags)):
raise TypeError("Expected a list of string tags, but got '%s'" % (tags))
return tags
except yaml.YAMLError as err:
- raise ValueError("File '%s' contained invalid tags (expected YAML): %s" % (pathname,
- err))
+ raise ValueError(
+ "File '%s' contained invalid tags (expected YAML): %s" % (pathname, err))
return []
@@ -68,6 +66,9 @@ def _strip_jscomments(string):
yaml_lines = []
+ if isinstance(string, bytes):
+ string = string.decode("utf-8")
+
for line in string.splitlines():
# Remove leading whitespace and symbols that commonly appear in JS comments.
line = line.lstrip("\t ").lstrip("*/")
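
Note: because the file is now opened in binary mode, the tag pattern must be a bytes pattern (written rb"..." above so the \s and \[ escapes reach the regex engine rather than the string parser), and the matched group is decoded before YAML parsing. A compact round trip:

    import re

    TAGS_RE = re.compile(rb".*@tags\s*:\s*(\[[^\]]*\])", re.DOTALL)

    data = b"/**\n * @tags: [requires_replication]\n */\n"
    match = TAGS_RE.match(data)
    assert match is not None
    tag_block = match.group(1).decode("utf-8")  # bytes -> str before yaml.safe_load
    assert tag_block == "[requires_replication]"
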
diff --git a/buildscripts/resmokelib/utils/queue.py b/buildscripts/resmokelib/utils/queue.py
index c77692138b1..90c57408621 100644
--- a/buildscripts/resmokelib/utils/queue.py
+++ b/buildscripts/resmokelib/utils/queue.py
@@ -6,16 +6,14 @@ in order for KeyboardInterrupt exceptions to get propagated.
See https://bugs.python.org/issue1167930 for more details.
"""
-from __future__ import absolute_import
-
-import Queue as _Queue
+import queue as _queue
import time
# Exception that is raised when get_nowait() is called on an empty Queue.
-Empty = _Queue.Empty
+Empty = _queue.Empty
-class Queue(_Queue.Queue):
+class Queue(_queue.Queue):
"""A multi-producer, multi-consumer queue."""
def join(self, timeout=None): # pylint: disable=arguments-differ
diff --git a/buildscripts/resmokelib/utils/registry.py b/buildscripts/resmokelib/utils/registry.py
index 0aa02f4b2b5..4248b8c38b3 100644
--- a/buildscripts/resmokelib/utils/registry.py
+++ b/buildscripts/resmokelib/utils/registry.py
@@ -6,8 +6,6 @@ This pattern enables the associated class to be looked up later by using
its name.
"""
-from __future__ import absolute_import
-
# Specifying 'LEAVE_UNREGISTERED' as the "REGISTERED_NAME" attribute will cause the class to be
# omitted from the registry. This is particularly useful for base classes that define an interface
# or common functionality, and aren't intended to be constructed explicitly.
@@ -23,7 +21,7 @@ def make_registry_metaclass(registry_store):
class Registry(type):
"""A metaclass that stores a reference to all registered classes."""
- def __new__(mcs, class_name, base_classes, class_dict):
+ def __new__(mcs, class_name, base_classes, class_dict): # pylint: disable=bad-mcs-classmethod-argument
"""Create and returns a new instance of Registry.
The registry is a class named 'class_name' derived from 'base_classes'
@@ -46,9 +44,9 @@ def make_registry_metaclass(registry_store):
if registered_name is not LEAVE_UNREGISTERED:
if registered_name in registry_store:
- raise ValueError("The name %s is already registered; a different value for the"
- " 'REGISTERED_NAME' attribute must be chosen" %
- (registered_name))
+ raise ValueError(
+ "The name %s is already registered; a different value for the"
+ " 'REGISTERED_NAME' attribute must be chosen" % (registered_name))
registry_store[registered_name] = cls
return cls
diff --git a/buildscripts/resmokelib/utils/scheduler.py b/buildscripts/resmokelib/utils/scheduler.py
index 04abafcd330..9f57d0a110e 100644
--- a/buildscripts/resmokelib/utils/scheduler.py
+++ b/buildscripts/resmokelib/utils/scheduler.py
@@ -1,7 +1,5 @@
"""Thread-safe version of sched.scheduler; the class wasn't made thread-safe until Python 3.3."""
-from __future__ import absolute_import
-
import heapq
import sched
import threading
diff --git a/buildscripts/scons.py b/buildscripts/scons.py
index 5dde43facfe..7c07de80f7b 100755
--- a/buildscripts/scons.py
+++ b/buildscripts/scons.py
@@ -1,8 +1,6 @@
#!/usr/bin/env python2
"""Scons module."""
-from __future__ import print_function
-
import os
import sys
diff --git a/buildscripts/scons_cache_prune.py b/buildscripts/scons_cache_prune.py
index dc6b520d6da..b63fd48ca30 100644
--- a/buildscripts/scons_cache_prune.py
+++ b/buildscripts/scons_cache_prune.py
@@ -38,8 +38,9 @@ def collect_cache_contents(cache_path):
for file_name in os.listdir(path):
file_path = os.path.join(path, file_name)
if os.path.isdir(file_path):
- LOGGER.warning("cache item %s is a directory and not a file. "
- "The cache may be corrupt.", file_path)
+ LOGGER.warning(
+ "cache item %s is a directory and not a file. "
+ "The cache may be corrupt.", file_path)
continue
try:
@@ -113,9 +114,10 @@ def main():
parser.add_argument("--cache-dir", "-d", default=None, help="path to the cache directory.")
parser.add_argument("--cache-size", "-s", default=200, type=int,
help="maximum size of cache in GB.")
- parser.add_argument("--prune-ratio", "-p", default=0.8, type=float,
- help=("ratio (as 1.0 > x > 0) of total cache size to prune "
- "to when cache exceeds quota."))
+ parser.add_argument(
+ "--prune-ratio", "-p", default=0.8, type=float,
+ help=("ratio (as 1.0 > x > 0) of total cache size to prune "
+ "to when cache exceeds quota."))
parser.add_argument("--print-cache-dir", default=False, action="store_true")
args = parser.parse_args()
diff --git a/buildscripts/setup_multiversion_mongodb.py b/buildscripts/setup_multiversion_mongodb.py
index 7b5c5ca9e57..82abbe7574a 100755
--- a/buildscripts/setup_multiversion_mongodb.py
+++ b/buildscripts/setup_multiversion_mongodb.py
@@ -1,8 +1,6 @@
#!/usr/bin/env python
"""Install multiple versions of MongoDB on a machine."""
-from __future__ import print_function
-
import contextlib
import errno
import json
@@ -16,7 +14,7 @@ import tarfile
import tempfile
import threading
import traceback
-import urlparse
+import urllib.parse
import zipfile
import requests
@@ -32,7 +30,7 @@ def dump_stacks(_signal_num, _frame): # pylint: disable=unused-argument
print("Total Threads: {:d}".format(len(threads)))
- for tid, stack in sys._current_frames().items(): # pylint: disable=protected-access
+ for tid, stack in list(sys._current_frames().items()): # pylint: disable=protected-access
print("Thread {:d}".format(tid))
print("".join(traceback.format_stack(stack)))
print("======================================")
@@ -205,7 +203,7 @@ class MultiVersionDownloader(object): # pylint: disable=too-many-instance-attri
urls = []
requested_version_parts = get_version_parts(version)
- for link_version, link_url in self.links.iteritems():
+ for link_version, link_url in self.links.items():
link_version_parts = get_version_parts(link_version)
if link_version_parts[:len(requested_version_parts)] == requested_version_parts:
# The 'link_version' is a candidate for the requested 'version' if
@@ -222,9 +220,10 @@ class MultiVersionDownloader(object): # pylint: disable=too-many-instance-attri
urls.append((link_version, link_url))
if not urls:
- print("Cannot find a link for version {}, versions {} found.".format(
- version, self.links), file=sys.stderr)
- for ver, generic_url in self.generic_links.iteritems():
+ print(
+ "Cannot find a link for version {}, versions {} found.".format(version, self.links),
+ file=sys.stderr)
+ for ver, generic_url in self.generic_links.items():
parts = get_version_parts(ver)
if parts[:len(requested_version_parts)] == requested_version_parts:
if "-" in version and ver != version:
@@ -236,11 +235,11 @@ class MultiVersionDownloader(object): # pylint: disable=too-many-instance-attri
else:
print("Falling back to generic architecture.")
- urls.sort(key=lambda (version, _): get_version_parts(version, for_sorting=True))
+ urls.sort(key=lambda link: get_version_parts(link[0], for_sorting=True))
full_version = urls[-1][0]
url = urls[-1][1]
extract_dir = url.split("/")[-1][:-4]
- file_suffix = os.path.splitext(urlparse.urlparse(url).path)[1]
+ file_suffix = os.path.splitext(urllib.parse.urlparse(url).path)[1]
# Only download if we don't already have the directory.
# Note, we cannot detect if 'latest' has already been downloaded, as the name
@@ -402,25 +401,30 @@ we'll pull the highest non-rc version compatible with the version specified.
parser.add_option("-i", "--installDir", dest="install_dir",
help="Directory to install the download archive. [REQUIRED]", default=None)
- parser.add_option("-l", "--linkDir", dest="link_dir",
- help=("Directory to contain links to all binaries for each version in"
- " the install directory. [REQUIRED]"), default=None)
+ parser.add_option(
+ "-l", "--linkDir", dest="link_dir",
+ help=("Directory to contain links to all binaries for each version in"
+ " the install directory. [REQUIRED]"), default=None)
editions = ["base", "enterprise", "targeted"]
- parser.add_option("-e", "--edition", dest="edition", choices=editions,
- help=("Edition of the build to download, choose from {}, [default:"
- " '%default'].".format(editions)), default="base")
- parser.add_option("-p", "--platform", dest="platform",
- help=("Platform to download [REQUIRED]. Examples include: 'linux',"
- " 'osx', 'rhel62', 'windows'."), default=None)
- parser.add_option("-a", "--architecture", dest="architecture",
- help=("Architecture to download, [default: '%default']. Examples include:"
- " 'arm64', 'ppc64le', 's390x' and 'x86_64'."), default="x86_64")
- parser.add_option("-u", "--useLatest", dest="use_latest", action="store_true",
- help=("If specified, the latest (nightly) version will be downloaded,"
- " if it exists, for the version specified. For example, if specifying"
- " version 3.2 for download, the nightly version for 3.2 will be"
- " downloaded if it exists, otherwise the 'highest' version will be"
- " downloaded, i.e., '3.2.17'"), default=False)
+ parser.add_option(
+ "-e", "--edition", dest="edition", choices=editions,
+ help=("Edition of the build to download, choose from {}, [default:"
+ " '%default'].".format(editions)), default="base")
+ parser.add_option(
+ "-p", "--platform", dest="platform",
+ help=("Platform to download [REQUIRED]. Examples include: 'linux',"
+ " 'osx', 'rhel62', 'windows'."), default=None)
+ parser.add_option(
+ "-a", "--architecture", dest="architecture",
+ help=("Architecture to download, [default: '%default']. Examples include:"
+ " 'arm64', 'ppc64le', 's390x' and 'x86_64'."), default="x86_64")
+ parser.add_option(
+ "-u", "--useLatest", dest="use_latest", action="store_true",
+ help=("If specified, the latest (nightly) version will be downloaded,"
+ " if it exists, for the version specified. For example, if specifying"
+ " version 3.2 for download, the nightly version for 3.2 will be"
+ " downloaded if it exists, otherwise the 'highest' version will be"
+ " downloaded, i.e., '3.2.17'"), default=False)
options, versions = parser.parse_args()
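
Note: PEP 3113 removed tuple parameter unpacking, so lambda (version, _): ... is a syntax error in Python 3; the rewrite indexes the pair instead. An equivalent sketch with a simplified stand-in for get_version_parts (the real helper also handles rc/nightly suffixes):

    def get_version_parts(version, for_sorting=False):
        # Simplified: split "3.2.10" into [3, 2, 10] so comparison is numeric.
        return [int(part) for part in version.split(".")]

    urls = [("3.2.10", "https://example.invalid/b"), ("3.2.1", "https://example.invalid/a")]
    urls.sort(key=lambda link: get_version_parts(link[0], for_sorting=True))
    assert urls[-1][0] == "3.2.10"  # highest version sorts last
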
diff --git a/buildscripts/tests/ciconfig/test_evergreen.py b/buildscripts/tests/ciconfig/test_evergreen.py
index fd57a449f79..f277bb6564a 100644
--- a/buildscripts/tests/ciconfig/test_evergreen.py
+++ b/buildscripts/tests/ciconfig/test_evergreen.py
@@ -1,7 +1,5 @@
"""Unit tests for the buildscripts.ciconfig.evergreen module."""
-from __future__ import absolute_import
-
import datetime
import os
import unittest
@@ -122,12 +120,13 @@ class TestTask(unittest.TestCase): # pylint: disable=too-many-public-methods
def test_run_tests_multiversion(self):
multiversion_path = "/data/multiversion"
- task_commands = [{"func": "do multiversion setup"}, {
- "func": "run tests", "vars": {
- "task_path_suffix": multiversion_path,
- "resmoke_args": "--suites=core --shellWriteMode=commands"
- }
- }]
+ task_commands = [{"func": "do multiversion setup"},
+ {
+ "func": "run tests", "vars": {
+ "task_path_suffix": multiversion_path,
+ "resmoke_args": "--suites=core --shellWriteMode=commands"
+ }
+ }]
task_dict = {"name": "jsCore", "commands": task_commands}
task = _evergreen.Task(task_dict)
diff --git a/buildscripts/tests/ciconfig/test_tags.py b/buildscripts/tests/ciconfig/test_tags.py
index a60781506b4..007124a5869 100644
--- a/buildscripts/tests/ciconfig/test_tags.py
+++ b/buildscripts/tests/ciconfig/test_tags.py
@@ -1,5 +1,4 @@
"""Unit tests for the buildscripts.ciconfig.tags module."""
-from __future__ import absolute_import
import os
import unittest
@@ -157,7 +156,9 @@ class TestTagsConfig(unittest.TestCase):
test_pattern = "jstests/core/example.js"
def custom_cmp(tag_a, tag_b):
- return cmp(tag_a.split("|"), tag_b.split("|"))
+ a_split = tag_a.split("|")
+ b_split = tag_b.split("|")
+ return (a_split > b_split) - (a_split < b_split)
conf = _tags.TagsConfig.from_file(TEST_FILE_PATH, cmp_func=custom_cmp)
tags = conf.get_tags(test_kind, test_pattern)
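
Python 3 removed the builtin cmp(), so the comparator above rebuilds the three-way result from the rich comparison operators. A standalone sketch of the idiom (names illustrative):

    def cmp_compat(a, b):
        """Return -1, 0 or 1, matching Python 2's builtin cmp()."""
        return (a > b) - (a < b)

    assert cmp_compat("a|x".split("|"), "a|y".split("|")) == -1
    assert cmp_compat([2], [1]) == 1
    assert cmp_compat("t", "t") == 0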
diff --git a/buildscripts/tests/client/test_evergreen.py b/buildscripts/tests/client/test_evergreen.py
index d465361f37b..13a7b07aa08 100644
--- a/buildscripts/tests/client/test_evergreen.py
+++ b/buildscripts/tests/client/test_evergreen.py
@@ -1,7 +1,5 @@
"""Unit tests for the client.evergreen module."""
-from __future__ import absolute_import
-
import datetime
import unittest
diff --git a/buildscripts/tests/metrics/test_burn_in_tests.py b/buildscripts/tests/metrics/test_burn_in_tests.py
index 418698f53ce..efb39494f75 100644
--- a/buildscripts/tests/metrics/test_burn_in_tests.py
+++ b/buildscripts/tests/metrics/test_burn_in_tests.py
@@ -398,7 +398,7 @@ class TestWriteJsonFile(unittest.TestCase):
def test_write_json_file(self):
my_data = {"key1": "val1", "key_list": ["l1", "l2"]}
path = "myfile"
- with patch("__builtin__.open") as mock_file,\
+ with patch("builtins.open") as mock_file,\
patch("json.dump") as mock_json_dump:
burn_in.write_json_file(my_data, path)
mock_file.assert_called_once_with("myfile", "w")
@@ -813,71 +813,3 @@ class TestReport(unittest.TestCase):
def test__is_patch_build_completed_no_builds(self):
self.assertTrue(burn_in.Report._is_patch_build_completed([]))
-
-
-class TestMain(unittest.TestCase):
- def test_main(self):
- options = MagicMock()
- options.log_level = "NOTSET"
- options.evg_client_log_level = "NOTSET"
- options.days = 30000
- options.project = "myproject"
- projects = Projects(PROJECT_PATCHES)
- version_builds = VersionBuilds(VERSION_BUILDS)
- build_tasks = BuildTasks(BUILD_TASKS_WITH_BURN_IN)
- task_tests = TaskTests(TASKS_TESTS)
- with patch("argparse.ArgumentParser.parse_args", return_value=options),\
- patch(EVERGREEN + ".EvergreenApiV2.project_patches_gen", projects._project_patches_gen),\
- patch(EVERGREEN + ".EvergreenApiV2.version_builds", version_builds._version_builds),\
- patch(EVERGREEN + ".EvergreenApiV2.tasks_by_build_id", build_tasks._tasks_by_build_id),\
- patch(EVERGREEN + ".EvergreenApiV2.tests_by_task", task_tests._tests_by_task),\
- patch(BURN_IN + ".write_json_file") as mock_write_json:
- burn_in.main()
- report = mock_write_json.call_args_list[0][0][0]
- self.assertEqual(len(burn_in.REPORT_FIELDS) + 1, len(report))
- for field in burn_in.REPORT_FIELDS:
- self.assertIn(field, report)
- self.assertEqual(17, report["tasks"])
- self.assertEqual(12, report["tasks_succeeded"])
- self.assertEqual(5, report["tasks_failed"])
- self.assertEqual(3, report["tasks_failed_burn_in"])
- self.assertEqual(2, report["tasks_failed_only_burn_in"])
- self.assertEqual(6, report["burn_in_generated_tasks"])
- self.assertEqual(4, report["patch_builds_with_burn_in_task"])
- self.assertEqual(5, report["burn_in_tests"])
- self.assertEqual(2, report[burn_in.BURN_IN_TASKS_EXCEED])
- self.assertEqual("2019-01-01T00:00:00.000Z", report["report_start_time"])
- self.assertEqual("2019-04-01T00:00:00.000Z", report["report_end_time"])
-
- def test_main_nodata(self):
- options = MagicMock()
- options.log_level = "NOTSET"
- options.evg_client_log_level = "NOTSET"
- options.days = 30000
- options.project = "myproject"
- projects = Projects(PROJECT_PATCHES)
- version_builds = VersionBuilds([])
- build_tasks = BuildTasks([])
- task_tests = TaskTests([])
- with patch("argparse.ArgumentParser.parse_args", return_value=options),\
- patch(EVERGREEN + ".EvergreenApiV2.project_patches_gen", projects._project_patches_gen),\
- patch(EVERGREEN + ".EvergreenApiV2.version_builds", version_builds._version_builds),\
- patch(EVERGREEN + ".EvergreenApiV2.tasks_by_build_id", build_tasks._tasks_by_build_id),\
- patch(EVERGREEN + ".EvergreenApiV2.tests_by_task", task_tests._tests_by_task),\
- patch(BURN_IN + ".write_json_file") as mock_write_json:
- burn_in.main()
- report = mock_write_json.call_args_list[0][0][0]
- self.assertEqual(len(burn_in.REPORT_FIELDS) + 1, len(report))
- for field in burn_in.REPORT_FIELDS:
- self.assertIn(field, report)
- self.assertEqual(0, report["tasks"])
- self.assertEqual(0, report["tasks_succeeded"])
- self.assertEqual(0, report["tasks_failed"])
- self.assertEqual(0, report["tasks_failed_burn_in"])
- self.assertEqual(0, report["tasks_failed_only_burn_in"])
- self.assertEqual(0, report["burn_in_generated_tasks"])
- self.assertEqual(0, report["patch_builds_with_burn_in_task"])
- self.assertEqual(0, report["burn_in_tests"])
- self.assertEqual(0, report[burn_in.BURN_IN_TASKS_EXCEED])
- self.assertIsNone(report["report_start_time"])
- self.assertIsNone(report["report_end_time"])
diff --git a/buildscripts/tests/mobile/test_adb_monitor.py b/buildscripts/tests/mobile/test_adb_monitor.py
index c6680d6dd67..3520ea98380 100644
--- a/buildscripts/tests/mobile/test_adb_monitor.py
+++ b/buildscripts/tests/mobile/test_adb_monitor.py
@@ -1,7 +1,5 @@
""" Unit tests for adb_monitor. """
-from __future__ import absolute_import
-
import distutils.spawn # pylint: disable=no-name-in-module
import os
import shutil
@@ -37,7 +35,7 @@ def mock_adb_and_systrace(directory):
systrace = os.path.join(systrace_dir, "systrace.py")
with open(systrace, "w") as fh:
fh.write("import optparse\n")
- fh.write("raw_input('waiting...')\n")
+ fh.write("input('waiting...')\n")
fh.write("print('Wrote trace')\n")
fh.write("parser = optparse.OptionParser()\n")
fh.write("parser.add_option('-o', dest='output_file')\n")
diff --git a/buildscripts/tests/resmokelib/logging/test_buildlogger.py b/buildscripts/tests/resmokelib/logging/test_buildlogger.py
index b57ec041f30..5cb8f783c9d 100644
--- a/buildscripts/tests/resmokelib/logging/test_buildlogger.py
+++ b/buildscripts/tests/resmokelib/logging/test_buildlogger.py
@@ -1,7 +1,5 @@
"""Unit tests for the buildscripts.resmokelib.logging.buildlogger module."""
-from __future__ import absolute_import
-
import json
import unittest
@@ -54,7 +52,7 @@ class TestLogsSplitter(unittest.TestCase):
# The size of [ "x" ] is 5. This is the minimum size we generate.
self.assertTrue(size >= 5)
# Each new "x" adds 5 to the size.
- nb_lines = size / 5
+ nb_lines = int(size / 5)
# Each additional "x" on a line adds 1 to the size.
last_line_extra = size % 5
logs = ["x"] * nb_lines
@@ -65,4 +63,4 @@ class TestLogsSplitter(unittest.TestCase):
@staticmethod
def size(logs):
"""Returns the size of the log lines when represented in JSON."""
- return len(json.dumps(logs, encoding="utf-8"))
+ return len(json.dumps(logs))
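
json.dumps() lost its Python 2-only encoding parameter because in Python 3 it always returns str, so measuring the serialized size needs no codec argument. Illustrative only, matching the size model in the test above:

    import json

    logs = ["x", "xx"]
    payload = json.dumps(logs)   # always str in Python 3
    assert payload == '["x", "xx"]'
    assert len(payload) == 11    # 5 for ["x"], +5 for , "x", +1 per extra "x"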
diff --git a/buildscripts/tests/resmokelib/test_selector.py b/buildscripts/tests/resmokelib/test_selector.py
index 639d6f74ea6..a37147726ab 100644
--- a/buildscripts/tests/resmokelib/test_selector.py
+++ b/buildscripts/tests/resmokelib/test_selector.py
@@ -1,7 +1,5 @@
"""Unit tests for the buildscripts.resmokelib.selector module."""
-from __future__ import absolute_import
-
import fnmatch
import os.path
import unittest
@@ -194,7 +192,7 @@ class TestTestList(unittest.TestCase):
def test_roots_unknown_file(self):
roots = ["dir/subdir1/unknown"]
- with self.assertRaisesRegexp(ValueError, "Unrecognized test file: dir/subdir1/unknown"):
+ with self.assertRaisesRegex(ValueError, "Unrecognized test file: dir/subdir1/unknown"):
selector._TestList(self.test_file_explorer, roots, tests_are_files=True)
def test_include_files(self):
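
assertRaisesRegexp is the deprecated Python 2 spelling; Python 3 renamed it assertRaisesRegex (the same rename turns assertRegexpMatches into assertRegex further down). A self-contained sketch:

    import unittest

    class RenameExample(unittest.TestCase):
        def test_raises(self):
            # assertRaisesRegex is the old assertRaisesRegexp, minus the alias.
            with self.assertRaisesRegex(ValueError, "Unrecognized test file: .*"):
                raise ValueError("Unrecognized test file: dir/subdir1/unknown")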
@@ -225,7 +223,7 @@ class TestTestList(unittest.TestCase):
def test_exclude_files_no_match(self):
roots = ["dir/subdir1/*.js", "dir/subdir2/test21.*"]
test_list = selector._TestList(self.test_file_explorer, roots)
- with self.assertRaisesRegexp(ValueError, "Unrecognized test file: .*$"):
+ with self.assertRaisesRegex(ValueError, "Unrecognized test file: .*$"):
test_list.exclude_files(["dir/subdir2/test26.js"])
def test_exclude_files_glob(self):
@@ -354,8 +352,8 @@ class TestSelector(unittest.TestCase):
def test_select_exclude_files(self):
config = selector._SelectorConfig(
- roots=["dir/subdir1/*.js", "dir/subdir2/*.js",
- "dir/subdir3/a/*.js"], exclude_files=["dir/subdir2/test21.js"])
+ roots=["dir/subdir1/*.js", "dir/subdir2/*.js", "dir/subdir3/a/*.js"],
+ exclude_files=["dir/subdir2/test21.js"])
selected, excluded = self.selector.select(config)
self.assertEqual(
["dir/subdir1/test11.js", "dir/subdir1/test12.js", "dir/subdir3/a/test3a1.js"],
@@ -364,8 +362,8 @@ class TestSelector(unittest.TestCase):
def test_select_include_files(self):
config = selector._SelectorConfig(
- roots=["dir/subdir1/*.js", "dir/subdir2/*.js",
- "dir/subdir3/a/*.js"], include_files=["dir/subdir2/test21.js"])
+ roots=["dir/subdir1/*.js", "dir/subdir2/*.js", "dir/subdir3/a/*.js"],
+ include_files=["dir/subdir2/test21.js"])
selected, excluded = self.selector.select(config)
self.assertEqual(["dir/subdir2/test21.js"], selected)
self.assertEqual(
@@ -374,8 +372,8 @@ class TestSelector(unittest.TestCase):
def test_select_include_tags(self):
config = selector._SelectorConfig(
- roots=["dir/subdir1/*.js", "dir/subdir2/*.js",
- "dir/subdir3/a/*.js"], include_tags="tag1")
+ roots=["dir/subdir1/*.js", "dir/subdir2/*.js", "dir/subdir3/a/*.js"],
+ include_tags="tag1")
selected, excluded = self.selector.select(config)
self.assertEqual([], selected)
self.assertEqual([
@@ -385,8 +383,8 @@ class TestSelector(unittest.TestCase):
def test_select_include_any_tags(self):
config = selector._SelectorConfig(
- roots=["dir/subdir1/*.js", "dir/subdir2/*.js",
- "dir/subdir3/a/*.js"], include_with_any_tags=["tag1"])
+ roots=["dir/subdir1/*.js", "dir/subdir2/*.js", "dir/subdir3/a/*.js"],
+ include_with_any_tags=["tag1"])
selected, excluded = self.selector.select(config)
self.assertEqual([], selected)
self.assertEqual([
@@ -412,8 +410,8 @@ class TestMultiJSSelector(unittest.TestCase):
total += 3
self.assertLessEqual(
- len(selected[-1]), 3, "Last selected group did not have 3 or fewer tests: {}".format(
- selected[-1]))
+ len(selected[-1]), 3,
+ "Last selected group did not have 3 or fewer tests: {}".format(selected[-1]))
total += len(selected[-1])
self.assertEqual(total, MockTestFileExplorer.NUM_JS_FILES * config.group_count_multiplier,
@@ -557,7 +555,7 @@ class TestFilterTests(unittest.TestCase):
def test_jstest_unknown_file(self):
config = {"roots": ["dir/subdir1/*.js", "dir/subdir1/unknown"]}
- with self.assertRaisesRegexp(ValueError, "Unrecognized test file: dir/subdir1/unknown"):
+ with self.assertRaisesRegex(ValueError, "Unrecognized test file: dir/subdir1/unknown"):
selector.filter_tests("js_test", config, self.test_file_explorer)
def test_json_schema_exclude_files(self):
diff --git a/buildscripts/tests/resmokelib/testing/hooks/test_combine_benchmark_results.py b/buildscripts/tests/resmokelib/testing/hooks/test_combine_benchmark_results.py
index 8077357a9a9..2dfd063337e 100755
--- a/buildscripts/tests/resmokelib/testing/hooks/test_combine_benchmark_results.py
+++ b/buildscripts/tests/resmokelib/testing/hooks/test_combine_benchmark_results.py
@@ -1,8 +1,6 @@
#!/usr/bin/env python
"""Unit tests for the resmokelib.testing.hooks.combine_benchmark_results module."""
-from __future__ import absolute_import
-
import datetime
import unittest
@@ -76,7 +74,7 @@ class TestCombineBenchmarkResults(CombineBenchmarkResultsFixture):
def test_generate_reports(self):
report = self.cbr_hook._generate_perf_plugin_report()
- self.assertEqual(len(report.keys()), 4)
+ self.assertEqual(len(list(report.keys())), 4)
self.assertEqual(len(report["results"]), 2)
self.assertDictEqual(report["results"][0]["context"], _BM_CONTEXT)
@@ -124,13 +122,13 @@ class TestBenchmarkThreadsReport(CombineBenchmarkResultsFixture):
self.bm_threads_report.parse_bm_name(_BM_MULTITHREAD_REPORT["name"]),
_BM_MULTITHREAD_REPORT)
- self.assertEqual(len(self.bm_threads_report.thread_benchmark_map.keys()), 1)
+ self.assertEqual(len(list(self.bm_threads_report.thread_benchmark_map.keys())), 1)
report = self.bm_threads_report.generate_perf_plugin_dict()
- self.assertEqual(len(report.keys()), 1)
- self.assertIn("10", report.keys())
- self.assertNotIn("10_median", report.keys())
+ self.assertEqual(len(list(report.keys())), 1)
+ self.assertIn("10", list(report.keys()))
+ self.assertNotIn("10_median", list(report.keys()))
self.assertEqual(len(report["10"]["error_values"]), 1)
self.assertEqual(len(report["10"]["ops_per_sec_values"]), 1)
@@ -143,10 +141,10 @@ class TestBenchmarkThreadsReport(CombineBenchmarkResultsFixture):
self.bm_threads_report.add_report(
self.bm_threads_report.parse_bm_name(_BM_REPORT_2["name"]), _BM_REPORT_2)
- self.assertEqual(len(self.bm_threads_report.thread_benchmark_map.keys()), 1)
+ self.assertEqual(len(list(self.bm_threads_report.thread_benchmark_map.keys())), 1)
report = self.bm_threads_report.generate_perf_plugin_dict()
- self.assertEqual(len(report.keys()), 1)
- self.assertIn("1", report.keys())
- self.assertNotIn("1_mean", report.keys())
+ self.assertEqual(len(list(report.keys())), 1)
+ self.assertIn("1", list(report.keys()))
+ self.assertNotIn("1_mean", list(report.keys()))
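
The list(...) wrappers in these assertions are mechanical 2to3 output: Python 3's dict views already support len() and membership, so the conversions are redundant (though harmless) except where an actual list is required. Illustrative, with a made-up report dict:

    report = {"10": {"ops_per_sec_values": [1]}, "context": {}}
    assert len(report.keys()) == 2   # len() works on the view directly
    assert "10" in report            # membership needs no .keys() at all
    first = list(report.keys())[0]   # list() only when indexing or slicing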
diff --git a/buildscripts/tests/resmokelib/testing/hooks/test_combine_benchrun_embedded_results.py b/buildscripts/tests/resmokelib/testing/hooks/test_combine_benchrun_embedded_results.py
index 02248602f4b..a28419a1fc1 100755
--- a/buildscripts/tests/resmokelib/testing/hooks/test_combine_benchrun_embedded_results.py
+++ b/buildscripts/tests/resmokelib/testing/hooks/test_combine_benchrun_embedded_results.py
@@ -1,8 +1,6 @@
#!/usr/bin/env python
"""Unit tests for the resmokelib.testing.hooks.combine_benchrun_embedded_results module."""
-from __future__ import absolute_import
-
import datetime
import os
import unittest
@@ -112,7 +110,7 @@ class TestCombineBenchmarkResults(CombineBenchrunEmbeddedResultsFixture):
test_name = "test_cber2"
thread_num = "1"
self._setup_reports(_BM_ALL_REPORTS, test_name, thread_num)
- self.assertEqual(len(self.report.keys()), 4)
+ self.assertEqual(len(list(self.report.keys())), 4)
report_0 = self.report["results"][0]
self.assertEqual(report_0["name"], test_name)
self.assertEqual(report_0["results"][thread_num]["ops_per_sec"], self.ops_per_sec)
@@ -135,13 +133,13 @@ class TestBenchrunEmbeddedThreadsReport(CombineBenchrunEmbeddedResultsFixture):
thread_num = "1"
thread_report.add_report(thread_num, _BM_REPORT_INSERT_1)
perf_report = thread_report.generate_perf_plugin_dict()
- self.assertEqual(len(perf_report.keys()), 1)
+ self.assertEqual(len(list(perf_report.keys())), 1)
self.assertEqual(perf_report[thread_num]["ops_per_sec"], _BM_REPORT_INSERT_1["totalOps/s"])
self.assertEqual(len(perf_report[thread_num]["ops_per_sec_values"]), 1)
thread_report.add_report(thread_num, _BM_REPORT_INSERT_2)
perf_report = thread_report.generate_perf_plugin_dict()
- self.assertEqual(len(perf_report.keys()), 1)
+ self.assertEqual(len(list(perf_report.keys())), 1)
ops_per_sec = (_BM_REPORT_INSERT_1["totalOps/s"] + _BM_REPORT_INSERT_2["totalOps/s"]) / 2
self.assertEqual(perf_report[thread_num]["ops_per_sec"], ops_per_sec)
self.assertEqual(len(perf_report[thread_num]["ops_per_sec_values"]), 2)
@@ -151,14 +149,14 @@ class TestBenchrunEmbeddedThreadsReport(CombineBenchrunEmbeddedResultsFixture):
thread_num = "1"
thread_report.add_report(thread_num, _BM_REPORT_INSERT_1)
perf_report = thread_report.generate_perf_plugin_dict()
- self.assertEqual(len(perf_report.keys()), 1)
+ self.assertEqual(len(list(perf_report.keys())), 1)
self.assertEqual(perf_report[thread_num]["ops_per_sec"], _BM_REPORT_INSERT_1["totalOps/s"])
self.assertEqual(len(perf_report[thread_num]["ops_per_sec_values"]), 1)
thread_num = "2"
thread_report.add_report(thread_num, _BM_REPORT_INSERT_2)
perf_report = thread_report.generate_perf_plugin_dict()
- self.assertEqual(len(perf_report.keys()), 2)
+ self.assertEqual(len(list(perf_report.keys())), 2)
self.assertEqual(perf_report["1"]["ops_per_sec"], _BM_REPORT_INSERT_1["totalOps/s"])
self.assertEqual(len(perf_report["1"]["ops_per_sec_values"]), 1)
self.assertEqual(perf_report[thread_num]["ops_per_sec"], _BM_REPORT_INSERT_2["totalOps/s"])
diff --git a/buildscripts/tests/resmokelib/testing/test_job.py b/buildscripts/tests/resmokelib/testing/test_job.py
index fb4768b275b..3ee85a7a74a 100644
--- a/buildscripts/tests/resmokelib/testing/test_job.py
+++ b/buildscripts/tests/resmokelib/testing/test_job.py
@@ -1,5 +1,4 @@
"""Unit tests for the resmokelib.testing.executor module."""
-from __future__ import division
import logging
import time
diff --git a/buildscripts/tests/resmokelib/utils/test_archival.py b/buildscripts/tests/resmokelib/utils/test_archival.py
index 43a63fa17c6..5916c9768b3 100644
--- a/buildscripts/tests/resmokelib/utils/test_archival.py
+++ b/buildscripts/tests/resmokelib/utils/test_archival.py
@@ -1,7 +1,5 @@
""" Unit tests for archival. """
-from __future__ import absolute_import
-
import logging
import os
import random
@@ -19,7 +17,7 @@ _BUCKET = "mongodatafiles"
def create_random_file(file_name, num_chars_mb):
""" Creates file with random characters, which will have minimal compression. """
with open(file_name, "wb") as fileh:
- for _ in xrange(num_chars_mb * 1024 * 1024):
+ for _ in range(num_chars_mb * 1024 * 1024):
fileh.write(chr(random.randint(0, 255)))
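
One Python 3 pitfall survives this hunk: the file is opened "wb" but chr() now returns str, so the write raises TypeError at runtime. A bytes-based variant of the helper (hypothetical, not part of this patch) that keeps the minimal-compression property:

    import os

    def create_random_file(file_name, num_chars_mb):
        """Create a file of random bytes, which compress poorly."""
        with open(file_name, "wb") as fileh:
            # os.urandom() already yields bytes, avoiding chr()/str entirely.
            fileh.write(os.urandom(num_chars_mb * 1024 * 1024))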
@@ -29,15 +27,12 @@ class MockS3Client(object):
def __init__(self, logger):
self.logger = logger
self.logger.info("MockS3Client init")
- return
def upload_file(self, *args, **kwargs):
self.logger.info("MockS3Client upload_file %s %s", args, kwargs)
- return
def delete_object(self, *args, **kwargs):
self.logger.info("MockS3Client delete_object %s %s", args, kwargs)
- return
class ArchivalTestCase(unittest.TestCase):
@@ -144,7 +139,7 @@ class ArchivalFileTests(ArchivalTestCase):
temp_dir = tempfile.mkdtemp(dir=self.temp_dir)
s3_path = self.s3_path("unittest/directory_with_files.tgz")
# Create 10 empty files
- for _ in xrange(10):
+ for _ in range(10):
tempfile.mkstemp(dir=temp_dir)
status, message = self.archive.archive_files_to_s3(display_name, temp_dir, self.bucket,
s3_path)
@@ -154,7 +149,7 @@ class ArchivalFileTests(ArchivalTestCase):
temp_dir2 = tempfile.mkdtemp(dir=self.temp_dir)
s3_path = self.s3_path("unittest/directories_with_files.tgz")
# Create 10 empty files
- for _ in xrange(10):
+ for _ in range(10):
tempfile.mkstemp(dir=temp_dir2)
status, message = self.archive.archive_files_to_s3(display_name, [temp_dir, temp_dir2],
self.bucket, s3_path)
diff --git a/buildscripts/tests/resmokelib/utils/test_rmtree.py b/buildscripts/tests/resmokelib/utils/test_rmtree.py
index 1908395766c..1239558feb7 100644
--- a/buildscripts/tests/resmokelib/utils/test_rmtree.py
+++ b/buildscripts/tests/resmokelib/utils/test_rmtree.py
@@ -1,9 +1,6 @@
# -*- coding: utf-8 -*-
""" Unit tests for utils.rmtree. """
-from __future__ import absolute_import
-from __future__ import print_function
-
import os
import shutil
import sys
@@ -41,7 +38,7 @@ def string_for_ascii_filesystem_encoding(path):
Some file system encodings are set to ASCII if LANG=C or LC_ALL=C is specified.
"""
- if ascii_filesystemencoding() and isinstance(path, unicode):
+ if ascii_filesystemencoding() and isinstance(path, str):
return path.encode("utf-8")
return path
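
Python 3 merged unicode into str, so the isinstance check becomes isinstance(path, str); the encode("utf-8") is what turns the name into an explicit bytes path when the filesystem codec is ASCII. A sketch of that boundary (codec names illustrative, and Python 3.7's UTF-8 mode may mask the C-locale case):

    import sys

    path = "ελληνικά"   # every literal is already str in Python 3
    if sys.getfilesystemencoding().lower() in ("ascii", "ansi_x3.4-1968"):
        path = path.encode("utf-8")   # fall back to an explicit bytes path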
@@ -66,15 +63,15 @@ class RmtreeTestCase(unittest.TestCase):
def test_unicode(self):
# Unicode name
- self.do_test(u"unicode")
+ self.do_test("unicode")
def test_greek(self):
# Name with Greek
- self.do_test(string_for_ascii_filesystem_encoding(u"ελληνικά"))
+ self.do_test(string_for_ascii_filesystem_encoding("ελληνικά"))
def test_japanese(self):
# Name with Japanese
- self.do_test(string_for_ascii_filesystem_encoding(u"会社案"))
+ self.do_test(string_for_ascii_filesystem_encoding("会社案"))
class RmtreeFileTests(RmtreeTestCase):
@@ -104,23 +101,3 @@ class RmtreeDirectoryWithNonAsciiTests(RmtreeTestCase):
create_file(name)
os.chdir(self.temp_dir_root)
self.assertTrue(rmtree(name))
-
-
-class ShutilWindowsRmtreeFileTests(RmtreeFileTests):
- def do_test(self, file_name):
- """Execute file test that are known to fail in shutil.rmtree."""
- if not utils.is_windows():
- print("Skipping ShutilWindowsRmtreeFileTests on non-Windows platforms")
- return
- temp_dir = tempfile.mkdtemp(dir=self.temp_dir_root)
- os.chdir(temp_dir)
- create_file(file_name)
- os.chdir(self.temp_dir_root)
- with self.assertRaises(WindowsError): # pylint: disable=undefined-variable
- shutil.rmtree(temp_dir)
-
- def test_ascii(self):
- pass
-
- def test_unicode(self):
- pass
diff --git a/buildscripts/tests/test_aws_ec2.py b/buildscripts/tests/test_aws_ec2.py
index 3401ae569c8..ebcb22a5865 100755
--- a/buildscripts/tests/test_aws_ec2.py
+++ b/buildscripts/tests/test_aws_ec2.py
@@ -1,7 +1,5 @@
"""Unit test for buildscripts/aws_ec2.py."""
-from __future__ import absolute_import
-
import datetime
import unittest
@@ -23,8 +21,8 @@ class AwsEc2TestCase(unittest.TestCase): # pylint: disable=too-many-instance-at
self.security_groups = None
self.expire_dt = datetime.datetime.utcnow() + datetime.timedelta(hours=1)
self.tags = [{"Key": "expire-on", "Value": self.expire_dt.strftime("%Y-%m-%d %H:%M:%S")},
- {"Key": "Name",
- "Value": "Unittest AWS EC2 Launcher"}, {"Key": "owner", "Value": ""}]
+ {"Key": "Name", "Value": "Unittest AWS EC2 Launcher"},
+ {"Key": "owner", "Value": ""}]
def tearDown(self):
for instance in self.launched_instances:
@@ -129,7 +127,7 @@ class AwsEc2ControlStatus(AwsEc2TestCase):
code, ret = self.aws_ec2.control_instance(mode="status", image_id="bad_id")
self.assertNotEqual(0, code, ret)
- self.assertRegexpMatches(ret, "Invalid", ret)
+ self.assertRegex(ret, "Invalid", ret)
class AwsEc2ControlStart(AwsEc2TestCase):
diff --git a/buildscripts/tests/test_burn_in_tests.py b/buildscripts/tests/test_burn_in_tests.py
index 4f04795df04..d5b4e2c546a 100644
--- a/buildscripts/tests/test_burn_in_tests.py
+++ b/buildscripts/tests/test_burn_in_tests.py
@@ -618,7 +618,7 @@ class UpdateReportDataTests(unittest.TestCase):
pathname = "file_exists"
with patch("os.path.isfile", return_value=True),\
- patch("__builtin__.open", mock_open()),\
+ patch("builtins.open", mock_open()),\
patch("json.load", return_value=new_data):
burn_in._update_report_data(data, pathname, task2)
self.assertEqual(len(data["results"]), 4)
@@ -948,9 +948,8 @@ class FindChangedTests(unittest.TestCase):
NUM_COMMITS = 10
MOD_FILES = [os.path.normpath("jstests/test1.js"), os.path.normpath("jstests/test2.js")]
- REV_DIFF = dict(
- zip([str(x) for x in range(NUM_COMMITS)],
- [MOD_FILES for _ in range(NUM_COMMITS)])) #type: ignore
+ REV_DIFF = dict(zip([str(x) for x in range(NUM_COMMITS)],
+ [MOD_FILES] * NUM_COMMITS)) #type: ignore
NO_REV_DIFF = dict(
zip([str(x) for x in range(NUM_COMMITS)], [None for _ in range(NUM_COMMITS)]))
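
`[MOD_FILES] * NUM_COMMITS` repeats a reference to one list rather than copying it; the replaced comprehension also reused the same MOD_FILES object, so behavior is unchanged, and the aliasing only matters if the values were ever mutated. Illustrative:

    mod_files = ["jstests/test1.js"]
    rev_diff = dict(zip([str(x) for x in range(3)], [mod_files] * 3))
    assert rev_diff["0"] is rev_diff["2"]   # one list object, three references
    mod_files.append("jstests/test2.js")
    assert rev_diff["1"][-1] == "jstests/test2.js"   # mutation shows everywhere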
diff --git a/buildscripts/tests/test_evergreen_gen_fuzzer_tests.py b/buildscripts/tests/test_evergreen_gen_fuzzer_tests.py
index cddb2379ea6..ef1c4d205c2 100644
--- a/buildscripts/tests/test_evergreen_gen_fuzzer_tests.py
+++ b/buildscripts/tests/test_evergreen_gen_fuzzer_tests.py
@@ -1,7 +1,5 @@
"""Unit tests for the evergreen_gen_fuzzer_tests.py script."""
-from __future__ import absolute_import
-
import unittest
import mock
diff --git a/buildscripts/tests/test_evergreen_generate_resmoke_tasks.py b/buildscripts/tests/test_evergreen_generate_resmoke_tasks.py
index 18d601ed1dc..2240c691c72 100644
--- a/buildscripts/tests/test_evergreen_generate_resmoke_tasks.py
+++ b/buildscripts/tests/test_evergreen_generate_resmoke_tasks.py
@@ -1,7 +1,5 @@
"""Unit tests for the generate_resmoke_suite script."""
-from __future__ import absolute_import
-
import datetime
import math
import os
@@ -234,7 +232,8 @@ class RenderSuites(unittest.TestCase):
suites = [create_suite(start=3 * i) for i in range(size)]
expected = [
- self.EXPECTED_FORMAT.format(*range(3 * i, 3 * (i + 1))) for i in range(len(suites))
+ self.EXPECTED_FORMAT.format(*list(range(3 * i, 3 * (i + 1))))
+ for i in range(len(suites))
]
m = mock_open(read_data=yaml.dump({"selector": {"roots": [], "excludes": ["fixed"]}}))
@@ -243,7 +242,7 @@ class RenderSuites(unittest.TestCase):
handle = m()
# The other writes are for the headers.
- self.assertEquals(len(suites) * 2, handle.write.call_count)
+ self.assertEqual(len(suites) * 2, handle.write.call_count)
handle.write.assert_has_calls([call(e) for e in expected], any_order=True)
calls = [
call(os.path.join(grt.TEST_SUITE_DIR, "suite_name.yml"), "r")
@@ -275,7 +274,7 @@ class RenderMiscSuites(unittest.TestCase):
handle = m()
# The other writes are for the headers.
- self.assertEquals(2, handle.write.call_count)
+ self.assertEqual(2, handle.write.call_count)
handle.write.assert_any_call("""selector:
exclude_files:
- test0
@@ -500,6 +499,7 @@ class MainTest(unittest.TestCase):
main = grt.Main(evg)
main.options = Mock()
+ main.options.max_sub_suites = 1000
main.config_options = self.get_mock_options()
with patch("os.path.exists") as exists_mock, patch(ns("suitesconfig")) as suitesconfig_mock:
diff --git a/buildscripts/tests/test_evergreen_resmoke_job_count.py b/buildscripts/tests/test_evergreen_resmoke_job_count.py
index f284e122fba..d566d4c3904 100644
--- a/buildscripts/tests/test_evergreen_resmoke_job_count.py
+++ b/buildscripts/tests/test_evergreen_resmoke_job_count.py
@@ -1,7 +1,5 @@
"""Unit tests for the evergreen_resomke_job_count script."""
-from __future__ import division
-
import unittest
import psutil
diff --git a/buildscripts/tests/test_evergreen_task_tags.py b/buildscripts/tests/test_evergreen_task_tags.py
index 582bd93c87b..3572f914981 100644
--- a/buildscripts/tests/test_evergreen_task_tags.py
+++ b/buildscripts/tests/test_evergreen_task_tags.py
@@ -12,7 +12,7 @@ from buildscripts import evergreen_task_tags as ett
def gen_tag_set(prefix, size):
- return set([prefix + " " + str(index) for index in range(size)])
+ return {prefix + " " + str(index) for index in range(size)}
class TestGetAllTaskTags(unittest.TestCase):
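
2to3 rewrites set([...]) into a set comprehension; the result is identical, but the intermediate list is never materialized. Standalone equivalence check:

    prefix, size = "tag", 3
    comprehension = {prefix + " " + str(i) for i in range(size)}
    wrapped = set([prefix + " " + str(i) for i in range(size)])
    assert comprehension == wrapped == {"tag 0", "tag 1", "tag 2"}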
diff --git a/buildscripts/tests/test_evergreen_task_timeout.py b/buildscripts/tests/test_evergreen_task_timeout.py
index 86156126eb9..80c882e1488 100644
--- a/buildscripts/tests/test_evergreen_task_timeout.py
+++ b/buildscripts/tests/test_evergreen_task_timeout.py
@@ -1,7 +1,5 @@
"""Unit tests for the evergreen_task_timeout script."""
-from __future__ import absolute_import
-
import unittest
from buildscripts import evergreen_task_timeout as ett
diff --git a/buildscripts/tests/test_fetch_test_lifecycle.py b/buildscripts/tests/test_fetch_test_lifecycle.py
index 14c57435dab..c2929c1a7d0 100644
--- a/buildscripts/tests/test_fetch_test_lifecycle.py
+++ b/buildscripts/tests/test_fetch_test_lifecycle.py
@@ -1,5 +1,4 @@
"""Unit tests for the fetch_test_lifecycle.py script."""
-from __future__ import absolute_import
import unittest
@@ -10,11 +9,11 @@ import buildscripts.fetch_test_lifecycle as fetch
class TestFetchTestLifecycle(unittest.TestCase):
def test_get_metadata_revision(self):
- metadata_repo = MockMetadataRepository(
- [("metadata_revision_05", "mongo_revision_06"),
- ("metadata_revision_04", "mongo_revision_06"), ("metadata_revision_03",
- "mongo_revision_02"),
- ("metadata_revision_02", "mongo_revision_02"), ("metadata_revision_01", None)])
+ metadata_repo = MockMetadataRepository([("metadata_revision_05", "mongo_revision_06"),
+ ("metadata_revision_04", "mongo_revision_06"),
+ ("metadata_revision_03", "mongo_revision_02"),
+ ("metadata_revision_02", "mongo_revision_02"),
+ ("metadata_revision_01", None)])
mongo_repo = MockMongoRepository([
"mongo_revision_07", "mongo_revision_06", "mongo_revision_05", "mongo_revision_04",
diff --git a/buildscripts/tests/test_git.py b/buildscripts/tests/test_git.py
index 77a80e103f6..c041fc6467b 100644
--- a/buildscripts/tests/test_git.py
+++ b/buildscripts/tests/test_git.py
@@ -1,7 +1,5 @@
"""Unit tests for the buildscripts.git module."""
-from __future__ import absolute_import
-
import subprocess
import unittest
@@ -51,7 +49,7 @@ class TestRepository(unittest.TestCase):
def _check_gito_command(self, method, command, params):
# Initialize subprocess mock.
self.subprocess.call_output_args = None # pylint: disable=attribute-defined-outside-init
- self.subprocess.call_output = str(method)
+ self.subprocess.call_output = str(method).encode("utf-8")
self.subprocess.call_returncode = 0
# Call method.
value = method(params)
@@ -82,7 +80,7 @@ class MockSubprocess(object):
def __init__(self):
self.call_args = None
self.call_returncode = 0
- self.call_output = ""
+ self.call_output = b""
def Popen(self, args, **kwargs): # pylint: disable=invalid-name,unused-argument
self.call_args = args
@@ -95,4 +93,4 @@ class MockProcess(object):
self._output = output
def communicate(self):
- return self._output, ""
+ return self._output, b""
diff --git a/buildscripts/tests/test_remote_operations.py b/buildscripts/tests/test_remote_operations.py
index f119e4ffb8a..696faca8def 100755
--- a/buildscripts/tests/test_remote_operations.py
+++ b/buildscripts/tests/test_remote_operations.py
@@ -4,8 +4,6 @@
Note - Tests require sshd to be enabled on localhost with passwordless login
and can fail otherwise."""
-from __future__ import absolute_import
-
import os
import shutil
import tempfile
diff --git a/buildscripts/tests/test_update_test_lifecycle.py b/buildscripts/tests/test_update_test_lifecycle.py
index c6fa0fce86a..60c31322f5f 100644
--- a/buildscripts/tests/test_update_test_lifecycle.py
+++ b/buildscripts/tests/test_update_test_lifecycle.py
@@ -2,8 +2,6 @@
Tests for buildscripts/update_test_lifecycle.py.
"""
-from __future__ import absolute_import
-
import collections
import copy
import datetime
@@ -593,10 +591,10 @@ class TestUpdateTags(unittest.TestCase): # pylint: disable=too-many-public-meth
config = self.CONFIG._replace(
test_fail_rates=self.CONFIG.test_fail_rates._replace(unacceptable=0.1))
- self.transition_from_reliable_to_unreliable(config,
- collections.OrderedDict([
- ("jstests/core/all.js", ["unreliable"]),
- ]))
+ self.transition_from_reliable_to_unreliable(
+ config, collections.OrderedDict([
+ ("jstests/core/all.js", ["unreliable"]),
+ ]))
def test_transition_task_from_reliable_to_unreliable(self):
"""
@@ -607,11 +605,10 @@ class TestUpdateTags(unittest.TestCase): # pylint: disable=too-many-public-meth
config = self.CONFIG._replace(
task_fail_rates=self.CONFIG.task_fail_rates._replace(unacceptable=0.1))
- self.transition_from_reliable_to_unreliable(config,
- collections.OrderedDict([
- ("jstests/core/all.js",
- ["unreliable|jsCore_WT"]),
- ]))
+ self.transition_from_reliable_to_unreliable(
+ config, collections.OrderedDict([
+ ("jstests/core/all.js", ["unreliable|jsCore_WT"]),
+ ]))
def test_transition_variant_from_reliable_to_unreliable(self):
"""
@@ -622,11 +619,11 @@ class TestUpdateTags(unittest.TestCase): # pylint: disable=too-many-public-meth
config = self.CONFIG._replace(
variant_fail_rates=self.CONFIG.variant_fail_rates._replace(unacceptable=0.1))
- self.transition_from_reliable_to_unreliable(config,
- collections.OrderedDict([
- ("jstests/core/all.js",
- ["unreliable|jsCore_WT|linux-64"]),
- ]))
+ self.transition_from_reliable_to_unreliable(
+ config,
+ collections.OrderedDict([
+ ("jstests/core/all.js", ["unreliable|jsCore_WT|linux-64"]),
+ ]))
def test_transition_distro_from_reliable_to_unreliable(self):
"""
@@ -637,11 +634,11 @@ class TestUpdateTags(unittest.TestCase): # pylint: disable=too-many-public-meth
config = self.CONFIG._replace(
distro_fail_rates=self.CONFIG.distro_fail_rates._replace(unacceptable=0.1))
- self.transition_from_reliable_to_unreliable(config,
- collections.OrderedDict([
- ("jstests/core/all.js",
- ["unreliable|jsCore_WT|linux-64|rhel62"]),
- ]))
+ self.transition_from_reliable_to_unreliable(
+ config,
+ collections.OrderedDict([
+ ("jstests/core/all.js", ["unreliable|jsCore_WT|linux-64|rhel62"]),
+ ]))
def test_transition_from_reliable_to_unreliable(self):
"""
@@ -654,15 +651,16 @@ class TestUpdateTags(unittest.TestCase): # pylint: disable=too-many-public-meth
variant_fail_rates=self.CONFIG.variant_fail_rates._replace(unacceptable=0.1),
distro_fail_rates=self.CONFIG.distro_fail_rates._replace(unacceptable=0.1))
- self.transition_from_reliable_to_unreliable(config,
- collections.OrderedDict([
- ("jstests/core/all.js", [
- "unreliable",
- "unreliable|jsCore_WT",
- "unreliable|jsCore_WT|linux-64",
- "unreliable|jsCore_WT|linux-64|rhel62",
- ]),
- ]))
+ self.transition_from_reliable_to_unreliable(
+ config,
+ collections.OrderedDict([
+ ("jstests/core/all.js", [
+ "unreliable",
+ "unreliable|jsCore_WT",
+ "unreliable|jsCore_WT|linux-64",
+ "unreliable|jsCore_WT|linux-64|rhel62",
+ ]),
+ ]))
def transition_from_unreliable_to_reliable(self, config, initial_tags):
"""
@@ -734,9 +732,10 @@ class TestUpdateTags(unittest.TestCase): # pylint: disable=too-many-public-meth
utl.update_tags(summary_lifecycle, config, test_history)
updated_tags = self.assert_has_only_js_tests(lifecycle)
# The tags for variant and distro have been removed.
- self.assertEqual(updated_tags,
- collections.OrderedDict([("jstests/core/all.js",
- ["unreliable", "unreliable|jsCore_WT"])]))
+ self.assertEqual(
+ updated_tags,
+ collections.OrderedDict([("jstests/core/all.js", ["unreliable",
+ "unreliable|jsCore_WT"])]))
def test_non_running_at_all_is_reliable(self):
"""
@@ -780,10 +779,10 @@ class TestUpdateTags(unittest.TestCase): # pylint: disable=too-many-public-meth
config = self.CONFIG._replace(
test_fail_rates=self.CONFIG.test_fail_rates._replace(acceptable=0.9))
- self.transition_from_unreliable_to_reliable(config,
- collections.OrderedDict([
- ("jstests/core/all.js", ["unreliable"]),
- ]))
+ self.transition_from_unreliable_to_reliable(
+ config, collections.OrderedDict([
+ ("jstests/core/all.js", ["unreliable"]),
+ ]))
def test_transition_task_from_unreliable_to_reliable(self):
"""
@@ -794,11 +793,10 @@ class TestUpdateTags(unittest.TestCase): # pylint: disable=too-many-public-meth
config = self.CONFIG._replace(
task_fail_rates=self.CONFIG.task_fail_rates._replace(acceptable=0.9))
- self.transition_from_unreliable_to_reliable(config,
- collections.OrderedDict([
- ("jstests/core/all.js",
- ["unreliable|jsCore_WT"]),
- ]))
+ self.transition_from_unreliable_to_reliable(
+ config, collections.OrderedDict([
+ ("jstests/core/all.js", ["unreliable|jsCore_WT"]),
+ ]))
def test_transition_variant_from_unreliable_to_reliable(self):
"""
@@ -809,11 +807,11 @@ class TestUpdateTags(unittest.TestCase): # pylint: disable=too-many-public-meth
config = self.CONFIG._replace(
variant_fail_rates=self.CONFIG.variant_fail_rates._replace(acceptable=0.9))
- self.transition_from_unreliable_to_reliable(config,
- collections.OrderedDict([
- ("jstests/core/all.js",
- ["unreliable|jsCore_WT|linux-64"]),
- ]))
+ self.transition_from_unreliable_to_reliable(
+ config,
+ collections.OrderedDict([
+ ("jstests/core/all.js", ["unreliable|jsCore_WT|linux-64"]),
+ ]))
def test_transition_distro_from_unreliable_to_reliable(self):
"""
@@ -824,11 +822,11 @@ class TestUpdateTags(unittest.TestCase): # pylint: disable=too-many-public-meth
config = self.CONFIG._replace(
distro_fail_rates=self.CONFIG.distro_fail_rates._replace(acceptable=0.9))
- self.transition_from_unreliable_to_reliable(config,
- collections.OrderedDict([
- ("jstests/core/all.js",
- ["unreliable|jsCore_WT|linux-64|rhel62"]),
- ]))
+ self.transition_from_unreliable_to_reliable(
+ config,
+ collections.OrderedDict([
+ ("jstests/core/all.js", ["unreliable|jsCore_WT|linux-64|rhel62"]),
+ ]))
def test_transition_from_unreliable_to_reliable(self):
"""
@@ -842,15 +840,16 @@ class TestUpdateTags(unittest.TestCase): # pylint: disable=too-many-public-meth
variant_fail_rates=self.CONFIG.variant_fail_rates._replace(acceptable=0.9),
distro_fail_rates=self.CONFIG.distro_fail_rates._replace(acceptable=0.9))
- self.transition_from_unreliable_to_reliable(config,
- collections.OrderedDict([
- ("jstests/core/all.js", [
- "unreliable",
- "unreliable|jsCore_WT",
- "unreliable|jsCore_WT|linux-64",
- "unreliable|jsCore_WT|linux-64|rhel62",
- ]),
- ]))
+ self.transition_from_unreliable_to_reliable(
+ config,
+ collections.OrderedDict([
+ ("jstests/core/all.js", [
+ "unreliable",
+ "unreliable|jsCore_WT",
+ "unreliable|jsCore_WT|linux-64",
+ "unreliable|jsCore_WT|linux-64|rhel62",
+ ]),
+ ]))
def test_remain_reliable(self):
"""
@@ -936,15 +935,16 @@ class TestUpdateTags(unittest.TestCase): # pylint: disable=too-many-public-meth
distro_fail_rates=self.CONFIG.distro_fail_rates._replace(acceptable=0.9),
reliable_min_runs=100)
- self.transition_from_unreliable_to_reliable(config,
- collections.OrderedDict([
- ("jstests/core/all.js", [
- "unreliable",
- "unreliable|jsCore_WT",
- "unreliable|jsCore_WT|linux-64",
- "unreliable|jsCore_WT|linux-64|rhel62",
- ]),
- ]))
+ self.transition_from_unreliable_to_reliable(
+ config,
+ collections.OrderedDict([
+ ("jstests/core/all.js", [
+ "unreliable",
+ "unreliable|jsCore_WT",
+ "unreliable|jsCore_WT|linux-64",
+ "unreliable|jsCore_WT|linux-64|rhel62",
+ ]),
+ ]))
def test_obeys_unreliable_min_runs(self):
"""
@@ -1027,7 +1027,7 @@ class MockEvergreenConfig(object):
def __init__(self, tasks, variants):
self.task_names = tasks
self.variants = {}
- for name, fields in variants.items():
+ for name, fields in list(variants.items()):
self.variants[name] = MockVariant(fields["tasks"], fields["distros"])
def get_variant(self, variant_name):
@@ -1089,8 +1089,8 @@ class TestJiraIssueCreator(unittest.TestCase):
desc += "a"
self.assertTrue(
- len(utl.JiraIssueCreator._truncate_description(desc)) <=
- utl.JiraIssueCreator._MAX_DESCRIPTION_SIZE)
+ len(utl.JiraIssueCreator._truncate_description(desc)) <= utl.JiraIssueCreator.
+ _MAX_DESCRIPTION_SIZE)
class TestTagsConfigWithChangelog(unittest.TestCase):
diff --git a/buildscripts/tests/util/test_read_config.py b/buildscripts/tests/util/test_read_config.py
index c3887fcd8ec..a8109c4d4a7 100644
--- a/buildscripts/tests/util/test_read_config.py
+++ b/buildscripts/tests/util/test_read_config.py
@@ -1,7 +1,5 @@
"""Unit tests for the util/read_config.py file."""
-from __future__ import absolute_import
-
import unittest
import mock
diff --git a/buildscripts/tests/util/test_taskname.py b/buildscripts/tests/util/test_taskname.py
index cf2811c7bca..371e1f95d02 100644
--- a/buildscripts/tests/util/test_taskname.py
+++ b/buildscripts/tests/util/test_taskname.py
@@ -1,7 +1,5 @@
"""Unit tests for the util/taskname.py script."""
-from __future__ import absolute_import
-
import unittest
from buildscripts.util import taskname as taskname
diff --git a/buildscripts/tests/util/test_testname.py b/buildscripts/tests/util/test_testname.py
index b983c7dbd0f..c37f754ba33 100644
--- a/buildscripts/tests/util/test_testname.py
+++ b/buildscripts/tests/util/test_testname.py
@@ -1,7 +1,5 @@
"""Unit test for the util.testname module."""
-from __future__ import absolute_import
-
import unittest
import buildscripts.util.testname as testname_utils
diff --git a/buildscripts/tests/util/test_time.py b/buildscripts/tests/util/test_time.py
index 3210eae2ed8..78fc59bc01f 100644
--- a/buildscripts/tests/util/test_time.py
+++ b/buildscripts/tests/util/test_time.py
@@ -1,7 +1,5 @@
"""Unit test for the util.time module."""
-from __future__ import absolute_import
-
import unittest
import util.time as time_utils
diff --git a/buildscripts/update_test_lifecycle.py b/buildscripts/update_test_lifecycle.py
index ab0044e784d..4f8d102a736 100755
--- a/buildscripts/update_test_lifecycle.py
+++ b/buildscripts/update_test_lifecycle.py
@@ -4,9 +4,6 @@
Update etc/test_lifecycle.yml to tag unreliable tests based on historic failure rates.
"""
-from __future__ import absolute_import
-from __future__ import division
-
import collections
import datetime
import itertools
@@ -39,11 +36,7 @@ from buildscripts.util import testname
# pylint: disable=too-many-lines
LOGGER = logging.getLogger(__name__)
-
-if sys.version_info[0] == 2:
- _NUMBER_TYPES = (int, long, float)
-else:
- _NUMBER_TYPES = (int, float)
+_NUMBER_TYPES = (int, float)
Rates = collections.namedtuple("Rates", ["acceptable", "unacceptable"])
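
Python 3 folds long into a single arbitrary-precision int, so the version-dependent branch collapses to one tuple. Illustrative:

    _NUMBER_TYPES = (int, float)        # Python 2 also needed long here

    assert isinstance(10 ** 100, int)   # big values are still plain int
    assert isinstance(0.25, _NUMBER_TYPES)
    assert not isinstance("1", _NUMBER_TYPES)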
@@ -90,7 +83,7 @@ def get_test_tasks_membership(evg_conf):
test_suites_membership = resmokelib.suitesconfig.create_test_membership_map(test_kind="js_test")
suite_tasks_membership = get_suite_tasks_membership(evg_conf)
test_tasks_membership = collections.defaultdict(list)
- for test in test_suites_membership.keys():
+ for test in list(test_suites_membership.keys()):
for suite in test_suites_membership[test]:
test_tasks_membership[test].extend(suite_tasks_membership[suite])
return test_tasks_membership
@@ -100,7 +93,7 @@ def get_tests_from_tasks(tasks, test_tasks_membership):
"""Return a list of tests from list of specified tasks."""
tests = []
tasks_set = set(tasks)
- for test in test_tasks_membership.keys():
+ for test in list(test_tasks_membership.keys()):
if not tasks_set.isdisjoint(test_tasks_membership[test]):
tests.append(test)
return tests
@@ -202,9 +195,6 @@ class TestCombination(object):
def __ne__(self, other):
return not self.__eq__(other)
- def __cmp__(self, other):
- return cmp(self.as_tuple(), other.as_tuple())
-
def __hash__(self):
return hash(self.as_tuple())
@@ -409,7 +399,9 @@ def update_lifecycle( # pylint: disable=too-many-arguments
def compare_tags(tag_a, tag_b):
"""Return 1, -1 or 0 if 'tag_a' is superior, inferior or equal to 'tag_b'."""
- return cmp(tag_a.split("|"), tag_b.split("|"))
+ a_split = tag_a.split("|")
+ b_split = tag_b.split("|")
+ return (a_split > b_split) - (a_split < b_split)
def validate_config(config): # pylint: disable=too-many-branches
diff --git a/buildscripts/util/runcommand.py b/buildscripts/util/runcommand.py
index edf9e99816c..39dde68516e 100644
--- a/buildscripts/util/runcommand.py
+++ b/buildscripts/util/runcommand.py
@@ -1,29 +1,13 @@
"""Utility to support running a command in a subprocess."""
-from __future__ import print_function
-
import os
import pipes
import shlex
import sys
+import subprocess
from . import fileops
-# The subprocess32 module is untested on Windows and thus isn't recommended for use, even when it's
-# installed. See https://github.com/google/python-subprocess32/blob/3.2.7/README.md#usage.
-if os.name == "posix" and sys.version_info[0] == 2:
- try:
- import subprocess32 as subprocess
- except ImportError:
- import warnings
- warnings.warn(("Falling back to using the subprocess module because subprocess32 isn't"
- " available. When using the subprocess module, a child process may"
- " trigger an invalid free(). See SERVER-22219 for more details."),
- RuntimeWarning)
- import subprocess # type: ignore
-else:
- import subprocess
-
class RunCommand(object):
"""Class to abstract executing a subprocess."""
@@ -75,7 +59,7 @@ class RunCommand(object):
def execute_with_output(self):
"""Execute the command, return result as a string."""
- return subprocess.check_output(self._cmd_list())
+ return subprocess.check_output(self._cmd_list()).decode('utf-8')
def execute_save_output(self):
"""Execute the command, save result in 'self.output_file' and return returncode."""
diff --git a/buildscripts/utils.py b/buildscripts/utils.py
index 5073b26ad85..57d3f5da3a2 100644
--- a/buildscripts/utils.py
+++ b/buildscripts/utils.py
@@ -99,7 +99,7 @@ def get_git_describe():
with open(os.devnull, "r+") as devnull:
proc = subprocess.Popen("git describe", stdout=subprocess.PIPE, stderr=devnull,
stdin=devnull, shell=True)
- return proc.communicate()[0].strip()
+ return proc.communicate()[0].strip().decode('utf-8')
def execsys(args):
@@ -130,11 +130,10 @@ def which(executable):
return executable
-def find_python(min_version=(2, 5)):
+def find_python(min_version=(3, 7)):
"""Return path of python."""
try:
- if sys.version_info >= min_version:
- return sys.executable
+ return sys.executable
except AttributeError:
# In case the version of Python is somehow missing sys.version_info or sys.executable.
pass
@@ -154,8 +153,8 @@ def find_python(min_version=(2, 5)):
except Exception: # pylint: disable=broad-except
pass
- raise Exception("could not find suitable Python (version >= %s)" % ".".join(
- str(v) for v in min_version))
+ raise Exception(
+ "could not find suitable Python (version >= %s)" % ".".join(str(v) for v in min_version))
def replace_with_repr(unicode_error):
@@ -166,7 +165,7 @@ def replace_with_repr(unicode_error):
# repr() of the offending bytes into the decoded string
# at the position they occurred
offender = unicode_error.object[unicode_error.start:unicode_error.end]
- return (unicode(repr(offender).strip("'").strip('"')), unicode_error.end)
+ return (str(repr(offender).strip("'").strip('"')), unicode_error.end)
codecs.register_error("repr", replace_with_repr)
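
The handler splices repr() of the offending bytes into the decoded text at the error position; once registered, it is selected by name like any other codec error handler. Usage sketch, assuming replace_with_repr from this file has been defined and registered in the current process:

    # After codecs.register_error("repr", replace_with_repr), undecodable
    # bytes are replaced by their repr() instead of raising UnicodeDecodeError.
    data = b"abc \xff\xfe def"
    text = data.decode("utf-8", errors="repr")
    assert "def" in text   # decoding survives the bad bytes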
diff --git a/buildscripts/validate_mongocryptd.py b/buildscripts/validate_mongocryptd.py
index 1c334bcb1d1..2b8ba49c8b9 100644
--- a/buildscripts/validate_mongocryptd.py
+++ b/buildscripts/validate_mongocryptd.py
@@ -66,8 +66,9 @@ def main():
if not args.variant in expected_variants:
print("ERROR: Expected to find variant %s in list %s" % (args.variant, expected_variants),
file=sys.stderr)
- print("ERROR: Please add the build variant %s to the %s list in '%s'" %
- (args.variant, MONGOCRYPTD_VARIANTS, args.file), file=sys.stderr)
+ print(
+ "ERROR: Please add the build variant %s to the %s list in '%s'" %
+ (args.variant, MONGOCRYPTD_VARIANTS, args.file), file=sys.stderr)
sys.exit(1)
sys.exit(0)
diff --git a/buildscripts/yaml_key_value.py b/buildscripts/yaml_key_value.py
index 27e975433d4..848e1daffd9 100755
--- a/buildscripts/yaml_key_value.py
+++ b/buildscripts/yaml_key_value.py
@@ -1,8 +1,6 @@
#!/usr/bin/env python
"""Utility to return YAML value from key in YAML file."""
-from __future__ import print_function
-
import optparse
import yaml
diff --git a/docs/building.md b/docs/building.md
index 0349e0d38c0..2d902ed7238 100644
--- a/docs/building.md
+++ b/docs/building.md
@@ -10,7 +10,7 @@ To build MongoDB, you will need:
* On Linux and macOS, the libcurl library and headers are required. macOS includes libcurl.
* Fedora/RHEL - `dnf install libcurl-devel`
* Ubuntu/Debian - `apt-get install libcurl-dev`
-* Python 2.7.x and Pip modules:
+* Python 3.7.x and Pip modules:
* See the section "Python Prerequisites" below.
MongoDB supports the following architectures: arm64, ppc64le, s390x, and x86-64.
@@ -28,13 +28,13 @@ The source for the tools is now available at [mongodb/mongo-tools](https://githu
Python Prerequisites
---------------
-In order to build MongoDB, Python 2.7.x is required, and several Python modules. To install
+In order to build MongoDB, Python 3.7.x and several Python modules are required. To install
the required Python modules, run:
- $ pip2 install -r etc/pip/compile-requirements.txt
+ $ pip3 install -r etc/pip/compile-requirements.txt
-Note: If the `pip2` command is not available, `pip` without a suffix may be the pip command
-associated with Python 2.7.x.
+Note: If the `pip3` command is not available, `pip` without a suffix may be the pip command
+associated with Python 3.7.x.
SCons
---------------
@@ -43,19 +43,19 @@ For detail information about building, please see [the build manual](https://git
If you want to build everything (mongod, mongo, tests, etc):
- $ python2 buildscripts/scons.py all
+ $ python3 buildscripts/scons.py all
If you only want to build the database:
- $ python2 buildscripts/scons.py mongod
+ $ python3 buildscripts/scons.py mongod
***Note***: For C++ compilers that are newer than the supported version, the compiler may issue new warnings that cause MongoDB to fail to build since the build system treats compiler warnings as errors. To ignore the warnings, pass the switch `--disable-warnings-as-errors` to scons.
- $ python2 buildscripts/scons.py mongod --disable-warnings-as-errors
+ $ python3 buildscripts/scons.py mongod --disable-warnings-as-errors
To install
- $ python2 buildscripts/scons.py --prefix=/opt/mongo install
+ $ python3 buildscripts/scons.py --prefix=/opt/mongo install
Please note that prebuilt binaries are available on [mongodb.org](http://www.mongodb.org/downloads) and may be the easiest way to get started.
@@ -77,7 +77,7 @@ See [the windows build manual](https://github.com/mongodb/mongo/wiki/Build-Mongo
Build requirements:
* Visual Studio 2017 version 15.9 or newer
-* Python 2.7, ActiveState ActivePython 2.7.x Community Edition for Windows is recommended
+* Python 3.7
Or download a prebuilt binary for Windows at www.mongodb.org.
diff --git a/etc/evergreen.yml b/etc/evergreen.yml
index 93909e36922..fe2c2394004 100644
--- a/etc/evergreen.yml
+++ b/etc/evergreen.yml
@@ -292,8 +292,7 @@ variables:
# We invoke SCons using --jobs = (# of CPUs / 4) to avoid causing out of memory errors due to
# spawning a large number of linker processes.
num_scons_link_jobs_available: $(( $(grep -c ^processor /proc/cpuinfo) / 4 ))
- python: python
- python3: '/cygdrive/c/python/python36/python.exe'
+ python: '/cygdrive/c/python/python36/python.exe'
ext: zip
use_scons_cache: true
multiversion_platform: windows
@@ -685,9 +684,9 @@ functions:
chmod 0600 ${private_key_file|/dev/null}
fi
- if [ ! -d ~.aws ]; then
- mkdir -p ~/.aws
- fi
+ # Ensure a clean aws configuration state
+ rm -rf ~/.aws
+ mkdir -p ~/.aws
# If ${aws_profile_remote} is not specified then the config & credentials are
# stored in the 'default' profile.
@@ -795,7 +794,7 @@ functions:
set -o errexit
set -o verbose
if [ "${build_openssl|}" = "true" ]; then
- bash buildscripts/fetch_and_build_openssl.sh "${python|python}" "${openssl_make_flags|}" "${openssl_config_flags|}"
+ bash buildscripts/fetch_and_build_openssl.sh "${python|python3}" "${openssl_make_flags|}" "${openssl_config_flags|}"
fi
"use WiredTiger develop":
@@ -1058,7 +1057,7 @@ functions:
set -o errexit
${activate_virtualenv}
- python2 -m pip install -r etc/pip/evgtest-requirements.txt
+ python -m pip install -r etc/pip/evgtest-requirements.txt
"determine resmoke jobs": &determine_resmoke_jobs
command: shell.exec
@@ -1183,30 +1182,19 @@ functions:
virtualenv_loc=$(which ${virtualenv|virtualenv})
- python2_loc=$(which ${python|/opt/mongodbtoolchain/v3/bin/python2})
- python3_loc=$(which ${python3|/opt/mongodbtoolchain/v3/bin/python3})
- venv2_dir="${workdir}/venv"
- venv3_dir="${workdir}/venv_3"
+ python_loc=$(which ${python|/opt/mongodbtoolchain/v3/bin/python3})
+ venv_dir="${workdir}/venv"
if command -V cygpath; then
# Sad note: We have to use the Windows path instead of the posix path here.
# Otherwise, virtualenv may mistakenly resolve paths relative to c:\cygdrive.
- python2_loc=$(cygpath -w $python2_loc)
- python3_loc=$(cygpath -w $python3_loc)
- venv2_dir="$(cygpath -w "$venv2_dir")"
- venv3_dir="$(cygpath -w "$venv3_dir")"
+ python_loc=$(cygpath -w $python_loc)
+ venv_dir="$(cygpath -w "$venv_dir")"
fi
# Set up virtualenvs in ${workdir}
- "$virtualenv_loc" --python "$python2_loc" --system-site-packages "$venv2_dir"
- "$virtualenv_loc" --python "$python3_loc" --system-site-packages "$venv3_dir"
+ "$virtualenv_loc" --python "$python_loc" --system-site-packages "$venv_dir"
- # Link python2/python3 in the primary virtualenv bin dir
export VIRTUAL_ENV_DISABLE_PROMPT=yes
- venv2_bin="$(dirname "$(cd "$venv2_dir"; . ./*/activate; which python)")"
- py2_exe="$(cd "$venv2_dir"; . ./*/activate; which python)"
- py3_exe="$(cd "$venv3_dir"; . ./*/activate; which python)"
- if [[ ! -f $venv2_bin/python2 ]]; then ln -sfv "$py2_exe" "$venv2_bin/python2"; fi
- if [[ ! -f $venv2_bin/python3 ]]; then ln -sfv "$py3_exe" "$venv2_bin/python3"; fi
"execute resmoke tests": &execute_resmoke_tests
command: shell.exec
@@ -1500,7 +1488,7 @@ functions:
${activate_virtualenv}
- python2 -m pip install -r ./etc/pip/compile-requirements.txt
+ python -m pip install -r ./etc/pip/compile-requirements.txt
${compile_env|} $python ./buildscripts/scons.py \
${compile_flags|} ${task_compile_flags|} ${task_compile_flags_extra|} \
${scons_cache_args|} $extra_args \
@@ -1952,8 +1940,6 @@ functions:
set -o verbose
${activate_virtualenv}
- # The Windows build variants are running python 2.7.3 and require TLS 1.2 from pyOpenSSL
- python2 -m pip install 'pyOpenSSL ; sys_platform == "win32" or sys_platform == "cygwin"'
rm -rf /data/install /data/multiversion
$python buildscripts/setup_multiversion_mongodb.py \
@@ -2158,7 +2144,7 @@ functions:
set -o errexit
${activate_virtualenv}
- python2 -m pip install -r etc/pip/powercycle-requirements.txt
+ python -m pip install -r etc/pip/powercycle-requirements.txt
if [ ! -z "${subnet_id}" ]; then
subnet_id="-n ${subnet_id}"
@@ -2310,7 +2296,7 @@ functions:
script: |
set -o errexit
# Set up virtualenv on remote.
- cmds="python_loc=\$(which \${python|/opt/mongodbtoolchain/v3/bin/python2})"
+ cmds="python_loc=\$(which \${python|/opt/mongodbtoolchain/v3/bin/python3})"
cmds="$cmds; remote_dir=${remote_dir|.}"
cmds="$cmds; if [ \"Windows_NT\" = \"$OS\" ]; then python_loc=\$(cygpath -w \$python_loc); remote_dir=\$(cygpath -w \$remote_dir); fi"
cmds="$cmds; virtualenv --python \$python_loc --system-site-packages ${virtualenv_dir|venv}"
@@ -2773,30 +2759,15 @@ functions:
fi
python=python
else
- python=${python|/opt/mongodbtoolchain/v3/bin/python2}
+ python=${python|/opt/mongodbtoolchain/v3/bin/python3}
fi
- echo "python set to $(which $python)"
- - key: activate_virtualenv_3
- value: |
- # check if virtualenv for python3 is set up
- if [ -d "${workdir}/venv_3" ]; then
- if [ "Windows_NT" = "$OS" ]; then
- # Need to quote the path on Windows to preserve the separator.
- . "${workdir}/venv_3/Scripts/activate" 2> /tmp/activate_error.log
- else
- . ${workdir}/venv_3/bin/activate 2> /tmp/activate_error.log
- fi
- if [ $? -ne 0 ]; then
- echo "Failed to activate virtualenv: $(cat /tmp/activate_error.log)"
- fi
- python=python
+
+ if [ "Windows_NT" = "$OS" ]; then
+ export PYTHONPATH="$PYTHONPATH;$(cygpath -w ${workdir}/src)"
else
- if [ "Windows_NT" = "$OS" ]; then
- python=/cygdrive/c/python/python36/python
- else
- python=${python3|/opt/mongodbtoolchain/v3/bin/python3}
- fi
+ export PYTHONPATH="$PYTHONPATH:${workdir}/src"
fi
+
echo "python set to $(which $python)"
- key: posix_workdir
value: eval 'if [ "Windows_NT" = "$OS" ]; then echo $(cygpath -u "${workdir}"); else echo ${workdir}; fi'
@@ -4594,8 +4565,7 @@ tasks:
set -o verbose
${activate_virtualenv}
- python3 -m pip install -I -r etc/pip/lint-requirements.txt
- python2 -m pip install -I -r etc/pip/lint-requirements.txt
+ python -m pip install -I -r etc/pip/lint-requirements.txt
export MYPY="$(
if command -V cygpath 2>/dev/null; then
PATH+=":$(cypath "${workdir}")/venv_3/Scripts"
@@ -4606,7 +4576,7 @@ tasks:
which mypy
)"
echo "Found mypy executable at '$MYPY'"
- ${compile_env|} python2 ./buildscripts/scons.py ${compile_flags|} --stack-size=1024 lint
+ ${compile_env|} python3 ./buildscripts/scons.py ${compile_flags|} --stack-size=1024 lint
- name: lint_yaml
depends_on: []
@@ -4646,22 +4616,17 @@ tasks:
${activate_virtualenv}
# This installs the explicit project versions which would be installed in the toolchain
# from this patch
- python2 -m pip install -I -r etc/pip/constraints.txt
- python3 -m pip install -I -r etc/pip/constraints.txt
+ python -m pip install -I -r etc/pip/constraints.txt
- python2 -m pip freeze >requirements.txt.python2.old
- python3 -m pip freeze >requirements.txt.python3.old
+ python -m pip freeze >requirements.txt.python3.old
# This installs any requirements which are unsatisfied by the constraints.txt above
- python2 -m pip install -r etc/pip/toolchain-requirements.txt
- python3 -m pip install -r etc/pip/toolchain-requirements.txt
+ python -m pip install -r etc/pip/toolchain-requirements.txt
- python2 -m pip freeze >requirements.txt.python2.new
- python3 -m pip freeze >requirements.txt.python3.new
+ python -m pip freeze >requirements.txt.python3.new
# Compare the old freezes to the new freezes.
# They should be the same if our constraints satisfy our toolchain requirements
- diff -w requirements.txt.python2.old requirements.txt.python2.new
diff -w requirements.txt.python3.old requirements.txt.python3.new
### This task is deprecated, but left in here in case of need to run burn_in_tests
@@ -6812,8 +6777,8 @@ tasks:
# We happen to pre-seed the core requirements in the toolchain,
# but we have no way of doing so to Windows python3.6 installed via Chocolatey.
if command -V cygpath; then
- echo "Installing core requirements into Python at '${python3}'"
- "$(cygpath "${python3}")" -mpip install -r etc/pip/core-requirements.txt
+ echo "Installing core requirements into Python at '${python}'"
+ "$(cygpath "${python}")" -mpip install -r etc/pip/core-requirements.txt
fi
- func: "run tests"
vars:
@@ -7651,7 +7616,7 @@ tasks:
set -o verbose
${activate_virtualenv}
- python2 -m pip install -r etc/pip/jira-requirements.txt
+ python -m pip install -r etc/pip/jira-requirements.txt
# We use a small batch size to avoid hitting the load balancer timeout if the Evergreen
# API query is not fast enough.
@@ -9366,8 +9331,7 @@ buildvariants:
# We invoke SCons using --jobs = (# of CPUs / 4) to avoid causing out of memory errors due to
# spawning a large number of linker processes.
num_scons_link_jobs_available: $(( $(grep -c ^processor /proc/cpuinfo) / 4 ))
- python: python
- python3: '/cygdrive/c/python/python36/python.exe'
+ python: '/cygdrive/c/python/python36/python.exe'
ext: zip
use_scons_cache: true
multiversion_platform: windows
@@ -9554,8 +9518,7 @@ buildvariants:
# We invoke SCons using --jobs = (# of CPUs / 4) to avoid causing out of memory errors due to
# spawning a large number of linker processes.
num_scons_link_jobs_available: $(( $(grep -c ^processor /proc/cpuinfo) / 4 ))
- python: python
- python3: '/cygdrive/c/python/python36/python.exe'
+ python: '/cygdrive/c/python/python36/python.exe'
ext: zip
use_scons_cache: true
multiversion_platform: windows
@@ -9681,8 +9644,7 @@ buildvariants:
# We invoke SCons using --jobs = (# of CPUs / 4) to avoid causing out of memory errors due to
# spawning a large number of linker processes.
num_scons_link_jobs_available: $(( $(grep -c ^processor /proc/cpuinfo) / 4 ))
- python: python
- python3: '/cygdrive/c/python/python36/python.exe'
+ python: '/cygdrive/c/python/python36/python.exe'
ext: zip
use_scons_cache: true
test_flags: |- # Use the ServiceExecutorAdaptive with a reasonable number of starting threads
@@ -9715,8 +9677,7 @@ buildvariants:
# We invoke SCons using --jobs = (# of CPUs / 4) to avoid causing out of memory errors due to
# spawning a large number of linker processes.
num_scons_link_jobs_available: $(( $(grep -c ^processor /proc/cpuinfo) / 4 ))
- python: python
- python3: '/cygdrive/c/python/python36/python.exe'
+ python: '/cygdrive/c/python/python36/python.exe'
test_flags: --storageEngine=inMemory --excludeWithAnyTags=requires_persistence,requires_journaling,uses_transactions
ext: zip
use_scons_cache: true
@@ -9792,8 +9753,7 @@ buildvariants:
# We invoke SCons using --jobs = (# of CPUs / 4) to avoid causing out of memory errors due to
# spawning a large number of linker processes.
num_scons_link_jobs_available: $(( $(grep -c ^processor /proc/cpuinfo) / 4 ))
- python: python
- python3: '/cygdrive/c/python/python36/python.exe'
+ python: '/cygdrive/c/python/python36/python.exe'
ext: zip
use_scons_cache: true
tooltags: "ssl"
@@ -9913,8 +9873,7 @@ buildvariants:
# We invoke SCons using --jobs = (# of CPUs / 4) to avoid causing out of memory errors due to
# spawning a large number of linker processes.
num_scons_link_jobs_available: $(( $(grep -c ^processor /proc/cpuinfo) / 4 ))
- python: python
- python3: '/cygdrive/c/python/python36/python.exe'
+ python: '/cygdrive/c/python/python36/python.exe'
ext: zip
use_scons_cache: true
tooltags: "ssl sasl"
diff --git a/etc/pip/components/compile.req b/etc/pip/components/compile.req
index 9f15201c3f5..34a8ff66e0f 100644
--- a/etc/pip/components/compile.req
+++ b/etc/pip/components/compile.req
@@ -1,5 +1,4 @@
# Mongo compile
-Cheetah3; python_version < "3" # src/mongo/base/generate_error_codes.py
+Cheetah3 # src/mongo/base/generate_error_codes.py
psutil
regex
-typing; python_version < "3"
diff --git a/etc/pip/components/core.req b/etc/pip/components/core.req
index 1ddc9d1d8e9..549c6a764db 100644
--- a/etc/pip/components/core.req
+++ b/etc/pip/components/core.req
@@ -1,5 +1,5 @@
-# Core (we need these for most builscripts)
+# Core (we need these for most buildscripts)
PyYAML >= 3.0.0
requests >= 2.0.0
-pymongo >= 3.0, < 3.6.0 # See PYTHON-1434, SERVER-34820
+pymongo >= 3.0, != 3.6.0 # See PYTHON-1434, SERVER-34820
psutil
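
Note on the pymongo specifier change: '< 3.6.0' blocked every release from 3.6.0 on, while '!= 3.6.0' excludes only that one release. A standalone sketch using the packaging library (assumed installed; pip itself depends on it) to show the difference:

    from packaging.specifiers import SpecifierSet

    old = SpecifierSet('>=3.0,<3.6.0')
    new = SpecifierSet('>=3.0,!=3.6.0')
    print('3.7.2' in old)  # False: the old pin also rejected 3.7.x
    print('3.7.2' in new)  # True: only 3.6.0 itself is excluded now
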
diff --git a/etc/pip/components/lint.req b/etc/pip/components/lint.req
index 6dbef69b999..ed95dcd3363 100644
--- a/etc/pip/components/lint.req
+++ b/etc/pip/components/lint.req
@@ -1,6 +1,6 @@
# Linters
# Note: These versions are checked by python modules in buildscripts/linter/
-pylint == 1.9.3
-yapf == 0.21.0
+pylint == 2.3.1
+yapf == 0.26.0
pydocstyle == 2.1.1
yamllint == 1.15.0
diff --git a/etc/pip/components/platform.req b/etc/pip/components/platform.req
index 26138b57c99..eed1eb67db9 100644
--- a/etc/pip/components/platform.req
+++ b/etc/pip/components/platform.req
@@ -1,4 +1,2 @@
# Platform-specific components
-pypiwin32==219; sys_platform == "win32" and python_version < "3"
-pypiwin32==223; sys_platform == "win32" and python_version > "3"
-subprocess32==3.5.2; os_name == "posix" and platform_release != "2.6.18-194.el5xen" and platform_release != "2.6.18-274.el5xen" and python_version < "3" \ No newline at end of file
+pypiwin32==223; sys_platform == "win32" and python_version > "3" \ No newline at end of file
diff --git a/etc/pip/components/resmoke.req b/etc/pip/components/resmoke.req
index f9bd3ae9adc..7f79f45b164 100644
--- a/etc/pip/components/resmoke.req
+++ b/etc/pip/components/resmoke.req
@@ -1,4 +1,4 @@
-mock; python_version < "3"
-PyKMIP == 0.4.0; python_version < "3" # It's now 0.8.0. We're far enough back to have API conflicts.
+mock
+PyKMIP == 0.4.0 # It's now 0.8.0. We're far enough back to have API conflicts.
jinja2
shrub.py == 0.2.0
\ No newline at end of file
diff --git a/etc/pip/constraints.txt b/etc/pip/constraints.txt
index 739f27919d6..8ed2e5f4bcf 100644
--- a/etc/pip/constraints.txt
+++ b/etc/pip/constraints.txt
@@ -6,35 +6,40 @@
# Common requirements
asn1crypto==0.24.0
-astroid==1.6.5
-boto3==1.9.99
-botocore==1.12.99
-certifi==2018.11.29
-cffi==1.12.1
+astroid==2.2.5
+boto3==1.9.126
+botocore==1.12.126
+certifi==2019.3.9
+cffi==1.12.2
chardet==3.0.4
+Cheetah3==3.2.1
cryptography==2.3
defusedxml==0.5.0
docutils==0.14
+enum34==1.1.6
idna==2.8
-isort==4.3.4
+isort==4.3.16
Jinja2==2.10
jira==2.0.0
-jmespath==0.9.3
+jmespath==0.9.4
lazy-object-proxy==1.3.1
-MarkupSafe==1.1.0
+MarkupSafe==1.1.1
mccabe==0.6.1
+mock==2.0.0
+mypy==0.580
oauthlib==3.0.1
pathspec==0.5.9
-pbr==5.1.2
-psutil==5.5.1
+pbr==5.1.3
+psutil==5.6.1
pycparser==2.19
pydocstyle==2.1.1
PyJWT==1.7.1
-pylint==1.9.3
-pymongo==3.5.1
+PyKMIP==0.4.0
+pylint==2.3.1
+pymongo==3.7.2
python-dateutil==2.8.0
-PyYAML==3.13
-regex==2019.2.21
+PyYAML==5.1
+regex==2019.3.12
requests==2.21.0
requests-oauth==0.4.1
requests-oauthlib==1.2.0
@@ -43,30 +48,13 @@ s3transfer==0.2.0
shrub.py==0.2.0
six==1.12.0
snowballstemmer==1.2.1
+typed-ast==1.1.2
typing==3.6.6
-unittest-xml-reporting==2.2.1
+unittest-xml-reporting==2.5.1
urllib3==1.24.1
wrapt==1.11.1
yamllint==1.15.0
-yapf==0.21.0
-
-# Python2 requirements
-backports.functools-lru-cache==1.5; python_version < "3"
-Cheetah3==3.2.0; python_version < "3"
-configparser==3.7.1; python_version < "3"
-enum34==1.1.6; python_version < "3"
-funcsigs==1.0.2; python_version < "3"
-futures==3.2.0; python_version < "3"
-ipaddress==1.0.22; python_version < "3"
-mock==2.0.0; python_version < "3"
-PyKMIP==0.4.0; python_version < "3"
-singledispatch==3.4.0.3; python_version < "3"
-
-# Python3 requirements
-mypy==0.580; python_version > "3"
-typed-ast==1.1.2; python_version > "3"
+yapf==0.26.0
# Platform-specific components
-pypiwin32==219; sys_platform == "win32" and python_version < "3"
-pypiwin32==223; sys_platform == "win32" and python_version > "3"
-subprocess32==3.5.2; os_name == "posix" and platform_release != "2.6.18-194.el5xen" and platform_release != "2.6.18-274.el5xen" and python_version < "3"
\ No newline at end of file
+pypiwin32==223; sys_platform == "win32" and python_version > "3"
\ No newline at end of file
diff --git a/etc/scons/android_toolchain.vars b/etc/scons/android_toolchain.vars
index 73f3c012e69..14e12cb5709 100644
--- a/etc/scons/android_toolchain.vars
+++ b/etc/scons/android_toolchain.vars
@@ -29,14 +29,14 @@ CC=os.path.join(toolchain_bindir, "clang" + compiler_suffix)
CXX=os.path.join(toolchain_bindir, "clang++" + compiler_suffix)
try:
- AR = subprocess.check_output([CXX, '-print-prog-name=ar']).strip()
- AS = subprocess.check_output([CXX, '-print-prog-name=as']).strip()
- OBJCOPY = subprocess.check_output([CXX, '-print-prog-name=objcopy']).strip()
+ AR = subprocess.check_output([CXX, '-print-prog-name=ar']).decode('utf-8').strip()
+ AS = subprocess.check_output([CXX, '-print-prog-name=as']).decode('utf-8').strip()
+ OBJCOPY = subprocess.check_output([CXX, '-print-prog-name=objcopy']).decode('utf-8').strip()
except subprocess.CalledProcessError as e:
- print("Failed while invoking toolchain binary " + CXX + ": " + e.output)
+ print(("Failed while invoking toolchain binary " + CXX + ": " + e.output))
SCons.Script.Exit(-1)
except OSError as e:
- print("Failed to invoke toolchain binary " + CXX + ": " + str(e))
+ print(("Failed to invoke toolchain binary " + CXX + ": " + str(e)))
SCons.Script.Exit(-1)
LINKFLAGS='-static-libstdc++ -fuse-ld=gold'
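
Why the decode calls are needed here and in the toolchain .vars files below: on Python 3, subprocess.check_output returns bytes, and treating the result as text fails until it is decoded. A minimal standalone sketch (not part of the patch):

    import subprocess

    out = subprocess.check_output(['echo', 'hello'])
    assert isinstance(out, bytes)        # bytes under Python 3
    text = out.decode('utf-8').strip()   # decode before using as str
    print('got: ' + text)
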
diff --git a/etc/scons/mongodbtoolchain_stable_clang.vars b/etc/scons/mongodbtoolchain_stable_clang.vars
index de76357a199..2cc97f0cb4b 100644
--- a/etc/scons/mongodbtoolchain_stable_clang.vars
+++ b/etc/scons/mongodbtoolchain_stable_clang.vars
@@ -1,2 +1 @@
-
-execfile('etc/scons/mongodbtoolchain_v3_clang.vars')
+exec(open('etc/scons/mongodbtoolchain_v3_clang.vars', "rb").read())
diff --git a/etc/scons/mongodbtoolchain_stable_gcc.vars b/etc/scons/mongodbtoolchain_stable_gcc.vars
index a3a799d6fc2..cbda837767b 100644
--- a/etc/scons/mongodbtoolchain_stable_gcc.vars
+++ b/etc/scons/mongodbtoolchain_stable_gcc.vars
@@ -1,2 +1 @@
-
-execfile('etc/scons/mongodbtoolchain_v3_gcc.vars')
+exec(open('etc/scons/mongodbtoolchain_v3_gcc.vars', "rb").read())
diff --git a/etc/scons/mongodbtoolchain_testing_clang.vars b/etc/scons/mongodbtoolchain_testing_clang.vars
index 099cb80764b..8e272d27e6a 100644
--- a/etc/scons/mongodbtoolchain_testing_clang.vars
+++ b/etc/scons/mongodbtoolchain_testing_clang.vars
@@ -1,2 +1 @@
-
-execfile('etc/scons/mongodbtoolchain_stable_clang.vars')
+exec(open('etc/scons/mongodbtoolchain_stable_clang.vars', "rb").read())
diff --git a/etc/scons/mongodbtoolchain_testing_gcc.vars b/etc/scons/mongodbtoolchain_testing_gcc.vars
index c52cd08cd32..a1790bb0d2f 100644
--- a/etc/scons/mongodbtoolchain_testing_gcc.vars
+++ b/etc/scons/mongodbtoolchain_testing_gcc.vars
@@ -1,2 +1 @@
-
-execfile('etc/scons/mongodbtoolchain_stable_gcc.vars')
+exec(open('etc/scons/mongodbtoolchain_stable_gcc.vars', "rb").read())
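
execfile() was removed in Python 3; exec(open(path).read()) is the usual replacement, as in the four .vars files above. Where the pattern recurs, a small shim can restore the old spelling. A standalone sketch (the helper is illustrative, not from the repo):

    def execfile(path, globs=None, locs=None):
        # Python 3 stand-in for the removed built-in; compile() keeps the
        # file name attached so tracebacks still point at the .vars file.
        with open(path, 'rb') as handle:
            exec(compile(handle.read(), path, 'exec'), globs, locs)
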
diff --git a/etc/scons/mongodbtoolchain_v3_clang.vars b/etc/scons/mongodbtoolchain_v3_clang.vars
index a347beb7bca..3901c666079 100644
--- a/etc/scons/mongodbtoolchain_v3_clang.vars
+++ b/etc/scons/mongodbtoolchain_v3_clang.vars
@@ -29,12 +29,12 @@ CC = os.path.join(toolchain_bindir, 'clang')
CXX = os.path.join(toolchain_bindir, 'clang++')
try:
- AR = subprocess.check_output([CXX, '-print-prog-name=ar']).strip()
- AS = subprocess.check_output([CXX, '-print-prog-name=as']).strip()
- OBJCOPY = subprocess.check_output([CXX, '-print-prog-name=objcopy']).strip()
+ AR = subprocess.check_output([CXX, '-print-prog-name=ar']).decode('utf-8').strip()
+ AS = subprocess.check_output([CXX, '-print-prog-name=as']).decode('utf-8').strip()
+ OBJCOPY = subprocess.check_output([CXX, '-print-prog-name=objcopy']).decode('utf-8').strip()
except subprocess.CalledProcessError as e:
- print("Failed while invoking toolchain binary " + CXX + ": " + e.output)
+ print(("Failed while invoking toolchain binary " + CXX + ": " + e.output))
SCons.Script.Exit(-1)
except OSError as e:
- print("Failed to invoke toolchain binary " + CXX + ": " + str(e))
+ print(("Failed to invoke toolchain binary " + CXX + ": " + str(e)))
SCons.Script.Exit(-1)
diff --git a/etc/scons/mongodbtoolchain_v3_gcc.vars b/etc/scons/mongodbtoolchain_v3_gcc.vars
index 9d3803cb412..c3547fd43aa 100644
--- a/etc/scons/mongodbtoolchain_v3_gcc.vars
+++ b/etc/scons/mongodbtoolchain_v3_gcc.vars
@@ -29,12 +29,12 @@ CC = os.path.join(toolchain_bindir, 'gcc')
CXX = os.path.join(toolchain_bindir, 'g++')
try:
- AR = subprocess.check_output([CXX, '-print-prog-name=ar']).strip()
- AS = subprocess.check_output([CXX, '-print-prog-name=as']).strip()
- OBJCOPY = subprocess.check_output([CXX, '-print-prog-name=objcopy']).strip()
+ AR = subprocess.check_output([CXX, '-print-prog-name=ar']).decode('utf-8').strip()
+ AS = subprocess.check_output([CXX, '-print-prog-name=as']).decode('utf-8').strip()
+ OBJCOPY = subprocess.check_output([CXX, '-print-prog-name=objcopy']).decode('utf-8').strip()
except subprocess.CalledProcessError as e:
- print("Failed while invoking toolchain binary " + CXX + ": " + e.output)
+ print(("Failed while invoking toolchain binary " + CXX + ": " + e.output))
SCons.Script.Exit(-1)
except OSError as e:
- print("Failed to invoke toolchain binary " + CXX + ": " + str(e))
+ print(("Failed to invoke toolchain binary " + CXX + ": " + str(e)))
SCons.Script.Exit(-1)
diff --git a/etc/scons/xcode_ios.vars b/etc/scons/xcode_ios.vars
index ad2e5b5b68f..007fad1dfda 100644
--- a/etc/scons/xcode_ios.vars
+++ b/etc/scons/xcode_ios.vars
@@ -2,10 +2,10 @@
import subprocess
-CC = subprocess.check_output(['xcrun', '-f', '--sdk', 'iphoneos', 'clang']).strip()
-CXX = subprocess.check_output(['xcrun', '-f', '--sdk', 'iphoneos', 'clang++']).strip()
+CC = subprocess.check_output(['xcrun', '-f', '--sdk', 'iphoneos', 'clang']).decode('utf-8').strip()
+CXX = subprocess.check_output(['xcrun', '-f', '--sdk', 'iphoneos', 'clang++']).decode('utf-8').strip()
-sdk_path = subprocess.check_output(['xcrun', '--sdk', 'iphoneos', '--show-sdk-path']).strip()
+sdk_path = subprocess.check_output(['xcrun', '--sdk', 'iphoneos', '--show-sdk-path']).decode('utf-8').strip()
CCFLAGS = "-isysroot {} -miphoneos-version-min=11.0 -arch arm64 -fembed-bitcode".format(sdk_path)
LINKFLAGS = "-Wl,-syslibroot,{} -miphoneos-version-min=11.0 -arch arm64 -fembed-bitcode".format(sdk_path)
diff --git a/etc/scons/xcode_ios_sim.vars b/etc/scons/xcode_ios_sim.vars
index a4ddd9a403e..ec9869dd314 100644
--- a/etc/scons/xcode_ios_sim.vars
+++ b/etc/scons/xcode_ios_sim.vars
@@ -2,10 +2,10 @@
import subprocess
-CC = subprocess.check_output(['xcrun', '-f', '--sdk', 'iphonesimulator', 'clang']).strip()
-CXX = subprocess.check_output(['xcrun', '-f', '--sdk', 'iphonesimulator', 'clang++']).strip()
+CC = subprocess.check_output(['xcrun', '-f', '--sdk', 'iphonesimulator', 'clang']).decode('utf-8').strip()
+CXX = subprocess.check_output(['xcrun', '-f', '--sdk', 'iphonesimulator', 'clang++']).decode('utf-8').strip()
-sdk_path = subprocess.check_output(['xcrun', '--sdk', 'iphonesimulator', '--show-sdk-path']).strip()
+sdk_path = subprocess.check_output(['xcrun', '--sdk', 'iphonesimulator', '--show-sdk-path']).decode('utf-8').strip()
CCFLAGS = "-isysroot {} -miphoneos-version-min=11.0 -arch x86_64 -fembed-bitcode".format(sdk_path)
LINKFLAGS = "-Wl,-syslibroot,{} -miphoneos-version-min=11.0 -arch x86_64 -fembed-bitcode".format(sdk_path)
diff --git a/etc/scons/xcode_macosx.vars b/etc/scons/xcode_macosx.vars
index 166a41e6e2a..e5ff09079cd 100644
--- a/etc/scons/xcode_macosx.vars
+++ b/etc/scons/xcode_macosx.vars
@@ -2,10 +2,10 @@
import subprocess
-CC = subprocess.check_output(['xcrun', '-f', '--sdk', 'macosx', 'clang']).strip()
-CXX = subprocess.check_output(['xcrun', '-f', '--sdk', 'macosx', 'clang++']).strip()
+CC = subprocess.check_output(['xcrun', '-f', '--sdk', 'macosx', 'clang']).decode('utf-8').strip()
+CXX = subprocess.check_output(['xcrun', '-f', '--sdk', 'macosx', 'clang++']).decode('utf-8').strip()
-sdk_path = subprocess.check_output(['xcrun', '--sdk', 'macosx', '--show-sdk-path']).strip()
+sdk_path = subprocess.check_output(['xcrun', '--sdk', 'macosx', '--show-sdk-path']).decode('utf-8').strip()
CCFLAGS = "-isysroot {} -mmacosx-version-min=10.12".format(sdk_path)
LINKFLAGS = "-Wl,-syslibroot,{} -mmacosx-version-min=10.12".format(sdk_path)
diff --git a/etc/scons/xcode_tvos.vars b/etc/scons/xcode_tvos.vars
index fd1a3b1fdc5..7dc17c8aa13 100644
--- a/etc/scons/xcode_tvos.vars
+++ b/etc/scons/xcode_tvos.vars
@@ -2,10 +2,10 @@
import subprocess
-CC = subprocess.check_output(['xcrun', '-f', '--sdk', 'appletvos', 'clang']).strip()
-CXX = subprocess.check_output(['xcrun', '-f', '--sdk', 'appletvos', 'clang++']).strip()
+CC = subprocess.check_output(['xcrun', '-f', '--sdk', 'appletvos', 'clang']).decode('utf-8').strip()
+CXX = subprocess.check_output(['xcrun', '-f', '--sdk', 'appletvos', 'clang++']).decode('utf-8').strip()
-sdk_path = subprocess.check_output(['xcrun', '--sdk', 'appletvos', '--show-sdk-path']).strip()
+sdk_path = subprocess.check_output(['xcrun', '--sdk', 'appletvos', '--show-sdk-path']).decode('utf-8').strip()
CCFLAGS = "-isysroot {} -mtvos-version-min=11.0 -arch arm64 -fembed-bitcode".format(sdk_path)
LINKFLAGS = "-Wl,-syslibroot,{} -mtvos-version-min=11.0 -arch arm64 -fembed-bitcode".format(sdk_path)
diff --git a/etc/scons/xcode_tvos_sim.vars b/etc/scons/xcode_tvos_sim.vars
index 0ab352189ec..48b79d2f2f8 100644
--- a/etc/scons/xcode_tvos_sim.vars
+++ b/etc/scons/xcode_tvos_sim.vars
@@ -2,10 +2,10 @@
import subprocess
-CC = subprocess.check_output(['xcrun', '-f', '--sdk', 'appletvsimulator', 'clang']).strip()
-CXX = subprocess.check_output(['xcrun', '-f', '--sdk', 'appletvsimulator', 'clang++']).strip()
+CC = subprocess.check_output(['xcrun', '-f', '--sdk', 'appletvsimulator', 'clang']).decode('utf-8').strip()
+CXX = subprocess.check_output(['xcrun', '-f', '--sdk', 'appletvsimulator', 'clang++']).decode('utf-8').strip()
-sdk_path = subprocess.check_output(['xcrun', '--sdk', 'appletvsimulator', '--show-sdk-path']).strip()
+sdk_path = subprocess.check_output(['xcrun', '--sdk', 'appletvsimulator', '--show-sdk-path']).decode('utf-8').strip()
CCFLAGS = "-isysroot {} -mtvos-version-min=11.0 -arch x86_64 -fembed-bitcode".format(sdk_path)
LINKFLAGS = "-Wl,-syslibroot,{} -mtvos-version-min=11.0 -arch x86_64 -fembed-bitcode".format(sdk_path)
diff --git a/etc/scons/xcode_watchos.vars b/etc/scons/xcode_watchos.vars
index f6b4843cd7f..6c49ed7034a 100644
--- a/etc/scons/xcode_watchos.vars
+++ b/etc/scons/xcode_watchos.vars
@@ -2,10 +2,10 @@
import subprocess
-CC = subprocess.check_output(['xcrun', '-f', '--sdk', 'watchos', 'clang']).strip()
-CXX = subprocess.check_output(['xcrun', '-f', '--sdk', 'watchos', 'clang++']).strip()
+CC = subprocess.check_output(['xcrun', '-f', '--sdk', 'watchos', 'clang']).decode('utf-8').strip()
+CXX = subprocess.check_output(['xcrun', '-f', '--sdk', 'watchos', 'clang++']).decode('utf-8').strip()
-sdk_path = subprocess.check_output(['xcrun', '--sdk', 'watchos', '--show-sdk-path']).strip()
+sdk_path = subprocess.check_output(['xcrun', '--sdk', 'watchos', '--show-sdk-path']).decode('utf-8').strip()
CCFLAGS = "-isysroot {} -mwatchos-version-min=4.2 -arch armv7k -fembed-bitcode -fapplication-extension".format(sdk_path)
LINKFLAGS = "-Wl,-syslibroot,{} -mwatchos-version-min=4.2 -arch armv7k -fembed-bitcode -fapplication-extension".format(sdk_path)
diff --git a/etc/scons/xcode_watchos_sim.vars b/etc/scons/xcode_watchos_sim.vars
index 4cd2b12982c..b237c5c2039 100644
--- a/etc/scons/xcode_watchos_sim.vars
+++ b/etc/scons/xcode_watchos_sim.vars
@@ -2,10 +2,10 @@
import subprocess
-CC = subprocess.check_output(['xcrun', '-f', '--sdk', 'watchsimulator', 'clang']).strip()
-CXX = subprocess.check_output(['xcrun', '-f', '--sdk', 'watchsimulator', 'clang++']).strip()
+CC = subprocess.check_output(['xcrun', '-f', '--sdk', 'watchsimulator', 'clang']).decode('utf-8').strip()
+CXX = subprocess.check_output(['xcrun', '-f', '--sdk', 'watchsimulator', 'clang++']).decode('utf-8').strip()
-sdk_path = subprocess.check_output(['xcrun', '--sdk', 'watchsimulator', '--show-sdk-path']).strip()
+sdk_path = subprocess.check_output(['xcrun', '--sdk', 'watchsimulator', '--show-sdk-path']).decode('utf-8').strip()
CCFLAGS = "-isysroot {} -mwatchos-version-min=4.2 -arch i386 -fembed-bitcode -fapplication-extension".format(sdk_path)
LINKFLAGS = "-Wl,-syslibroot,{} -mwatchos-version-min=4.2 -arch i386 -fembed-bitcode -fapplication-extension".format(sdk_path)
diff --git a/jstests/free_mon/libs/mock_http_common.py b/jstests/free_mon/libs/mock_http_common.py
index fe4c428ef6f..14cc6ba5a21 100644
--- a/jstests/free_mon/libs/mock_http_common.py
+++ b/jstests/free_mon/libs/mock_http_common.py
@@ -7,6 +7,7 @@ URL_PATH_LAST_METRICS = "/last_metrics"
URL_DISABLE_FAULTS = "/disable_faults"
URL_ENABLE_FAULTS = "/enable_faults"
+
class Stats:
"""Stats class shared between client and server."""
diff --git a/jstests/free_mon/libs/mock_http_control.py b/jstests/free_mon/libs/mock_http_control.py
index 8f2469155cb..778450dd374 100644
--- a/jstests/free_mon/libs/mock_http_control.py
+++ b/jstests/free_mon/libs/mock_http_control.py
@@ -11,6 +11,7 @@ import urllib.request
import mock_http_common
+
def main():
"""Main entry point."""
parser = argparse.ArgumentParser(description='MongoDB Mock Free Monitoring Endpoint.')
diff --git a/jstests/free_mon/libs/mock_http_server.py b/jstests/free_mon/libs/mock_http_server.py
index f6ce1a39299..c5609c8fe35 100644
--- a/jstests/free_mon/libs/mock_http_server.py
+++ b/jstests/free_mon/libs/mock_http_server.py
@@ -23,22 +23,16 @@ last_metrics = None
last_register = None
disable_faults = False
fault_type = None
-
"""Fault which causes the server to return an HTTP failure on register."""
FAULT_FAIL_REGISTER = "fail_register"
-
"""Fault which causes the server to return a response with a document with a bad version."""
FAULT_INVALID_REGISTER = "invalid_register"
-
"""Fault which causes metrics to return halt after 5 metric uploads have occurred."""
FAULT_HALT_METRICS_5 = "halt_metrics_5"
-
"""Fault which causes metrics to return permanentlyDelete = true after 3 uploads."""
FAULT_PERMANENTLY_DELETE_AFTER_3 = "permanently_delete_after_3"
-
"""Fault which causes metrics to trigger resentRegistration at 3 uploads."""
FAULT_RESEND_REGISTRATION_AT_3 = "resend_registration_at_3"
-
"""Fault which causes metrics to trigger resentRegistration once."""
FAULT_RESEND_REGISTRATION_ONCE = "resend_registration_once"
@@ -132,14 +126,20 @@ class FreeMonHandler(http.server.BaseHTTPRequestHandler):
})
else:
data = bson.BSON.encode({
- 'version': bson.int64.Int64(1),
- 'haltMetricsUploading': False,
- 'id': 'mock123',
- 'informationalURL': 'http://www.example.com/123',
- 'message': 'Welcome to the Mock Free Monitoring Endpoint',
- 'reportingInterval': bson.int64.Int64(1),
+ 'version':
+ bson.int64.Int64(1),
+ 'haltMetricsUploading':
+ False,
+ 'id':
+ 'mock123',
+ 'informationalURL':
+ 'http://www.example.com/123',
+ 'message':
+ 'Welcome to the Mock Free Monitoring Endpoint',
+ 'reportingInterval':
+ bson.int64.Int64(1),
'userReminder':
-"""To see your monitoring data, navigate to the unique URL below.
+ """To see your monitoring data, navigate to the unique URL below.
Anyone you share the URL with will also be able to view this page.
https://localhost:8080/someUUID6v5jLKTIZZklDvN5L8sZ
@@ -196,7 +196,7 @@ You can disable monitoring at any time by running db.disableFreeMonitoring()."""
'id': 'mock123',
'reportingInterval': bson.int64.Int64(1),
'message': 'Thanks for all the metrics',
- 'resendRegistration' : True,
+ 'resendRegistration': True,
})
elif not disable_faults and \
stats.metrics_calls == 3 and fault_type == FAULT_RESEND_REGISTRATION_AT_3:
@@ -208,7 +208,7 @@ You can disable monitoring at any time by running db.disableFreeMonitoring()."""
'id': 'mock123',
'reportingInterval': bson.int64.Int64(1),
'message': 'Thanks for all the metrics',
- 'resendRegistration' : True,
+ 'resendRegistration': True,
})
else:
data = bson.BSON.encode({
@@ -250,6 +250,7 @@ You can disable monitoring at any time by running db.disableFreeMonitoring()."""
disable_faults = False
self._send_header()
+
def run(port, server_class=http.server.HTTPServer, handler_class=FreeMonHandler):
"""Run web server."""
server_address = ('', port)
@@ -284,7 +285,8 @@ def main():
if args.fault:
if args.fault not in SUPPORTED_FAULT_TYPES:
- print("Unsupported fault type %s, supports types are %s" % (args.fault, SUPPORTED_FAULT_TYPES))
+ print("Unsupported fault type %s, supports types are %s" % (args.fault,
+ SUPPORTED_FAULT_TYPES))
sys.exit(1)
fault_type = args.fault
diff --git a/jstests/noPassthrough/libs/configExpand/reflect.py b/jstests/noPassthrough/libs/configExpand/reflect.py
index d4e0f9a88d4..2852e3d1355 100644
--- a/jstests/noPassthrough/libs/configExpand/reflect.py
+++ b/jstests/noPassthrough/libs/configExpand/reflect.py
@@ -7,11 +7,13 @@ import argparse
import sys
import time
+
def main():
"""Main Method."""
parser = argparse.ArgumentParser(description='MongoDB Mock Config Expansion EXEC Endpoint.')
- parser.add_argument('-s', '--sleep', type=int, default=0, help="Add artificial delay for timeout testing")
+ parser.add_argument('-s', '--sleep', type=int, default=0,
+ help="Add artificial delay for timeout testing")
parser.add_argument('value', type=str, help="Content to reflect to stdout")
args = parser.parse_args()
@@ -25,5 +27,6 @@ def main():
sys.stdout.write(args.value)
+
if __name__ == '__main__':
main()
diff --git a/jstests/noPassthrough/libs/configExpand/rest_server.py b/jstests/noPassthrough/libs/configExpand/rest_server.py
index 9e30deb2c12..f1704e8fe77 100644
--- a/jstests/noPassthrough/libs/configExpand/rest_server.py
+++ b/jstests/noPassthrough/libs/configExpand/rest_server.py
@@ -10,6 +10,7 @@ import urllib.parse
connect_count = 0
+
class ConfigExpandRestHandler(http.server.BaseHTTPRequestHandler):
"""
Handle requests from mongod during config expansion.
@@ -73,6 +74,7 @@ class ConfigExpandRestHandler(http.server.BaseHTTPRequestHandler):
self.end_headers()
self.wfile.write('POST not supported')
+
def run(port):
"""Run web server."""
@@ -83,6 +85,7 @@ def run(port):
print("Mock Web Server Listening on %s" % (str(server_address)))
httpd.serve_forever()
+
def main():
"""Main Method."""
diff --git a/jstests/ssl/tls_enumerator.py b/jstests/ssl/tls_enumerator.py
index fdc61e24f78..126d3acbb34 100644
--- a/jstests/ssl/tls_enumerator.py
+++ b/jstests/ssl/tls_enumerator.py
@@ -34,7 +34,8 @@ def enumerate_tls_ciphers(protocol_options, host, port, cert, cafile):
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='MongoDB TLS Cipher Suite Enumerator')
parser.add_argument('--port', type=int, default=27017, help='Port to connect to')
- parser.add_argument('-o', '--outfile', type=str, default='ciphers.json', help='file to write the output to')
+ parser.add_argument('-o', '--outfile', type=str, default='ciphers.json',
+ help='file to write the output to')
parser.add_argument('--host', type=str, default='localhost', help='host to connect to')
parser.add_argument('--cafile', type=str, help='Path to CA certificate')
parser.add_argument('--cert', type=str, help='Path to client certificate')
@@ -68,11 +69,8 @@ if __name__ == '__main__':
}
results = {
- key: enumerate_tls_ciphers(protocol_options=proto,
- host=args.host,
- port=args.port,
- cafile=args.cafile,
- cert=args.cert)
+ key: enumerate_tls_ciphers(protocol_options=proto, host=args.host, port=args.port,
+ cafile=args.cafile, cert=args.cert)
for key, proto in suites.items()
}
diff --git a/mypy.ini b/mypy.ini
index 8cf0f9075b2..a3917513153 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -1,6 +1,5 @@
[mypy]
-# Check Python 2 code for type annotations in comments.
-python_version = 2.7
+python_version = 3.7
disallow_untyped_defs = False
# Do not error on imported files since all imported files may not be mypy clean.
diff --git a/pytests/powertest.py b/pytests/powertest.py
index 5ae6f7b717f..610ea8980da 100755
--- a/pytests/powertest.py
+++ b/pytests/powertest.py
@@ -45,31 +45,13 @@ import time
import traceback
import urlparse
import zipfile
+import subprocess
import psutil
import pymongo
import requests
import yaml
-# The subprocess32 module is untested on Windows and thus isn't recommended for use, even when it's
-# installed. See https://github.com/google/python-subprocess32/blob/3.2.7/README.md#usage.
-if os.name == "posix" and sys.version_info[0] == 2:
- try:
- import subprocess32 as subprocess
- except ImportError:
- import warnings
- warnings.warn(("Falling back to using the subprocess module because subprocess32 isn't"
- " available. When using the subprocess module, a child process may"
- " trigger an invalid free(). See SERVER-22219 for more details."),
- RuntimeWarning)
- import subprocess # type: ignore
-else:
- import subprocess
-
-# We replace the subprocess module imported by the psutil package so we can safely use
-# psutil.Popen() in addition to subprocess.Popen().
-psutil.subprocess = subprocess
-
# Get relative imports to work when the package is not installed on the PYTHONPATH.
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
@@ -215,9 +197,10 @@ def register_signal_handler(handler):
atexit.register(win32api.CloseHandle, task_timeout_handle)
# Create thread.
- event_handler_thread = threading.Thread(target=_handle_set_event, kwargs={
- "event_handle": task_timeout_handle, "handler": handler
- }, name="windows_event_handler_thread")
+ event_handler_thread = threading.Thread(
+ target=_handle_set_event,
+ kwargs={"event_handle": task_timeout_handle,
+ "handler": handler}, name="windows_event_handler_thread")
event_handler_thread.daemon = True
event_handler_thread.start()
else:
@@ -1595,13 +1578,12 @@ def mongo_seed_docs(mongo, db_name, coll_name, num_docs):
base_num = 100000
bulk_num = min(num_docs, 10000)
bulk_loops = num_docs / bulk_num
- for _ in xrange(bulk_loops):
+ for _ in range(int(bulk_loops)):
num_coll_docs = mongo[db_name][coll_name].count()
if num_coll_docs >= num_docs:
break
mongo[db_name][coll_name].insert_many(
- [{"x": random.randint(0, base_num), "doc": rand_string(1024)}
- for _ in xrange(bulk_num)])
+ [{"x": random.randint(0, base_num), "doc": rand_string(1024)} for _ in range(bulk_num)])
LOGGER.info("After seeding there are %d documents in the collection",
mongo[db_name][coll_name].count())
return 0
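
Note on range(int(bulk_loops)) above: Python 3's / always performs true division, so the Python 2 integer result of num_docs / bulk_num becomes a float that range() rejects. A standalone sketch of the distinction:

    num_docs, bulk_num = 25000, 10000
    print(num_docs / bulk_num)    # 2.5 on Python 3 (true division)
    print(num_docs // bulk_num)   # 2 (floor division, valid for range())
    for _ in range(num_docs // bulk_num):
        pass
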
@@ -1742,10 +1724,10 @@ Examples:
" -o StrictHostKeyChecking=no"
" -o ConnectTimeout=30"
" -o ConnectionAttempts=20")
- test_options.add_option("--sshConnection", dest="ssh_connection_options",
- help="Server ssh additional connection options, i.e., '-i ident.pem'"
- " which are added to '{}'".format(default_ssh_connection_options),
- default=None)
+ test_options.add_option(
+ "--sshConnection", dest="ssh_connection_options",
+ help="Server ssh additional connection options, i.e., '-i ident.pem'"
+ " which are added to '{}'".format(default_ssh_connection_options), default=None)
test_options.add_option("--testLoops", dest="num_loops",
help="Number of powercycle loops to run [default: %default]",
@@ -1763,35 +1745,38 @@ Examples:
help="Files excluded from rsync of the data directory", action="append",
default=None)
- test_options.add_option("--backupPathBefore", dest="backup_path_before",
- help="Path where the db_path is backed up before crash recovery,"
- " defaults to '<rootDir>/data-beforerecovery'", default=None)
+ test_options.add_option(
+ "--backupPathBefore", dest="backup_path_before",
+ help="Path where the db_path is backed up before crash recovery,"
+ " defaults to '<rootDir>/data-beforerecovery'", default=None)
- test_options.add_option("--backupPathAfter", dest="backup_path_after",
- help="Path where the db_path is backed up after crash recovery,"
- " defaults to '<rootDir>/data-afterrecovery'", default=None)
+ test_options.add_option(
+ "--backupPathAfter", dest="backup_path_after",
+ help="Path where the db_path is backed up after crash recovery,"
+ " defaults to '<rootDir>/data-afterrecovery'", default=None)
validate_locations = ["local", "remote"]
- test_options.add_option("--validate", dest="validate_collections",
- help="Run validate on all collections after mongod restart after"
- " a powercycle. Choose from {} to specify where the"
- " validate runs.".format(validate_locations),
- choices=validate_locations, default=None)
+ test_options.add_option(
+ "--validate", dest="validate_collections",
+ help="Run validate on all collections after mongod restart after"
+ " a powercycle. Choose from {} to specify where the"
+ " validate runs.".format(validate_locations), choices=validate_locations, default=None)
canary_locations = ["local", "remote"]
- test_options.add_option("--canary", dest="canary",
- help="Generate and validate canary document between powercycle"
- " events. Choose from {} to specify where the canary is"
- " generated from. If the 'crashMethod' is not 'internal"
- " then this option must be 'local'.".format(canary_locations),
- choices=canary_locations, default=None)
+ test_options.add_option(
+ "--canary", dest="canary", help="Generate and validate canary document between powercycle"
+ " events. Choose from {} to specify where the canary is"
+ " generated from. If the 'crashMethod' is not 'internal"
+ " then this option must be 'local'.".format(canary_locations), choices=canary_locations,
+ default=None)
test_options.add_option("--docForCanary", dest="canary_doc", help=optparse.SUPPRESS_HELP,
default="")
- test_options.add_option("--seedDocNum", dest="seed_doc_num",
- help="Number of documents to seed the default collection [default:"
- " %default]", type="int", default=0)
+ test_options.add_option(
+ "--seedDocNum", dest="seed_doc_num",
+ help="Number of documents to seed the default collection [default:"
+ " %default]", type="int", default=0)
test_options.add_option("--dbName", dest="db_name", help=optparse.SUPPRESS_HELP,
default="power")
@@ -1799,9 +1784,9 @@ Examples:
test_options.add_option("--collectionName", dest="collection_name", help=optparse.SUPPRESS_HELP,
default="cycle")
- test_options.add_option("--writeConcern", dest="write_concern",
- help="mongo (shell) CRUD client writeConcern, i.e.,"
- " '{\"w\": \"majority\"}' [default: '%default']", default="{}")
+ test_options.add_option(
+ "--writeConcern", dest="write_concern", help="mongo (shell) CRUD client writeConcern, i.e.,"
+ " '{\"w\": \"majority\"}' [default: '%default']", default="{}")
test_options.add_option("--readConcernLevel", dest="read_concern_level",
help="mongo (shell) CRUD client readConcernLevel, i.e.,"
@@ -1809,42 +1794,46 @@ Examples:
# Crash options
crash_methods = ["aws_ec2", "internal", "kill", "mpower"]
- crash_options.add_option("--crashMethod", dest="crash_method", choices=crash_methods,
- help="Crash methods: {} [default: '%default']."
- " Select 'aws_ec2' to force-stop/start an AWS instance."
- " Select 'internal' to crash the remote server through an"
- " internal command, i.e., sys boot (Linux) or notmyfault (Windows)."
- " Select 'kill' to perform an unconditional kill of mongod,"
- " which will keep the remote server running."
- " Select 'mpower' to use the mFi mPower to cutoff power to"
- " the remote server.".format(crash_methods), default="internal")
+ crash_options.add_option(
+ "--crashMethod", dest="crash_method", choices=crash_methods,
+ help="Crash methods: {} [default: '%default']."
+ " Select 'aws_ec2' to force-stop/start an AWS instance."
+ " Select 'internal' to crash the remote server through an"
+ " internal command, i.e., sys boot (Linux) or notmyfault (Windows)."
+ " Select 'kill' to perform an unconditional kill of mongod,"
+ " which will keep the remote server running."
+ " Select 'mpower' to use the mFi mPower to cutoff power to"
+ " the remote server.".format(crash_methods), default="internal")
aws_address_types = [
"private_ip_address", "public_ip_address", "private_dns_name", "public_dns_name"
]
- crash_options.add_option("--crashOption", dest="crash_option",
- help="Secondary argument for the following --crashMethod:"
- " 'aws_ec2': specify EC2 'address_type', which is one of {} and"
- " defaults to 'public_ip_address'."
- " 'mpower': specify output<num> to turn"
- " off/on, i.e., 'output1' (REQUIRED)."
- " 'internal': for Windows, optionally specify a crash method,"
- " i.e., 'notmyfault/notmyfaultc64.exe"
- " -accepteula crash 1'".format(aws_address_types), default=None)
-
- crash_options.add_option("--instanceId", dest="instance_id",
- help="The instance ID of an AWS EC2 host. If specified, this instance"
- " will be started after a crash, if it is not in a running state."
- " This is required if --crashOption is 'aws_ec2'.", default=None)
-
- crash_options.add_option("--crashWaitTime", dest="crash_wait_time",
- help="Time, in seconds, to wait before issuing crash [default:"
- " %default]", type="int", default=30)
-
- crash_options.add_option("--jitterForCrashWaitTime", dest="crash_wait_time_jitter",
- help="The maximum time, in seconds, to be added to --crashWaitTime,"
- " as a uniform distributed random value, [default: %default]",
- type="int", default=10)
+ crash_options.add_option(
+ "--crashOption", dest="crash_option",
+ help="Secondary argument for the following --crashMethod:"
+ " 'aws_ec2': specify EC2 'address_type', which is one of {} and"
+ " defaults to 'public_ip_address'."
+ " 'mpower': specify output<num> to turn"
+ " off/on, i.e., 'output1' (REQUIRED)."
+ " 'internal': for Windows, optionally specify a crash method,"
+ " i.e., 'notmyfault/notmyfaultc64.exe"
+ " -accepteula crash 1'".format(aws_address_types), default=None)
+
+ crash_options.add_option(
+ "--instanceId", dest="instance_id",
+ help="The instance ID of an AWS EC2 host. If specified, this instance"
+ " will be started after a crash, if it is not in a running state."
+ " This is required if --crashOption is 'aws_ec2'.", default=None)
+
+ crash_options.add_option(
+ "--crashWaitTime", dest="crash_wait_time",
+ help="Time, in seconds, to wait before issuing crash [default:"
+ " %default]", type="int", default=30)
+
+ crash_options.add_option(
+ "--jitterForCrashWaitTime", dest="crash_wait_time_jitter",
+ help="The maximum time, in seconds, to be added to --crashWaitTime,"
+ " as a uniform distributed random value, [default: %default]", type="int", default=10)
crash_options.add_option("--sshCrashUserHost", dest="ssh_crash_user_host",
help="The crash host's user@host for performing the crash.",
@@ -1855,31 +1844,34 @@ Examples:
default=None)
# MongoDB options
- mongodb_options.add_option("--downloadUrl", dest="tarball_url",
- help="URL of tarball to test, if unspecifed latest tarball will be"
- " used", default="latest")
+ mongodb_options.add_option(
+ "--downloadUrl", dest="tarball_url",
+ help="URL of tarball to test, if unspecifed latest tarball will be"
+ " used", default="latest")
- mongodb_options.add_option("--rootDir", dest="root_dir",
- help="Root directory, on remote host, to install tarball and data"
- " directory [default: 'mongodb-powertest-<epochSecs>']",
- default=None)
+ mongodb_options.add_option(
+ "--rootDir", dest="root_dir",
+ help="Root directory, on remote host, to install tarball and data"
+ " directory [default: 'mongodb-powertest-<epochSecs>']", default=None)
- mongodb_options.add_option("--mongodbBinDir", dest="mongodb_bin_dir",
- help="Directory, on remote host, containing mongoDB binaries,"
- " overrides bin from tarball in --downloadUrl", default=None)
+ mongodb_options.add_option(
+ "--mongodbBinDir", dest="mongodb_bin_dir",
+ help="Directory, on remote host, containing mongoDB binaries,"
+ " overrides bin from tarball in --downloadUrl", default=None)
- mongodb_options.add_option("--dbPath", dest="db_path",
- help="Data directory to use, on remote host, if unspecified"
- " it will be '<rootDir>/data/db'", default=None)
+ mongodb_options.add_option(
+ "--dbPath", dest="db_path", help="Data directory to use, on remote host, if unspecified"
+ " it will be '<rootDir>/data/db'", default=None)
- mongodb_options.add_option("--logPath", dest="log_path",
- help="Log path, on remote host, if unspecified"
- " it will be '<rootDir>/log/mongod.log'", default=None)
+ mongodb_options.add_option(
+ "--logPath", dest="log_path", help="Log path, on remote host, if unspecified"
+ " it will be '<rootDir>/log/mongod.log'", default=None)
# mongod options
- mongod_options.add_option("--replSet", dest="repl_set",
- help="Name of mongod single node replica set, if unpsecified mongod"
- " defaults to standalone node", default=None)
+ mongod_options.add_option(
+ "--replSet", dest="repl_set",
+ help="Name of mongod single node replica set, if unpsecified mongod"
+ " defaults to standalone node", default=None)
# The current host used to start and connect to mongod. Not meant to be specified
# by the user.
@@ -1892,10 +1884,10 @@ Examples:
default=None)
# The ports used on the 'server' side when in standard or secret mode.
- mongod_options.add_option("--mongodUsablePorts", dest="usable_ports", nargs=2,
- help="List of usable ports to be used by mongod for"
- " standard and secret modes, [default: %default]", type="int",
- default=[27017, 37017])
+ mongod_options.add_option(
+ "--mongodUsablePorts", dest="usable_ports", nargs=2,
+ help="List of usable ports to be used by mongod for"
+ " standard and secret modes, [default: %default]", type="int", default=[27017, 37017])
mongod_options.add_option("--mongodOptions", dest="mongod_options",
help="Additional mongod options", default="")
@@ -1903,97 +1895,109 @@ Examples:
mongod_options.add_option("--fcv", dest="fcv_version",
help="Set the FeatureCompatibilityVersion of mongod.", default=None)
- mongod_options.add_option("--removeLockFile", dest="remove_lock_file",
- help="If specified, the mongod.lock file will be deleted after a"
- " powercycle event, before mongod is started. This is a"
- " workaround for mongod failing start with MMAPV1 (See"
- " SERVER-15109).", action="store_true", default=False)
+ mongod_options.add_option(
+ "--removeLockFile", dest="remove_lock_file",
+ help="If specified, the mongod.lock file will be deleted after a"
+ " powercycle event, before mongod is started. This is a"
+ " workaround for mongod failing start with MMAPV1 (See"
+ " SERVER-15109).", action="store_true", default=False)
# Client options
mongo_path = distutils.spawn.find_executable("mongo",
os.getcwd() + os.pathsep + os.environ["PATH"])
- client_options.add_option("--mongoPath", dest="mongo_path",
- help="Path to mongo (shell) executable, if unspecifed, mongo client"
- " is launched from the current directory.", default=mongo_path)
+ client_options.add_option(
+ "--mongoPath", dest="mongo_path",
+ help="Path to mongo (shell) executable, if unspecifed, mongo client"
+ " is launched from the current directory.", default=mongo_path)
- client_options.add_option("--mongoRepoRootDir", dest="mongo_repo_root_dir",
- help="Root directory of mongoDB repository, defaults to current"
- " directory.", default=None)
+ client_options.add_option(
+ "--mongoRepoRootDir", dest="mongo_repo_root_dir",
+ help="Root directory of mongoDB repository, defaults to current"
+ " directory.", default=None)
- client_options.add_option("--crudClient", dest="crud_client",
- help="The path to the CRUD client script on the local host"
- " [default: '%default'].", default="jstests/hooks/crud_client.js")
+ client_options.add_option(
+ "--crudClient", dest="crud_client",
+ help="The path to the CRUD client script on the local host"
+ " [default: '%default'].", default="jstests/hooks/crud_client.js")
with_external_server = "buildscripts/resmokeconfig/suites/with_external_server.yml"
- client_options.add_option("--configCrudClient", dest="config_crud_client",
- help="The path to the CRUD client configuration YML file on the"
- " local host. This is the resmoke.py suite file. If unspecified,"
- " a default configuration YML file (%default) will be used that"
- " provides a mongo (shell) DB connection to a running mongod.",
- default=with_external_server)
-
- client_options.add_option("--numCrudClients", dest="num_crud_clients",
- help="The number of concurrent CRUD clients to run"
- " [default: '%default'].", type="int", default=1)
-
- client_options.add_option("--numFsmClients", dest="num_fsm_clients",
- help="The number of concurrent FSM clients to run"
- " [default: '%default'].", type="int", default=0)
-
- client_options.add_option("--fsmWorkloadFiles", dest="fsm_workload_files",
- help="A list of the FSM workload files to execute. More than one"
- " file can be specified either in a comma-delimited string,"
- " or by specifying this option more than once. If unspecified,"
- " then all FSM workload files are executed.", action="append",
- default=[])
-
- client_options.add_option("--fsmWorkloadBlacklistFiles", dest="fsm_workload_blacklist_files",
- help="A list of the FSM workload files to blacklist. More than one"
- " file can be specified either in a comma-delimited string,"
- " or by specifying this option more than once. Note the"
- " file name is the basename, i.e., 'distinct.js'.", action="append",
- default=[])
+ client_options.add_option(
+ "--configCrudClient", dest="config_crud_client",
+ help="The path to the CRUD client configuration YML file on the"
+ " local host. This is the resmoke.py suite file. If unspecified,"
+ " a default configuration YML file (%default) will be used that"
+ " provides a mongo (shell) DB connection to a running mongod.",
+ default=with_external_server)
+
+ client_options.add_option(
+ "--numCrudClients", dest="num_crud_clients",
+ help="The number of concurrent CRUD clients to run"
+ " [default: '%default'].", type="int", default=1)
+
+ client_options.add_option(
+ "--numFsmClients", dest="num_fsm_clients",
+ help="The number of concurrent FSM clients to run"
+ " [default: '%default'].", type="int", default=0)
+
+ client_options.add_option(
+ "--fsmWorkloadFiles", dest="fsm_workload_files",
+ help="A list of the FSM workload files to execute. More than one"
+ " file can be specified either in a comma-delimited string,"
+ " or by specifying this option more than once. If unspecified,"
+ " then all FSM workload files are executed.", action="append", default=[])
+
+ client_options.add_option(
+ "--fsmWorkloadBlacklistFiles", dest="fsm_workload_blacklist_files",
+ help="A list of the FSM workload files to blacklist. More than one"
+ " file can be specified either in a comma-delimited string,"
+ " or by specifying this option more than once. Note the"
+ " file name is the basename, i.e., 'distinct.js'.", action="append", default=[])
# Program options
- program_options.add_option("--configFile", dest="config_file",
- help="YAML configuration file of program options."
- " Option values are mapped to command line option names."
- " The command line option overrides any specified options"
- " from this file.", default=None)
-
- program_options.add_option("--saveConfigOptions", dest="save_config_options",
- help="Save the program options to a YAML configuration file."
- " If this options is specified the program only saves"
- " the configuration file and exits.", default=None)
-
- program_options.add_option("--reportJsonFile", dest="report_json_file",
- help="Create or update the specified report file upon program"
- " exit.", default=None)
-
- program_options.add_option("--exitYamlFile", dest="exit_yml_file",
- help="If specified, create a YAML file on exit containing"
- " exit code.", default=None)
-
- program_options.add_option("--remotePython", dest="remote_python",
- help="The python intepreter to use on the remote host"
- " [default: '%default']."
- " To be able to use a python virtual environment,"
- " which has already been provisioned on the remote"
- " host, specify something similar to this:"
- " 'source venv/bin/activate; python'", default="python")
-
- program_options.add_option("--remoteSudo", dest="remote_sudo",
- help="Use sudo on the remote host for priveleged operations."
- " [default: %default]."
- " For non-Windows systems, in order to perform privileged"
- " operations on the remote host, specify this, if the"
- " remote user is not able to perform root operations.",
- action="store_true", default=False)
+ program_options.add_option(
+ "--configFile", dest="config_file", help="YAML configuration file of program options."
+ " Option values are mapped to command line option names."
+ " The command line option overrides any specified options"
+ " from this file.", default=None)
+
+ program_options.add_option(
+ "--saveConfigOptions", dest="save_config_options",
+ help="Save the program options to a YAML configuration file."
+ " If this options is specified the program only saves"
+ " the configuration file and exits.", default=None)
+
+ program_options.add_option(
+ "--reportJsonFile", dest="report_json_file",
+ help="Create or update the specified report file upon program"
+ " exit.", default=None)
+
+ program_options.add_option(
+ "--exitYamlFile", dest="exit_yml_file",
+ help="If specified, create a YAML file on exit containing"
+ " exit code.", default=None)
+
+ program_options.add_option(
+ "--remotePython", dest="remote_python",
+ help="The python intepreter to use on the remote host"
+ " [default: '%default']."
+ " To be able to use a python virtual environment,"
+ " which has already been provisioned on the remote"
+ " host, specify something similar to this:"
+ " 'source venv/bin/activate; python'", default="python")
+
+ program_options.add_option(
+ "--remoteSudo", dest="remote_sudo",
+ help="Use sudo on the remote host for priveleged operations."
+ " [default: %default]."
+ " For non-Windows systems, in order to perform privileged"
+ " operations on the remote host, specify this, if the"
+ " remote user is not able to perform root operations.", action="store_true", default=False)
log_levels = ["debug", "info", "warning", "error"]
- program_options.add_option("--logLevel", dest="log_level", choices=log_levels,
- help="The log level. Accepted values are: {}."
- " [default: '%default'].".format(log_levels), default="info")
+ program_options.add_option(
+ "--logLevel", dest="log_level", choices=log_levels,
+ help="The log level. Accepted values are: {}."
+ " [default: '%default'].".format(log_levels), default="info")
program_options.add_option("--logFile", dest="log_file",
help="The destination file for the log output. Defaults to stdout.",
@@ -2228,9 +2232,9 @@ Examples:
# As described in http://man7.org/linux/man-pages/man5/ssh_config.5.html, ssh uses the value of
# the first occurrence for each parameter, so we have the default connection options follow the
# user-specified --sshConnection options.
- ssh_connection_options = "{} {}".format(options.ssh_connection_options
- if options.ssh_connection_options else "",
- default_ssh_connection_options)
+ ssh_connection_options = "{} {}".format(
+ options.ssh_connection_options if options.ssh_connection_options else "",
+ default_ssh_connection_options)
# For remote operations requiring sudo, force pseudo-tty allocation,
# see https://stackoverflow.com/questions/10310299/proper-way-to-sudo-over-ssh.
# Note - the ssh option RequestTTY was added in OpenSSH 5.9, so we use '-tt'.
@@ -2246,8 +2250,7 @@ Examples:
if not is_instance_running(ret, aws_status):
LOGGER.error("AWS instance is not running: %d %s", ret, aws_status)
local_exit(1)
- if (ssh_host == aws_status.private_ip_address
- or ssh_host == aws_status.private_dns_name):
+ if ssh_host in (aws_status.private_ip_address, aws_status.private_dns_name):
address_type = "private_ip_address"
# Instantiate the local handler object.
@@ -2354,11 +2357,11 @@ Examples:
" {seed_docs}").format(
rsync_opt=rsync_opt, canary_opt=canary_opt, host=mongod_host,
port=secret_port, rsync_cmd=rsync_cmd,
- remove_lock_file_cmd=remove_lock_file_cmd, set_fcv_cmd=set_fcv_cmd
- if loop_num == 1 else "",
+ remove_lock_file_cmd=remove_lock_file_cmd,
+ set_fcv_cmd=set_fcv_cmd if loop_num == 1 else "",
validate_collections_cmd=validate_collections_cmd,
- validate_canary_cmd=validate_canary_cmd, seed_docs=seed_docs
- if loop_num == 1 else "")
+ validate_canary_cmd=validate_canary_cmd,
+ seed_docs=seed_docs if loop_num == 1 else "")
ret, output = call_remote_operation(local_ops, options.remote_python, script_name,
client_args, remote_operation)
rsync_text = "rsync_data beforerecovery & " if options.rsync_data else ""
@@ -2424,7 +2427,7 @@ Examples:
# Start CRUD clients
host_port = "{}:{}".format(mongod_host, standard_port)
- for i in xrange(options.num_crud_clients):
+ for i in range(options.num_crud_clients):
if options.config_crud_client == with_external_server:
crud_config_file = NamedTempFile.create(suffix=".yml", directory="tmp")
crud_test_data["collectionName"] = "{}-{}".format(options.collection_name, i)
@@ -2440,11 +2443,11 @@ Examples:
LOGGER.info("****Started %d CRUD client(s)****", options.num_crud_clients)
# Start FSM clients
- for i in xrange(options.num_fsm_clients):
+ for i in range(options.num_fsm_clients):
fsm_config_file = NamedTempFile.create(suffix=".yml", directory="tmp")
fsm_test_data["dbNamePrefix"] = "fsm-{}".format(i)
# Do collection validation only for the first FSM client.
- fsm_test_data["validateCollections"] = True if i == 0 else False
+ fsm_test_data["validateCollections"] = bool(i == 0)
new_resmoke_config(with_external_server, fsm_config_file, fsm_test_data, eval_str)
_, _ = resmoke_client(work_dir=mongo_repo_root_dir, mongo_path=mongo_path,
host_port=host_port, js_test=fsm_client,
diff --git a/site_scons/libdeps.py b/site_scons/libdeps.py
index d2844a8d68b..e08104b0042 100644
--- a/site_scons/libdeps.py
+++ b/site_scons/libdeps.py
@@ -61,7 +61,7 @@ syslibdeps_env_var = 'SYSLIBDEPS'
missing_syslibdep = 'MISSING_LIBDEP_'
class dependency(object):
- Public, Private, Interface = range(3)
+ Public, Private, Interface = list(range(3))
def __init__(self, value, deptype):
self.target_node = value
@@ -70,28 +70,31 @@ class dependency(object):
def __str__(self):
return str(self.target_node)
+
dependency_visibility_ignored = {
- dependency.Public : dependency.Public,
- dependency.Private : dependency.Public,
- dependency.Interface : dependency.Public,
+ dependency.Public: dependency.Public,
+ dependency.Private: dependency.Public,
+ dependency.Interface: dependency.Public,
}
dependency_visibility_honored = {
- dependency.Public : dependency.Public,
- dependency.Private : dependency.Private,
- dependency.Interface : dependency.Interface,
+ dependency.Public: dependency.Public,
+ dependency.Private: dependency.Private,
+ dependency.Interface: dependency.Interface,
}
+
class DependencyCycleError(SCons.Errors.UserError):
"""Exception representing a cycle discovered in library dependencies."""
- def __init__(self, first_node ):
+ def __init__(self, first_node):
super(DependencyCycleError, self).__init__()
self.cycle_nodes = [first_node]
def __str__(self):
return "Library dependency cycle detected: " + " => ".join(str(n) for n in self.cycle_nodes)
+
def __get_sorted_direct_libdeps(node):
direct_sorted = getattr(node.attributes, "libdeps_direct_sorted", False)
if not direct_sorted:
@@ -100,8 +103,8 @@ def __get_sorted_direct_libdeps(node):
setattr(node.attributes, "libdeps_direct_sorted", direct_sorted)
return direct_sorted
-def __get_libdeps(node):
+def __get_libdeps(node):
"""Given a SCons Node, return its library dependencies, topologically sorted.
Computes the dependencies if they're not already cached.
@@ -133,7 +136,7 @@ def __get_libdeps(node):
marked.add(n.target_node)
tsorted.append(n.target_node)
- except DependencyCycleError, e:
+ except DependencyCycleError as e:
if len(e.cycle_nodes) == 1 or e.cycle_nodes[0] != e.cycle_nodes[-1]:
e.cycle_nodes.insert(0, n.target_node)
raise
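
The except clause above is a hard syntax error on Python 3, which accepts only the 'as' form. Standalone sketch:

    try:
        raise ValueError('boom')
    except ValueError as exc:  # Python 2 also allowed: except ValueError, exc:
        print(exc)
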
@@ -150,6 +153,7 @@ def __get_libdeps(node):
return tsorted
+
def __get_syslibdeps(node):
""" Given a SCons Node, return its system library dependencies.
@@ -161,11 +165,11 @@ def __get_syslibdeps(node):
for lib in __get_libdeps(node):
for syslib in node.get_env().Flatten(lib.get_env().get(syslibdeps_env_var, [])):
if syslib:
- if type(syslib) in (str, unicode) and syslib.startswith(missing_syslibdep):
- print("Target '%s' depends on the availability of a "
+ if type(syslib) is str and syslib.startswith(missing_syslibdep):
+ print(("Target '%s' depends on the availability of a "
"system provided library for '%s', "
"but no suitable library was found during configuration." %
- (str(node), syslib[len(missing_syslibdep):]))
+ (str(node), syslib[len(missing_syslibdep):]))
node.get_env().Exit(1)
syslibdeps.append(syslib)
setattr(node.attributes, cached_var_name, syslibdeps)
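
Python 3 has a single text type, so the (str, unicode) membership tests here collapse to a plain str check; unicode no longer exists as a name. A standalone sketch (isinstance is the more idiomatic spelling of the same test):

    def is_text(value):
        # bytes deliberately fails, mirroring Python 3's str/bytes split.
        return isinstance(value, str)

    assert is_text('m')
    assert not is_text(b'm')
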
@@ -181,17 +185,20 @@ def update_scanner(builder):
if old_scanner:
path_function = old_scanner.path_function
+
def new_scanner(node, env, path=()):
result = old_scanner.function(node, env, path)
result.extend(__get_libdeps(node))
return result
else:
path_function = None
+
def new_scanner(node, env, path=()):
return __get_libdeps(node)
builder.target_scanner = SCons.Scanner.Scanner(function=new_scanner,
- path_function=path_function)
+ path_function=path_function)
+
def get_libdeps(source, target, env, for_signature):
"""Implementation of the special _LIBDEPS environment variable.
@@ -202,6 +209,7 @@ def get_libdeps(source, target, env, for_signature):
target = env.Flatten([target])
return __get_libdeps(target[0])
+
def get_libdeps_objs(source, target, env, for_signature):
objs = []
for lib in get_libdeps(source, target, env, for_signature):
@@ -209,6 +217,7 @@ def get_libdeps_objs(source, target, env, for_signature):
objs.extend(lib.sources)
return objs
+
def get_syslibdeps(source, target, env, for_signature):
deps = __get_syslibdeps(target[0])
lib_link_prefix = env.subst('$LIBLINKPREFIX')
@@ -220,7 +229,7 @@ def get_syslibdeps(source, target, env, for_signature):
# they're believed to represent library short names, that should be prefixed with -l
# or the compiler-specific equivalent. I.e., 'm' becomes '-lm', but 'File("m.a")' is passed
# through whole cloth.
- if type(d) in (str, unicode):
+ if type(d) is str:
result.append('%s%s%s' % (lib_link_prefix, d, lib_link_suffix))
else:
result.append(d)
@@ -234,8 +243,9 @@ def __append_direct_libdeps(node, prereq_nodes):
node.attributes.libdeps_direct = []
node.attributes.libdeps_direct.extend(prereq_nodes)
-def make_libdeps_emitter(dependency_builder, dependency_map=dependency_visibility_ignored, ignore_progdeps=False):
+def make_libdeps_emitter(dependency_builder, dependency_map=dependency_visibility_ignored,
+ ignore_progdeps=False):
def libdeps_emitter(target, source, env):
"""SCons emitter that takes values from the LIBDEPS environment variable and
converts them to File node objects, binding correct path information into
@@ -260,13 +270,20 @@ def make_libdeps_emitter(dependency_builder, dependency_map=dependency_visibilit
prog_builder = env['BUILDERS']['Program']
prog_node_factory = prog_builder.target_factory or env.File
- prereqs = [dependency(l, dependency_map[dependency.Public]) for l in env.get(libdeps_env_var, []) if l]
- prereqs.extend(dependency(l, dependency_map[dependency.Interface]) for l in env.get(libdeps_env_var + '_INTERFACE', []) if l)
- prereqs.extend(dependency(l, dependency_map[dependency.Private]) for l in env.get(libdeps_env_var + '_PRIVATE', []) if l)
+ prereqs = [
+ dependency(l, dependency_map[dependency.Public]) for l in env.get(libdeps_env_var, [])
+ if l
+ ]
+ prereqs.extend(
+ dependency(l, dependency_map[dependency.Interface])
+ for l in env.get(libdeps_env_var + '_INTERFACE', []) if l)
+ prereqs.extend(
+ dependency(l, dependency_map[dependency.Private])
+ for l in env.get(libdeps_env_var + '_PRIVATE', []) if l)
for prereq in prereqs:
- prereqWithIxes = SCons.Util.adjustixes(
- prereq.target_node, lib_builder.get_prefix(env), lib_builder.get_suffix(env))
+ prereqWithIxes = SCons.Util.adjustixes(prereq.target_node, lib_builder.get_prefix(env),
+ lib_builder.get_suffix(env))
prereq.target_node = lib_node_factory(prereqWithIxes)
for t in target:
@@ -283,10 +300,11 @@ def make_libdeps_emitter(dependency_builder, dependency_map=dependency_visibilit
visibility = dependent[1]
dependent = dependent[0]
- dependentWithIxes = SCons.Util.adjustixes(
- dependent, lib_builder.get_prefix(env), lib_builder.get_suffix(env))
+ dependentWithIxes = SCons.Util.adjustixes(dependent, lib_builder.get_prefix(env),
+ lib_builder.get_suffix(env))
dependentNode = lib_node_factory(dependentWithIxes)
- __append_direct_libdeps(dependentNode, [dependency(target[0], dependency_map[visibility])])
+ __append_direct_libdeps(dependentNode,
+ [dependency(target[0], dependency_map[visibility])])
if not ignore_progdeps:
for dependent in env.get('PROGDEPS_DEPENDENTS', []):
@@ -299,21 +317,24 @@ def make_libdeps_emitter(dependency_builder, dependency_map=dependency_visibilit
visibility = dependent[1]
dependent = dependent[0]
- dependentWithIxes = SCons.Util.adjustixes(
- dependent, prog_builder.get_prefix(env), prog_builder.get_suffix(env))
+ dependentWithIxes = SCons.Util.adjustixes(dependent, prog_builder.get_prefix(env),
+ prog_builder.get_suffix(env))
dependentNode = prog_node_factory(dependentWithIxes)
- __append_direct_libdeps(dependentNode, [dependency(target[0], dependency_map[visibility])])
+ __append_direct_libdeps(dependentNode,
+ [dependency(target[0], dependency_map[visibility])])
return target, source
return libdeps_emitter
+
def expand_libdeps_tags(source, target, env, for_signature):
results = []
for expansion in env.get('LIBDEPS_TAG_EXPANSIONS', []):
results.append(expansion(source, target, env, for_signature))
return results
+
def setup_environment(env, emitting_shared=False):
"""Set up the given build environment to do LIBDEPS tracking."""
@@ -339,19 +360,18 @@ def setup_environment(env, emitting_shared=False):
def make_indirect_emitter(variable):
def indirect_emitter(target, source, env):
return env[variable](target, source, env)
+
return indirect_emitter
env.Append(
LIBDEPS_LIBEMITTER=make_libdeps_emitter('StaticLibrary'),
LIBEMITTER=make_indirect_emitter('LIBDEPS_LIBEMITTER'),
-
LIBDEPS_SHAREMITTER=make_libdeps_emitter('SharedArchive', ignore_progdeps=True),
SHAREMITTER=make_indirect_emitter('LIBDEPS_SHAREMITTER'),
-
LIBDEPS_SHLIBEMITTER=make_libdeps_emitter('SharedLibrary', dependency_visibility_honored),
SHLIBEMITTER=make_indirect_emitter('LIBDEPS_SHLIBEMITTER'),
-
- LIBDEPS_PROGEMITTER=make_libdeps_emitter('SharedLibrary' if emitting_shared else 'StaticLibrary'),
+ LIBDEPS_PROGEMITTER=make_libdeps_emitter(
+ 'SharedLibrary' if emitting_shared else 'StaticLibrary'),
PROGEMITTER=make_indirect_emitter('LIBDEPS_PROGEMITTER'),
)
@@ -362,9 +382,11 @@ def setup_environment(env, emitting_shared=False):
if 'init-no-global-side-effects' in env.Entry(lib).get_env().get('LIBDEPS_TAGS', []):
result.append(str(lib))
else:
- result.extend(env.subst('$LINK_WHOLE_ARCHIVE_LIB_START'
- '$TARGET'
- '$LINK_WHOLE_ARCHIVE_LIB_END', target=lib).split())
+ result.extend(
+ env.subst(
+ '$LINK_WHOLE_ARCHIVE_LIB_START'
+ '$TARGET'
+ '$LINK_WHOLE_ARCHIVE_LIB_END', target=lib).split())
return result
env['_LIBDEPS_LIBS_WITH_TAGS'] = expand_libdeps_with_extraction_flags
@@ -382,6 +404,7 @@ def setup_environment(env, emitting_shared=False):
except KeyError:
pass
+
def setup_conftests(conf):
def FindSysLibDep(context, name, libs, **kwargs):
var = "LIBDEPS_" + name.upper() + "_SYSLIBDEP"
@@ -394,4 +417,5 @@ def setup_conftests(conf):
return context.Result(result)
context.env[var] = __missing_syslib(name)
return context.Result(result)
+
conf.AddTest('FindSysLibDep', FindSysLibDep)
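
Two mechanical Python 3 rules recur throughout the libdeps.py hunks above: the old 'except E, e' binding syntax is gone in favor of 'except E as e', and range() now returns a lazy sequence rather than a list (2to3 wraps it in list() conservatively, even where tuple unpacking alone would work). A minimal standalone sketch of both, using a stand-in for the cycle error class:

    # A stand-in for the SCons.Errors.UserError subclass in libdeps.py.
    class DependencyCycleError(Exception):
        pass

    try:
        raise DependencyCycleError("libfoo")
    except DependencyCycleError as e:  # 'except DependencyCycleError, e' is a SyntaxError in Python 3
        print("caught:", e)

    # range() is lazy in Python 3; list() forces a real list where one is required.
    Public, Private, Interface = list(range(3))
    print(Public, Private, Interface)  # 0 1 2
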
diff --git a/site_scons/mongo/__init__.py b/site_scons/mongo/__init__.py
index 510bd7bcc2f..f77478092b9 100644
--- a/site_scons/mongo/__init__.py
+++ b/site_scons/mongo/__init__.py
@@ -5,4 +5,4 @@
def print_build_failures():
from SCons.Script import GetBuildFailures
for bf in GetBuildFailures():
- print "%s failed: %s" % (bf.node, bf.errstr)
+ print("%s failed: %s" % (bf.node, bf.errstr))
diff --git a/site_scons/mongo/generators.py b/site_scons/mongo/generators.py
index c07e86a4d14..052f18d3a0a 100644
--- a/site_scons/mongo/generators.py
+++ b/site_scons/mongo/generators.py
@@ -1,6 +1,6 @@
# -*- mode: python; -*-
-import md5
+import hashlib
# Default and alternative generator definitions go here.
@@ -15,22 +15,69 @@ import md5
# want to define them.
def default_buildinfo_environment_data():
return (
- ('distmod', '$MONGO_DISTMOD', True, True,),
- ('distarch', '$MONGO_DISTARCH', True, True,),
- ('cc', '$CC_VERSION', True, False,),
- ('ccflags', '$CCFLAGS', True, False,),
- ('cxx', '$CXX_VERSION', True, False,),
- ('cxxflags', '$CXXFLAGS', True, False,),
- ('linkflags', '$LINKFLAGS', True, False,),
- ('target_arch', '$TARGET_ARCH', True, True,),
- ('target_os', '$TARGET_OS', True, False,),
+ (
+ 'distmod',
+ '$MONGO_DISTMOD',
+ True,
+ True,
+ ),
+ (
+ 'distarch',
+ '$MONGO_DISTARCH',
+ True,
+ True,
+ ),
+ (
+ 'cc',
+ '$CC_VERSION',
+ True,
+ False,
+ ),
+ (
+ 'ccflags',
+ '$CCFLAGS',
+ True,
+ False,
+ ),
+ (
+ 'cxx',
+ '$CXX_VERSION',
+ True,
+ False,
+ ),
+ (
+ 'cxxflags',
+ '$CXXFLAGS',
+ True,
+ False,
+ ),
+ (
+ 'linkflags',
+ '$LINKFLAGS',
+ True,
+ False,
+ ),
+ (
+ 'target_arch',
+ '$TARGET_ARCH',
+ True,
+ True,
+ ),
+ (
+ 'target_os',
+ '$TARGET_OS',
+ True,
+ False,
+ ),
)
+
# If you want buildInfo and --version to be relatively empty, set
# MONGO_BUILDINFO_ENVIRONMENT_DATA = empty_buildinfo_environment_data()
def empty_buildinfo_environment_data():
return ()
+
def default_variant_dir_generator(target, source, env, for_signature):
if env.GetOption('cache') != None:
@@ -44,11 +91,11 @@ def default_variant_dir_generator(target, source, env, for_signature):
# Hash the named options and their values, and take the first 8 characters of the hash as
# the variant name
- hasher = md5.md5()
+ hasher = hashlib.md5()
for option in variant_options:
- hasher.update(option)
- hasher.update(str(env.GetOption(option)))
- variant_dir = hasher.hexdigest()[0:8]
+ hasher.update(option.encode('utf-8'))
+ hasher.update(str(env.GetOption(option)).encode('utf-8'))
+ variant_dir = str(hasher.hexdigest()[0:8])
# If our option hash yields a well known hash, replace it with its name.
known_variant_hashes = {
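
The md5 -> hashlib move above also forces an encoding step: in Python 3, hashlib.md5().update() accepts only bytes, so each option name and value is encoded before hashing. A standalone sketch with hypothetical option values (the real code reads them via env.GetOption):

    import hashlib

    variant_options = {"cache": "nolinked", "prefix": "/opt/mongo"}  # hypothetical values
    hasher = hashlib.md5()
    for option, value in sorted(variant_options.items()):
        hasher.update(option.encode('utf-8'))      # update() raises TypeError on str in Python 3
        hasher.update(str(value).encode('utf-8'))
    variant_dir = hasher.hexdigest()[0:8]          # first 8 hex chars name the variant dir
    print(variant_dir)
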
diff --git a/site_scons/mongo/platform.py b/site_scons/mongo/platform.py
index 05df9017ac4..f5d15b4924c 100644
--- a/site_scons/mongo/platform.py
+++ b/site_scons/mongo/platform.py
@@ -1,5 +1,4 @@
# -*- mode: python; -*-
-
"""
Support code related to OS detection in general. System specific facilities or customization
hooks live in mongo_platform_<PLATFORM>.py files.
@@ -12,6 +11,7 @@ import os
# This needs to precede the options section so that we can only offer some options on certain
# operating systems.
+
# This function gets the running OS as identified by Python
# It should only be used to set up defaults for options/variables, because
# its value could potentially be overridden by setting TARGET_OS on the
@@ -34,25 +34,28 @@ def get_running_os_name():
running_os = 'unknown'
return running_os
+
def env_get_os_name_wrapper(self):
return self['TARGET_OS']
+
def is_os_raw(target_os, os_list_to_check):
- darwin_os_list = [ 'macOS', 'tvOS', 'tvOS-sim', 'iOS', 'iOS-sim', 'watchOS', 'watchOS-sim' ]
- linux_os_list = [ 'android', 'linux' ]
- posix_os_list = [ 'openbsd', 'freebsd', 'solaris', 'emscripten' ] + darwin_os_list + linux_os_list
+ darwin_os_list = ['macOS', 'tvOS', 'tvOS-sim', 'iOS', 'iOS-sim', 'watchOS', 'watchOS-sim']
+ linux_os_list = ['android', 'linux']
+ posix_os_list = ['openbsd', 'freebsd', 'solaris', 'emscripten'] + darwin_os_list + linux_os_list
os_families = {
- "darwin": darwin_os_list,
- "posix": posix_os_list,
- "linux": linux_os_list,
+ "darwin": darwin_os_list,
+ "posix": posix_os_list,
+ "linux": linux_os_list,
}
for os in os_list_to_check:
- if os == target_os or ( os in os_families and target_os in os_families[os] ):
+ if os == target_os or (os in os_families and target_os in os_families[os]):
return True
return False
+
# This function tests the running OS as identified by Python
# It should only be used to set up defaults for options/variables, because
# its value could potentially be overridden by setting TARGET_OS on the
@@ -60,5 +63,6 @@ def is_os_raw(target_os, os_list_to_check):
def is_running_os(*os_list):
return is_os_raw(get_running_os_name(), os_list)
+
def env_os_is_wrapper(self, *os_list):
return is_os_raw(self['TARGET_OS'], os_list)
diff --git a/site_scons/mongo/toolchain.py b/site_scons/mongo/toolchain.py
index ffa74f56f6f..9bab92a68b7 100644
--- a/site_scons/mongo/toolchain.py
+++ b/site_scons/mongo/toolchain.py
@@ -6,31 +6,22 @@ import SCons
# Helper functions for generic toolchain things go here
+
def get_toolchain_ver(env, tool):
# By default we don't know the version of each tool, and only report what
# command gets executed (gcc vs /opt/mongodbtoolchain/bin/gcc).
verstr = "version unknown"
proc = None
if env.ToolchainIs('clang', 'gcc'):
- proc = SCons.Action._subproc(env,
- env.subst("${%s} --version" % tool),
- stdout=subprocess.PIPE,
- stderr='devnull',
- stdin='devnull',
- universal_newlines=True,
- error='raise',
- shell=True)
+ proc = SCons.Action._subproc(env, env.subst("${%s} --version" % tool),
+ stdout=subprocess.PIPE, stderr='devnull', stdin='devnull',
+ universal_newlines=True, error='raise', shell=True)
verstr = proc.stdout.readline()
elif env.ToolchainIs('msvc') and env.TargetOSIs('windows'):
- proc = SCons.Action._subproc(env,
- env.subst("${%s}" % tool),
- stdout='devnull',
- stderr=subprocess.PIPE,
- stdin='devnull',
- universal_newlines=True,
- error='raise',
- shell=True)
+ proc = SCons.Action._subproc(env, env.subst("${%s}" % tool), stdout='devnull',
+ stderr=subprocess.PIPE, stdin='devnull',
+ universal_newlines=True, error='raise', shell=True)
verstr = proc.stderr.readline()
# If we started a process, we should drain its stdout/stderr and wait for
diff --git a/site_scons/site_tools/abilink.py b/site_scons/site_tools/abilink.py
index 65f2a995719..00f5e71a0a3 100644
--- a/site_scons/site_tools/abilink.py
+++ b/site_scons/site_tools/abilink.py
@@ -20,6 +20,7 @@ import subprocess
# TODO: Make a variable for the md5sum utility (allow any hasher)
# TODO: Add an ABILINKCOM variable to the Action, so it can be silenced.
+
def _detect(env):
try:
abidw = env['ABIDW']
@@ -31,6 +32,7 @@ def _detect(env):
return env.WhereIs('abidw')
+
def _add_emitter(builder):
base_emitter = builder.emitter
@@ -47,6 +49,7 @@ def _add_emitter(builder):
new_emitter = SCons.Builder.ListEmitter([base_emitter, new_emitter])
builder.emitter = new_emitter
+
def _add_scanner(builder):
old_scanner = builder.target_scanner
path_function = old_scanner.path_function
@@ -59,16 +62,21 @@ def _add_scanner(builder):
new_results.append(abidw if abidw else base)
return new_results
- builder.target_scanner = SCons.Scanner.Scanner(function=new_scanner, path_function=path_function)
+ builder.target_scanner = SCons.Scanner.Scanner(function=new_scanner,
+ path_function=path_function)
+
def _add_action(builder):
actions = builder.action
- builder.action = actions + SCons.Action.Action("$ABIDW --no-show-locs $TARGET | md5sum > ${TARGET}.abidw")
+ builder.action = actions + SCons.Action.Action(
+ "$ABIDW --no-show-locs $TARGET | md5sum > ${TARGET}.abidw")
+
def exists(env):
result = _detect(env) != None
return result
+
def generate(env):
if not exists(env):
diff --git a/site_scons/site_tools/auto_install_binaries.py b/site_scons/site_tools/auto_install_binaries.py
index 9a71b4919b7..6261058c268 100644
--- a/site_scons/site_tools/auto_install_binaries.py
+++ b/site_scons/site_tools/auto_install_binaries.py
@@ -1,24 +1,26 @@
import SCons
+
def exists(env):
return True
+
def generate(env):
env.Tool('install')
suffix_map = {
- env.subst('$PROGSUFFIX') : 'bin',
- '.dylib' : 'lib',
+ env.subst('$PROGSUFFIX'): 'bin',
+ '.dylib': 'lib',
# TODO: These 'lib' answers are incorrect. The location for the debug info
# should be the same as the target itself, which might be bin or lib. We need
# a solution for that. When that is fixed, add 'Program' back into the list
# of separate debug targets in the separate_debug.py tool.
- '.dSYM' : 'lib',
- '.debug' : 'lib',
- '.so' : 'lib',
- '.dll' : 'bin',
- '.lib' : 'lib',
+ '.dSYM': 'lib',
+ '.debug': 'lib',
+ '.so': 'lib',
+ '.dll': 'bin',
+ '.lib': 'lib',
}
def auto_install(env, target, source, **kwargs):
@@ -67,7 +69,8 @@ def generate(env):
if auto_install_location:
tentry_install_tags = env.get('INSTALL_ALIAS', [])
setattr(tentry.attributes, 'INSTALL_ALIAS', tentry_install_tags)
- install = env.AutoInstall(auto_install_location, tentry, INSTALL_ALIAS=tentry_install_tags)
+ install = env.AutoInstall(auto_install_location, tentry,
+ INSTALL_ALIAS=tentry_install_tags)
return (target, source)
def add_emitter(builder):
@@ -99,7 +102,7 @@ def generate(env):
from SCons.Tool import install
base_install_builder = install.BaseInstallBuilder
- assert(base_install_builder.target_scanner == None)
+ assert (base_install_builder.target_scanner == None)
base_install_builder.target_scanner = SCons.Scanner.Scanner(
function=scan_for_transitive_install,
diff --git a/site_scons/site_tools/compilation_db.py b/site_scons/site_tools/compilation_db.py
index 161ac6061ea..07aa2d96e58 100644
--- a/site_scons/site_tools/compilation_db.py
+++ b/site_scons/site_tools/compilation_db.py
@@ -28,7 +28,7 @@ import itertools
# compilation database can access the complete list, and also so that the writer has easy
# access to write all of the files. But it seems clunky. How can the emitter and the scanner
# communicate more gracefully?
-__COMPILATION_DB_ENTRIES=[]
+__COMPILATION_DB_ENTRIES = []
# Cribbed from Tool/cc.py and Tool/c++.py. It would be better if
# we could obtain this from SCons.
@@ -40,6 +40,7 @@ _CXXSuffixes = ['.cpp', '.cc', '.cxx', '.c++', '.C++']
if SCons.Util.case_sensitive_suffixes('.c', '.C'):
_CXXSuffixes.append('.C')
+
# We make no effort to avoid rebuilding the entries. Someday, perhaps we could and even
# integrate with the cache, but there doesn't seem to be much call for it.
class __CompilationDbNode(SCons.Node.Python.Value):
@@ -77,11 +78,8 @@ def makeEmitCompilationDbEntry(comstr):
dbtarget = __CompilationDbNode(source)
entry = env.__COMPILATIONDB_Entry(
- target=dbtarget,
- source=[],
- __COMPILATIONDB_UTARGET=target,
- __COMPILATIONDB_USOURCE=source,
- __COMPILATIONDB_UACTION=user_action,
+ target=dbtarget, source=[], __COMPILATIONDB_UTARGET=target,
+ __COMPILATIONDB_USOURCE=source, __COMPILATIONDB_UACTION=user_action,
__COMPILATIONDB_ENV=env)
# TODO: Technically, these next two lines should not be required: it should be fine to
@@ -112,11 +110,11 @@ def CompilationDbEntryAction(target, source, env, **kw):
command = env['__COMPILATIONDB_UACTION'].strfunction(
target=env['__COMPILATIONDB_UTARGET'],
source=env['__COMPILATIONDB_USOURCE'],
- env=env['__COMPILATIONDB_ENV'],)
+ env=env['__COMPILATIONDB_ENV'],
+ )
entry = {
- "directory": env.Dir('#').abspath,
- "command": command,
+ "directory": env.Dir('#').abspath, "command": command,
"file": str(env['__COMPILATIONDB_USOURCE'][0])
}
@@ -130,21 +128,19 @@ def WriteCompilationDb(target, source, env):
entries.append(s.read())
with open(str(target[0]), 'w') as target_file:
- json.dump(entries, target_file,
- sort_keys=True,
- indent=4,
- separators=(',', ': '))
+ json.dump(entries, target_file, sort_keys=True, indent=4, separators=(',', ': '))
def ScanCompilationDb(node, env, path):
return __COMPILATION_DB_ENTRIES
+
def generate(env, **kwargs):
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
- env['COMPILATIONDB_COMSTR'] = kwargs.get(
- 'COMPILATIONDB_COMSTR', 'Building compilation database $TARGET')
+ env['COMPILATIONDB_COMSTR'] = kwargs.get('COMPILATIONDB_COMSTR',
+ 'Building compilation database $TARGET')
components_by_suffix = itertools.chain(
itertools.product(_CSuffixes, [
@@ -163,28 +159,20 @@ def generate(env, **kwargs):
# Assumes a dictionary emitter
emitter = builder.emitter[suffix]
- builder.emitter[suffix] = SCons.Builder.ListEmitter(
- [
- emitter,
- makeEmitCompilationDbEntry(command),
- ]
- )
+ builder.emitter[suffix] = SCons.Builder.ListEmitter([
+ emitter,
+ makeEmitCompilationDbEntry(command),
+ ])
env['BUILDERS']['__COMPILATIONDB_Entry'] = SCons.Builder.Builder(
- action=SCons.Action.Action(CompilationDbEntryAction, None),
- )
+ action=SCons.Action.Action(CompilationDbEntryAction, None))
env['BUILDERS']['__COMPILATIONDB_Database'] = SCons.Builder.Builder(
action=SCons.Action.Action(WriteCompilationDb, "$COMPILATIONDB_COMSTR"),
- target_scanner=SCons.Scanner.Scanner(
- function=ScanCompilationDb,
- node_class=None)
- )
+ target_scanner=SCons.Scanner.Scanner(function=ScanCompilationDb, node_class=None))
def CompilationDatabase(env, target):
- result = env.__COMPILATIONDB_Database(
- target=target,
- source=[])
+ result = env.__COMPILATIONDB_Database(target=target, source=[])
env.AlwaysBuild(result)
env.NoCache(result)
@@ -193,5 +181,6 @@ def generate(env, **kwargs):
env.AddMethod(CompilationDatabase, 'CompilationDatabase')
+
def exists(env):
return True
diff --git a/site_scons/site_tools/dagger/__init__.py b/site_scons/site_tools/dagger/__init__.py
index f05228cfe45..c63bfc6967e 100644
--- a/site_scons/site_tools/dagger/__init__.py
+++ b/site_scons/site_tools/dagger/__init__.py
@@ -5,7 +5,7 @@ import logging
import SCons
-import dagger
+from . import dagger
def generate(env, **kwargs):
"""The entry point for our tool. However, the builder for
@@ -14,25 +14,22 @@ def generate(env, **kwargs):
to the native builders for object/libraries.
"""
- env.Replace(LIBEMITTER=SCons.Builder.ListEmitter([env['LIBEMITTER'],
- dagger.emit_lib_db_entry]))
+ env.Replace(LIBEMITTER=SCons.Builder.ListEmitter([env['LIBEMITTER'], dagger.emit_lib_db_entry]))
running_os = os.sys.platform
if not (running_os.startswith('win') or running_os.startswith('sun')):
- env.Replace(PROGEMITTER=SCons.Builder.ListEmitter([env['PROGEMITTER'],
- dagger.emit_prog_db_entry]))
+ env.Replace(
+ PROGEMITTER=SCons.Builder.ListEmitter([env['PROGEMITTER'], dagger.emit_prog_db_entry]))
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
suffixes = ['.c', '.cc', '.cxx', '.cpp']
obj_builders = [static_obj, shared_obj]
- default_emitters = [SCons.Defaults.StaticObjectEmitter,
- SCons.Defaults.SharedObjectEmitter]
+ default_emitters = [SCons.Defaults.StaticObjectEmitter, SCons.Defaults.SharedObjectEmitter]
for suffix in suffixes:
for i in range(len(obj_builders)):
- obj_builders[i].add_emitter(suffix, SCons.Builder.ListEmitter([
- dagger.emit_obj_db_entry, default_emitters[i]
- ]))
+ obj_builders[i].add_emitter(
+ suffix, SCons.Builder.ListEmitter([dagger.emit_obj_db_entry, default_emitters[i]]))
env['BUILDERS']['__OBJ_DATABASE'] = SCons.Builder.Builder(
action=SCons.Action.Action(dagger.write_obj_db, None))
diff --git a/site_scons/site_tools/dagger/dagger.py b/site_scons/site_tools/dagger/dagger.py
index bace834783b..cc208dd23c2 100644
--- a/site_scons/site_tools/dagger/dagger.py
+++ b/site_scons/site_tools/dagger/dagger.py
@@ -40,8 +40,8 @@ import sys
import SCons
-import graph
-import graph_consts
+from . import graph
+from . import graph_consts
LIB_DB = [] # Stores every SCons library nodes
@@ -92,11 +92,9 @@ def get_symbol_worker(object_file, task):
uses = p.communicate()[0].decode()
if platform == 'linux':
- return list_process([use[19:] for use in uses.split('\n')
- if use != ''])
+ return list_process([use[19:] for use in uses.split('\n') if use != ''])
elif platform == 'darwin':
- return list_process([use.strip() for use in uses.split('\n')
- if use != ''])
+ return list_process([use.strip() for use in uses.split('\n') if use != ''])
def emit_obj_db_entry(target, source, env):
@@ -109,6 +107,7 @@ def emit_obj_db_entry(target, source, env):
OBJ_DB.append(t)
return target, source
+
def emit_prog_db_entry(target, source, env):
for t in target:
if str(t) is None:
@@ -117,6 +116,7 @@ def emit_prog_db_entry(target, source, env):
return target, source
+
def emit_lib_db_entry(target, source, env):
"""Emitter for libraries. We add each library
into our global variable"""
@@ -210,6 +210,7 @@ def __generate_file_rels(obj, g):
for obj in objs:
g.add_edge(graph_consts.FIL_FIL, file_node.id, obj)
+
def __generate_exe_rels(exe, g):
"""Generates all executable to library relationships, and populates the
contained files field in each NodeExe object"""
@@ -223,6 +224,7 @@ def __generate_exe_rels(exe, g):
exe_node.contained_files = set(EXE_DB[exe])
+
def write_obj_db(target, source, env):
"""The bulk of the tool. This method takes all the objects and libraries
which we have stored in the global LIB_DB and OBJ_DB variables and
@@ -240,7 +242,7 @@ def write_obj_db(target, source, env):
for obj in OBJ_DB:
__generate_file_rels(obj, g)
- for exe in EXE_DB.keys():
+ for exe in list(EXE_DB.keys()):
__generate_exe_rels(exe, g)
# target is given as a list of target SCons nodes - this builder is only responsible for
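
The 'from . import graph' edits reflect Python 3's removal of implicit relative imports: inside the dagger package, a bare 'import graph' now searches only sys.path and misses the sibling module. A sketch of the layout this assumes (a fragment meant to live in dagger/dagger.py, not a standalone script):

    # Package layout: dagger/__init__.py, dagger/dagger.py, dagger/graph.py, dagger/graph_consts.py
    # Inside dagger/dagger.py:
    from . import graph          # explicit relative import of a sibling module
    from . import graph_consts
    # The Python 2 spelling 'import graph' raises ModuleNotFoundError under Python 3.
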
diff --git a/site_scons/site_tools/dagger/graph.py b/site_scons/site_tools/dagger/graph.py
index 5ebe6f45061..40c7fd9b2d9 100644
--- a/site_scons/site_tools/dagger/graph.py
+++ b/site_scons/site_tools/dagger/graph.py
@@ -4,10 +4,7 @@ import abc
import json
import copy
-import graph_consts
-
-if sys.version_info >= (3, 0):
- basestring = str
+from . import graph_consts
class Graph(object):
"""Graph class for storing the build dependency graph. The graph stores the
@@ -20,7 +17,7 @@ class Graph(object):
"""
A graph can be initialized with a .json file, graph object, or with no args
"""
- if isinstance(input, basestring):
+ if isinstance(input, str):
if input.endswith('.json'):
with open(input, 'r') as f:
data = json.load(f, encoding="ascii")
@@ -72,7 +69,7 @@ class Graph(object):
@nodes.setter
def nodes(self, value):
- if isinstance(value,dict):
+ if isinstance(value, dict):
self._nodes = value
else:
raise TypeError("Nodes must be a dict")
@@ -141,7 +138,7 @@ class Graph(object):
node_dict["id"] = id
node_dict["node"] = {}
- for property, value in vars(node).iteritems():
+ for property, value in vars(node).items():
if isinstance(value, set):
node_dict["node"][property] = list(value)
else:
@@ -151,7 +148,7 @@ class Graph(object):
for edge_type in graph_consts.RELATIONSHIP_TYPES:
edges_dict = self._edges[edge_type]
- for node in edges_dict.keys():
+ for node in list(edges_dict.keys()):
to_nodes = list(self._edges[edge_type][node])
to_nodes_dicts = [{"index": node_index[to_node], "id": to_node}
for to_node in to_nodes]
@@ -166,14 +163,13 @@ class Graph(object):
def __str__(self):
return ("<Number of Nodes : {0}, Number of Edges : {1}, "
- "Hash: {2}>").format(len(self._nodes.keys()),
- sum(len(x) for x in self._edges.values()), hash(self))
+ "Hash: {2}>").format(len(list(self._nodes.keys())),
+ sum(len(x) for x in list(self._edges.values())), hash(self))
-class NodeInterface(object):
+class NodeInterface(object, metaclass=abc.ABCMeta):
"""Abstract base class for all Node Objects - All nodes must have an id and name
"""
- __metaclass__ = abc.ABCMeta
@abc.abstractproperty
def id(self):
@@ -190,7 +186,7 @@ class NodeLib(NodeInterface):
def __init__(self, id, name, input=None):
if isinstance(input, dict):
should_fail = False
- for k, v in input.iteritems():
+ for k, v in input.items():
try:
if isinstance(v, list):
setattr(self, k, set(v))
@@ -287,10 +283,10 @@ class NodeLib(NodeInterface):
def __eq__(self, other):
if isinstance(other, NodeLib):
- return (self._id == other._id and self._defined_symbols == other._defined_symbols and
- self._defined_files == other._defined_files and
- self._dependent_libs == other._dependent_libs and
- self._dependent_files == other._dependent_files)
+ return (self._id == other._id and self._defined_symbols == other._defined_symbols
+ and self._defined_files == other._defined_files
+ and self._dependent_libs == other._dependent_libs
+ and self._dependent_files == other._dependent_files)
else:
return False
@@ -310,7 +306,7 @@ class NodeSymbol(NodeInterface):
if isinstance(input, dict):
should_fail = False
- for k, v in input.iteritems():
+ for k, v in input.items():
try:
if isinstance(v, list):
setattr(self, k, set(v))
@@ -413,11 +409,10 @@ class NodeSymbol(NodeInterface):
def __eq__(self, other):
if isinstance(other, NodeSymbol):
- return (self.id == other.id and self._libs == other._libs and
- self._files == other._files and
- self._dependent_libs == other._dependent_libs and
- self._dependent_files == other._dependent_files
- )
+ return (self.id == other.id and self._libs == other._libs
+ and self._files == other._files
+ and self._dependent_libs == other._dependent_libs
+ and self._dependent_files == other._dependent_files)
else:
return False
@@ -435,7 +430,7 @@ class NodeFile(NodeInterface):
def __init__(self, id, name, input=None):
if isinstance(input, dict):
should_fail = False
- for k, v in input.iteritems():
+ for k, v in input.items():
try:
if isinstance(v, list):
setattr(self, k, set(v))
@@ -526,16 +521,16 @@ class NodeFile(NodeInterface):
self.add_dependent_lib(from_node.library)
g.add_edge(graph_consts.LIB_FIL, from_node.library, self.id)
if lib_node is not None:
- lib_node.add_dependent_file(from_node.id)
- lib_node.add_dependent_lib(from_node.library)
- g.add_edge(graph_consts.FIL_LIB, from_node.id, lib_node.id)
+ lib_node.add_dependent_file(from_node.id)
+ lib_node.add_dependent_lib(from_node.library)
+ g.add_edge(graph_consts.FIL_LIB, from_node.id, lib_node.id)
def __eq__(self, other):
if isinstance(other, NodeSymbol):
- return (self.id == other.id and self._lib == other._lib and
- self._dependent_libs == other._dependent_libs and
- self._dependent_files == other._dependent_files and
- self._defined_symbols == other._defined_symbols)
+ return (self.id == other.id and self._lib == other._lib
+ and self._dependent_libs == other._dependent_libs
+ and self._dependent_files == other._dependent_files
+ and self._defined_symbols == other._defined_symbols)
else:
return False
@@ -551,7 +546,7 @@ class NodeExe(NodeInterface):
def __init__(self, id, name, input=None):
if isinstance(input, dict):
should_fail = False
- for k, v in input.iteritems():
+ for k, v in input.items():
try:
if isinstance(v, list):
setattr(self, k, set(v))
@@ -580,10 +575,12 @@ class NodeExe(NodeInterface):
return self.id
-types = {graph_consts.NODE_LIB: NodeLib,
- graph_consts.NODE_SYM: NodeSymbol,
- graph_consts.NODE_FILE: NodeFile,
- graph_consts.NODE_EXE: NodeExe,}
+types = {
+ graph_consts.NODE_LIB: NodeLib,
+ graph_consts.NODE_SYM: NodeSymbol,
+ graph_consts.NODE_FILE: NodeFile,
+ graph_consts.NODE_EXE: NodeExe,
+}
def node_factory(id, nodetype, dict_source=None):
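
Two idioms in graph.py deserve a note: Python 3 spells the metaclass as a keyword in the class header instead of a __metaclass__ attribute, and dict.keys() returns a live view, so loops that mutate the dict need a list() snapshot. A standalone sketch:

    import abc

    class NodeInterface(object, metaclass=abc.ABCMeta):  # Python 3 metaclass spelling
        @abc.abstractproperty                            # subclasses must define id
        def id(self):
            pass

    edges = {"a": {1}, "b": set()}
    for key in list(edges.keys()):  # snapshot; deleting during view iteration raises RuntimeError
        if not edges[key]:
            del edges[key]
    print(edges)  # {'a': {1}}
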
diff --git a/site_scons/site_tools/dagger/graph_consts.py b/site_scons/site_tools/dagger/graph_consts.py
index 81fe86d75cd..eae5db9b6c6 100644
--- a/site_scons/site_tools/dagger/graph_consts.py
+++ b/site_scons/site_tools/dagger/graph_consts.py
@@ -1,5 +1,4 @@
"""Constants for use in graph.py and dagger.py"""
-
"""Relationship edge types"""
LIB_LIB = 1
LIB_FIL = 2
@@ -17,8 +16,8 @@ NODE_SYM = 2
NODE_FILE = 3
NODE_EXE = 4
-RELATIONSHIP_TYPES = range(1, 9)
-NODE_TYPES = range(1, 5)
+RELATIONSHIP_TYPES = list(range(1, 9))
+NODE_TYPES = list(range(1, 5))
"""Error/query codes"""
diff --git a/site_scons/site_tools/dagger/graph_test.py b/site_scons/site_tools/dagger/graph_test.py
index bc84f5868c7..39bdf77ab7c 100644
--- a/site_scons/site_tools/dagger/graph_test.py
+++ b/site_scons/site_tools/dagger/graph_test.py
@@ -5,8 +5,8 @@ from JSON
import json
import unittest
-import graph
-import graph_consts
+from . import graph
+from . import graph_consts
def generate_graph():
@@ -73,26 +73,22 @@ class CustomAssertions:
raise AssertionError("Nodes not of same type")
if node1.type == graph_consts.NODE_LIB:
- if (node1._defined_symbols != node2._defined_symbols or
- node1._defined_files != node2._defined_files or
- node1._dependent_libs != node2._dependent_libs or
- node1._dependent_files != node2._dependent_files or
- node1._id != node2._id):
+ if (node1._defined_symbols != node2._defined_symbols
+ or node1._defined_files != node2._defined_files
+ or node1._dependent_libs != node2._dependent_libs
+ or node1._dependent_files != node2._dependent_files or node1._id != node2._id):
raise AssertionError("Nodes not equal")
elif node1.type == graph_consts.NODE_SYM:
- if (node1._libs != node2._libs or node1._files != node2._files or
- node1._dependent_libs != node2._dependent_libs or
- node1._dependent_files != node2._dependent_files or
- node1.id != node2.id):
+ if (node1._libs != node2._libs or node1._files != node2._files
+ or node1._dependent_libs != node2._dependent_libs
+ or node1._dependent_files != node2._dependent_files or node1.id != node2.id):
raise AssertionError("Nodes not equal")
else:
- if (node1._lib != node2._lib or
- node1._dependent_libs != node2._dependent_libs or
- node1._dependent_files != node2._dependent_files or
- node1.id != node2.id or
- node1._defined_symbols != node2._defined_symbols):
+ if (node1._lib != node2._lib or node1._dependent_libs != node2._dependent_libs
+ or node1._dependent_files != node2._dependent_files or node1.id != node2.id
+ or node1._defined_symbols != node2._defined_symbols):
raise AssertionError("Nodes not equal")
@@ -104,11 +100,9 @@ class TestGraphMethods(unittest.TestCase, CustomAssertions):
self.from_node_lib = graph.NodeLib("from_node_lib", "from_node_lib")
self.to_node_lib = graph.NodeLib("to_node_lib", "to_node_lib")
- self.from_node_file = graph.NodeFile(
- "from_node_file", "from_node_file")
+ self.from_node_file = graph.NodeFile("from_node_file", "from_node_file")
self.to_node_file = graph.NodeFile("to_node_file", "to_node_file")
- self.from_node_sym = graph.NodeSymbol(
- "from_node_symbol", "from_node_symbol")
+ self.from_node_sym = graph.NodeSymbol("from_node_symbol", "from_node_symbol")
self.to_node_sym = graph.NodeSymbol("to_node_symbol", "to_node_symbol")
self.g.add_node(self.from_node_lib)
@@ -122,15 +116,15 @@ class TestGraphMethods(unittest.TestCase, CustomAssertions):
node = graph.NodeLib("test_node", "test_node")
self.g._nodes = {"test_node": node}
- self.assertEquals(self.g.get_node("test_node"), node)
+ self.assertEqual(self.g.get_node("test_node"), node)
- self.assertEquals(self.g.get_node("missing_node"), None)
+ self.assertEqual(self.g.get_node("missing_node"), None)
def test_add_node(self):
node = graph.NodeLib("test_node", "test_node")
self.g.add_node(node)
- self.assertEquals(self.g.get_node("test_node"), node)
+ self.assertEqual(self.g.get_node("test_node"), node)
self.assertRaises(ValueError, self.g.add_node, node)
@@ -153,16 +147,16 @@ class TestGraphMethods(unittest.TestCase, CustomAssertions):
self.g.add_edge(graph_consts.LIB_FIL, self.from_node_lib.id,
self.to_node_file.id)
- self.assertEquals(self.g.edges[graph_consts.LIB_LIB][
+ self.assertEqual(self.g.edges[graph_consts.LIB_LIB][
self.from_node_lib.id], set([self.to_node_lib.id]))
- self.assertEquals(self.g.edges[graph_consts.LIB_SYM][
+ self.assertEqual(self.g.edges[graph_consts.LIB_SYM][
self.from_node_lib.id], set([self.to_node_sym.id]))
- self.assertEquals(self.g.edges[graph_consts.LIB_FIL][
+ self.assertEqual(self.g.edges[graph_consts.LIB_FIL][
self.from_node_lib.id], set([self.to_node_file.id]))
- self.assertEquals(self.to_node_lib.dependent_libs,
+ self.assertEqual(self.to_node_lib.dependent_libs,
set([self.from_node_lib.id]))
def test_add_edge_files(self):
@@ -173,14 +167,14 @@ class TestGraphMethods(unittest.TestCase, CustomAssertions):
self.g.add_edge(graph_consts.FIL_LIB, self.from_node_file.id,
self.to_node_lib.id)
- self.assertEquals(self.g.edges[graph_consts.FIL_FIL][
+ self.assertEqual(self.g.edges[graph_consts.FIL_FIL][
self.from_node_file.id], set([self.to_node_file.id]))
- self.assertEquals(self.g.edges[graph_consts.FIL_SYM][
+ self.assertEqual(self.g.edges[graph_consts.FIL_SYM][
self.from_node_file.id], set([self.to_node_sym.id]))
- self.assertEquals(self.g.edges[graph_consts.FIL_LIB][
+ self.assertEqual(self.g.edges[graph_consts.FIL_LIB][
self.from_node_file.id], set([self.to_node_lib.id]))
- self.assertEquals(self.to_node_file.dependent_files,
+ self.assertEqual(self.to_node_file.dependent_files,
set([self.from_node_file.id]))
def test_export_to_json(self):
@@ -188,7 +182,7 @@ class TestGraphMethods(unittest.TestCase, CustomAssertions):
generated_graph.export_to_json("export_test.json")
generated = open("export_test.json", "r")
correct = open("test_graph.json", "r")
- self.assertEquals(json.load(generated), json.load(correct))
+ self.assertEqual(json.load(generated), json.load(correct))
generated.close()
correct.close()
@@ -196,7 +190,7 @@ class TestGraphMethods(unittest.TestCase, CustomAssertions):
graph_fromJSON = graph.Graph("test_graph.json")
correct_graph = generate_graph()
- for id in graph_fromJSON.nodes.keys():
+ for id in list(graph_fromJSON.nodes.keys()):
# for some reason, neither
# assertTrue(graph_fromJSON.get_node(id) == correct_graph.get_node(str(id)))
# nor assertEquals() seem to call the correct eq method here, hence
@@ -205,7 +199,7 @@ class TestGraphMethods(unittest.TestCase, CustomAssertions):
self.assertNodeEquals(
graph_fromJSON.get_node(id), correct_graph.get_node(id))
- self.assertEquals(graph_fromJSON.edges, correct_graph.edges)
+ self.assertEqual(graph_fromJSON.edges, correct_graph.edges)
if __name__ == '__main__':
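
assertEquals survives in Python 3's unittest only as a deprecated alias of assertEqual (it warns, and later releases removed it entirely), hence the blanket rename above. A minimal sketch:

    import unittest

    class AliasDemo(unittest.TestCase):
        def test_sets(self):
            self.assertEqual({"libfoo"}, {"libfoo"})  # preferred spelling; assertEquals is deprecated

    if __name__ == '__main__':
        unittest.main()
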
diff --git a/site_scons/site_tools/distsrc.py b/site_scons/site_tools/distsrc.py
index 861f5d9e2e2..cc72c0655f7 100644
--- a/site_scons/site_tools/distsrc.py
+++ b/site_scons/site_tools/distsrc.py
@@ -20,7 +20,7 @@ import shutil
import tarfile
import time
import zipfile
-import StringIO
+import io
from distutils.spawn import find_executable
@@ -28,7 +28,7 @@ __distsrc_callbacks = []
class DistSrcFile:
def __init__(self, **kwargs):
- [ setattr(self, key, val) for (key, val) in kwargs.items() ]
+ [ setattr(self, key, val) for (key, val) in list(kwargs.items()) ]
def __str__(self):
return self.name
@@ -60,6 +60,7 @@ class DistSrcArchive:
def close(self):
self.archive_file.close()
+
class DistSrcTarArchive(DistSrcArchive):
def __iter__(self):
file_list = self.archive_file.getnames()
@@ -82,7 +83,7 @@ class DistSrcTarArchive(DistSrcArchive):
def append_file_contents(self, filename, file_contents,
mtime=time.time(),
- mode=0644,
+ mode=0o644,
uname="root",
gname="root"):
file_metadata = tarfile.TarInfo(name=filename)
@@ -91,7 +92,7 @@ class DistSrcTarArchive(DistSrcArchive):
file_metadata.uname = uname
file_metadata.gname = gname
file_metadata.size = len(file_contents)
- file_buf = StringIO.StringIO(file_contents)
+ file_buf = io.BytesIO(file_contents.encode('utf-8'))
if self.archive_mode == 'r':
self.archive_file.close()
self.archive_file = tarfile.open(
@@ -105,6 +106,7 @@ class DistSrcTarArchive(DistSrcArchive):
def append_file(self, filename, localfile):
self.archive_file.add(localfile, arcname=filename)
+
class DistSrcZipArchive(DistSrcArchive):
def __iter__(self):
file_list = self.archive_file.namelist()
@@ -119,7 +121,7 @@ class DistSrcZipArchive(DistSrcArchive):
name=key,
size=item_data.file_size,
mtime=time.mktime(fixed_time),
- mode=0775 if is_dir else 0664,
+ mode=0o775 if is_dir else 0o664,
type=tarfile.DIRTYPE if is_dir else tarfile.REGTYPE,
uid=0,
gid=0,
@@ -129,7 +131,7 @@ class DistSrcZipArchive(DistSrcArchive):
def append_file_contents(self, filename, file_contents,
mtime=time.time(),
- mode=0644,
+ mode=0o644,
uname="root",
gname="root"):
self.archive_file.writestr(filename, file_contents)
@@ -139,7 +141,7 @@ class DistSrcZipArchive(DistSrcArchive):
def build_error_action(msg):
def error_stub(target=None, source=None, env=None):
- print msg
+ print(msg)
env.Exit(1)
return [ error_stub ]
@@ -162,7 +164,7 @@ def distsrc_action_generator(source, target, env, for_signature):
target_ext = str(target[0])[-3:]
if not target_ext in [ 'zip', 'tar' ]:
- print "Invalid file format for distsrc. Must be tar or zip file"
+ print("Invalid file format for distsrc. Must be tar or zip file")
env.Exit(1)
git_cmd = "\"%s\" archive --format %s --output %s --prefix ${MONGO_DIST_SRC_PREFIX} HEAD" % (
@@ -173,14 +175,14 @@ def distsrc_action_generator(source, target, env, for_signature):
SCons.Action.Action(run_distsrc_callbacks, "Running distsrc callbacks for $TARGET")
]
+
def add_callback(env, fn):
__distsrc_callbacks.append(fn)
+
def generate(env, **kwargs):
env.AddMethod(add_callback, 'AddDistSrcCallback')
- env['BUILDERS']['__DISTSRC'] = SCons.Builder.Builder(
- generator=distsrc_action_generator,
- )
+ env['BUILDERS']['__DISTSRC'] = SCons.Builder.Builder(generator=distsrc_action_generator)
def DistSrc(env, target):
result = env.__DISTSRC(target=target, source=[])
@@ -190,5 +192,6 @@ def generate(env, **kwargs):
env.AddMethod(DistSrc, 'DistSrc')
+
def exists(env):
return True
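
Two Python 3 changes drive the distsrc.py edits: octal literals must be written 0o644 (bare 0644 is a SyntaxError), and tarfile consumes binary streams, so the removed StringIO buffer becomes an io.BytesIO over encoded text. A standalone sketch with hypothetical names:

    import io
    import tarfile

    payload = "some generated banner text\n".encode('utf-8')
    info = tarfile.TarInfo(name="LICENSE-demo.txt")  # hypothetical archive member
    info.mode = 0o644                                # '0644' no longer parses in Python 3
    info.size = len(payload)
    with tarfile.open("demo.tar", "w") as archive:   # hypothetical archive path
        archive.addfile(info, fileobj=io.BytesIO(payload))  # addfile() wants a binary stream
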
diff --git a/site_scons/site_tools/git_decider.py b/site_scons/site_tools/git_decider.py
index cd69de95085..51e6cd54b26 100644
--- a/site_scons/site_tools/git_decider.py
+++ b/site_scons/site_tools/git_decider.py
@@ -12,9 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-
# If available, uses Git metadata to decide whether files are out of date.
+
def generate(env, **kwargs):
# Grab the existing decider functions out of the environment
@@ -47,7 +47,7 @@ def generate(env, **kwargs):
dependency.get_ninfo().csig = gitInfoForDep
return False
- if not(hasattr(prev_ni, 'csig')):
+ if not (hasattr(prev_ni, 'csig')):
prev_ni.csig = gitInfoForDep
result = gitInfoForDep == prev_ni.csig
@@ -60,6 +60,7 @@ def generate(env, **kwargs):
env.Decider(MongoGitDecider)
+
def exists(env):
try:
from git import Git
diff --git a/site_scons/site_tools/gziptool.py b/site_scons/site_tools/gziptool.py
index 8419352b481..3142d46cae9 100644
--- a/site_scons/site_tools/gziptool.py
+++ b/site_scons/site_tools/gziptool.py
@@ -18,7 +18,7 @@ import shutil
def GZipAction(target, source, env, **kw):
dst_gzip = gzip.GzipFile(str(target[0]), 'wb')
- with open(str(source[0]), 'r') as src_file:
+ with open(str(source[0]), 'rb') as src_file:
shutil.copyfileobj(src_file, dst_gzip)
dst_gzip.close()
@@ -38,5 +38,6 @@ def generate(env, **kwargs):
env.AddMethod(GZipTool, 'GZip')
+
def exists(env):
return True
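
The 'r' -> 'rb' change above matters because gzip.GzipFile is a binary-mode writer: shutil.copyfileobj would hand it str chunks from a text-mode source and fail with a TypeError. A standalone sketch (it creates its own input file; the paths are hypothetical):

    import gzip
    import shutil

    with open("notes.txt", "wb") as f:  # create a small input file for the demo
        f.write(b"demo contents\n")

    # Binary read mode matches GzipFile's bytes-only write interface.
    with open("notes.txt", "rb") as src, gzip.GzipFile("notes.txt.gz", "wb") as dst:
        shutil.copyfileobj(src, dst)
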
diff --git a/site_scons/site_tools/icecream.py b/site_scons/site_tools/icecream.py
index 9838b633490..003594a7a87 100644
--- a/site_scons/site_tools/icecream.py
+++ b/site_scons/site_tools/icecream.py
@@ -22,6 +22,7 @@ from pkg_resources import parse_version
icecream_version_min = '1.1rc2'
+
def generate(env):
if not exists(env):
@@ -31,14 +32,16 @@ def generate(env):
env['ICECC'] = env.WhereIs('$ICECC')
if not 'ICERUN' in env:
- env['ICERUN'] = env.File('$ICECC').File('icerun')
+ env['ICERUN'] = env.File('$ICECC').File('icerun')
# Absoluteify, for parity with ICECC
env['ICERUN'] = env.WhereIs('$ICERUN')
# We can't handle sanitizer blacklist files, so disable icecc then, and just flow through
# icerun to prevent slamming the local system with a huge -j value.
- if any(f.startswith("-fsanitize-blacklist=") for fs in ['CCFLAGS', 'CFLAGS', 'CXXFLAGS'] for f in env[fs]):
+ if any(
+ f.startswith("-fsanitize-blacklist=") for fs in ['CCFLAGS', 'CFLAGS', 'CXXFLAGS']
+ for f in env[fs]):
env['ICECC'] = '$ICERUN'
# Make CC and CXX absolute paths too. It is better for icecc.
@@ -46,7 +49,7 @@ def generate(env):
env['CXX'] = env.WhereIs('$CXX')
# Make a predictable name for the toolchain
- icecc_version_target_filename=env.subst('$CC$CXX').replace('/', '_')
+ icecc_version_target_filename = env.subst('$CC$CXX').replace('/', '_')
icecc_version = env.Dir('$BUILD_ROOT/scons/icecc').File(icecc_version_target_filename)
# Make an isolated environment so that our setting of ICECC_VERSION in the environment
@@ -63,12 +66,9 @@ def generate(env):
env['ENV']['ICECC_CLANG_REMOTE_CPP'] = 1
else:
toolchain = toolchain_env.Command(
- target=icecc_version,
- source=['$ICECC_CREATE_ENV', '$CC', '$CXX'],
- action=[
+ target=icecc_version, source=['$ICECC_CREATE_ENV', '$CC', '$CXX'], action=[
"${SOURCES[0]} --gcc ${SOURCES[1].abspath} ${SOURCES[2].abspath} $TARGET",
- ]
- )
+ ])
env.AppendUnique(CCFLAGS=['-fdirectives-only'])
# Add ICECC_VERSION to the environment, pointed at the generated
@@ -99,7 +99,7 @@ def generate(env):
suffixes = _CSuffixes + _CXXSuffixes
for object_builder in SCons.Tool.createObjBuilders(env):
emitterdict = object_builder.builder.emitter
- for suffix in emitterdict.iterkeys():
+ for suffix in emitterdict.keys():
if not suffix in suffixes:
continue
base = emitterdict[suffix]
@@ -128,6 +128,7 @@ def generate(env):
# env['ENV']['ICECC_DEBUG'] = 'debug'
# env['ENV']['ICECC_LOGFILE'] = 'icecc.log'
+
def exists(env):
icecc = env.get('ICECC', False)
@@ -135,10 +136,9 @@ def exists(env):
return False
icecc = env.WhereIs(icecc)
- pipe = SCons.Action._subproc(env, SCons.Util.CLVar(icecc) + ['--version'],
- stdin = 'devnull',
- stderr = 'devnull',
- stdout = subprocess.PIPE)
+ pipe = SCons.Action._subproc(env,
+ SCons.Util.CLVar(icecc) + ['--version'], stdin='devnull',
+ stderr='devnull', stdout=subprocess.PIPE, text=True)
if pipe.wait() != 0:
return False
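
The text=True addition matters because Python 3 pipes yield bytes by default; in text mode readline() returns str, so the version line can be inspected without an explicit decode. A sketch using subprocess directly (SCons.Action._subproc forwards these keyword arguments to subprocess.Popen); sys.executable stands in for the icecc binary so the snippet runs anywhere:

    import subprocess
    import sys

    pipe = subprocess.Popen([sys.executable, "--version"], stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, text=True)  # decode the pipe to str
    print(pipe.stdout.readline().strip())  # e.g. 'Python 3.7.3'
    pipe.wait()
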
diff --git a/site_scons/site_tools/idl_tool.py b/site_scons/site_tools/idl_tool.py
index c7f64293f5d..58873b576d0 100755
--- a/site_scons/site_tools/idl_tool.py
+++ b/site_scons/site_tools/idl_tool.py
@@ -21,6 +21,7 @@ import sys
import SCons
+
def idlc_emitter(target, source, env):
"""For each input IDL file, the tool produces a .cpp and .h file."""
first_source = str(source[0])
@@ -44,9 +45,9 @@ def idl_scanner(node, env, path):
# Use the import scanner mode of the IDL compiler to find imported files
cmd = [sys.executable, "buildscripts/idl/idlc.py", '--include','src', str(node), '--write-dependencies']
try:
- deps_str = subprocess.check_output(cmd)
+ deps_str = subprocess.check_output(cmd).decode('utf-8')
except subprocess.CalledProcessError as e:
- print("IDLC ERROR: %s" % (e.output) )
+ print("IDLC ERROR: %s" % (e.output))
raise
deps_list = deps_str.splitlines()
@@ -61,29 +62,26 @@ def idl_scanner(node, env, path):
idl_scanner = SCons.Scanner.Scanner(function=idl_scanner, skeys=['.idl'])
# TODO: create a scanner for imports when imports are implemented
-IDLCBuilder = SCons.Builder.Builder(
- action=IDLCAction,
- emitter=idlc_emitter,
- srcsuffx=".idl",
- suffix=".cpp",
- source_scanner = idl_scanner
- )
+IDLCBuilder = SCons.Builder.Builder(action=IDLCAction, emitter=idlc_emitter, srcsuffx=".idl",
+ suffix=".cpp", source_scanner=idl_scanner)
def generate(env):
bld = IDLCBuilder
- env.Append(SCANNERS = idl_scanner)
+ env.Append(SCANNERS=idl_scanner)
env['BUILDERS']['Idlc'] = bld
env['IDLC'] = sys.executable + " buildscripts/idl/idlc.py"
env['IDLCFLAGS'] = ''
base_dir = env.subst('$BUILD_ROOT/$VARIANT_DIR').replace("#", "")
- env['IDLCCOM'] = '$IDLC --include src --base_dir %s --target_arch $TARGET_ARCH --header ${TARGETS[1]} --output ${TARGETS[0]} $SOURCES ' % (base_dir)
+ env['IDLCCOM'] = '$IDLC --include src --base_dir %s --target_arch $TARGET_ARCH --header ${TARGETS[1]} --output ${TARGETS[0]} $SOURCES ' % (
+ base_dir)
env['IDLCSUFFIX'] = '.idl'
env['IDL_HAS_INLINE_DEPENDENCIES'] = True
+
def exists(env):
return True
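
subprocess.check_output() returns bytes in Python 3, so the scanner above decodes before splitlines(); otherwise the dependency list would hold bytes objects that never compare equal to str paths. A standalone sketch with a stand-in command in place of idlc.py:

    import subprocess
    import sys

    # Stand-in command that prints two dependency paths, one per line.
    cmd = [sys.executable, "-c", "print('src/a.idl'); print('src/b.idl')"]
    deps_str = subprocess.check_output(cmd).decode('utf-8')  # bytes -> str before splitting
    print(deps_str.splitlines())  # ['src/a.idl', 'src/b.idl']
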
diff --git a/site_scons/site_tools/incremental_link.py b/site_scons/site_tools/incremental_link.py
index cf74ef9674c..31f16a482da 100644
--- a/site_scons/site_tools/incremental_link.py
+++ b/site_scons/site_tools/incremental_link.py
@@ -14,10 +14,12 @@
import SCons
+
def _tag_as_precious(target, source, env):
env.Precious(target)
return target, source
+
def generate(env):
builders = env['BUILDERS']
for builder in ('Program', 'SharedLibrary', 'LoadableModule'):
@@ -27,6 +29,7 @@ def generate(env):
_tag_as_precious,
])
+
def exists(env):
# By default, the windows linker is incremental, so unless
# overridden in the environment with /INCREMENTAL:NO, the tool is
diff --git a/site_scons/site_tools/jsheader.py b/site_scons/site_tools/jsheader.py
index cb418506200..4c2765b7108 100644
--- a/site_scons/site_tools/jsheader.py
+++ b/site_scons/site_tools/jsheader.py
@@ -1,13 +1,14 @@
from SCons.Script import Action
+
def jsToH(env, target, source):
- return env.Command(
- target=target,
- source=['#site_scons/site_tools/jstoh.py'] + source,
- action=Action('$PYTHON ${SOURCES[0]} $TARGET ${SOURCES[1:]}'))
+ return env.Command(target=target, source=['#site_scons/site_tools/jstoh.py'] + source,
+ action=Action('$PYTHON ${SOURCES[0]} $TARGET ${SOURCES[1:]}'))
+
def generate(env, **kw):
env.AddMethod(jsToH, 'JSHeader')
+
def exists(env):
return True
diff --git a/site_scons/site_tools/jstoh.py b/site_scons/site_tools/jstoh.py
index 26eb6cbbf24..50c0b66cf32 100644
--- a/site_scons/site_tools/jstoh.py
+++ b/site_scons/site_tools/jstoh.py
@@ -39,7 +39,7 @@ def jsToHeader(target, source):
text = '\n'.join(h)
- with open(outFile, 'wb') as out:
+ with open(outFile, 'w') as out:
try:
out.write(text)
finally:
@@ -48,7 +48,7 @@ def jsToHeader(target, source):
if __name__ == "__main__":
if len(sys.argv) < 3:
- print "Must specify [target] [source] "
+ print("Must specify [target] [source] ")
sys.exit(1)
jsToHeader(sys.argv[1], sys.argv[2:])
diff --git a/site_scons/site_tools/libtool.py b/site_scons/site_tools/libtool.py
index 84a646ed066..a098d722fe8 100644
--- a/site_scons/site_tools/libtool.py
+++ b/site_scons/site_tools/libtool.py
@@ -1,5 +1,6 @@
import SCons
+
def generate(env):
env['AR'] = 'libtool'
@@ -13,5 +14,6 @@ def generate(env):
env['RANLIBCOM'] = noop_action
env['RANLIBCOMSTR'] = 'Skipping ranlib for libtool generated target $TARGET'
+
def exists(env):
return env.detect('libtool')
diff --git a/site_scons/site_tools/mongo_benchmark.py b/site_scons/site_tools/mongo_benchmark.py
index 2064516aa1f..d9c6a02f91e 100644
--- a/site_scons/site_tools/mongo_benchmark.py
+++ b/site_scons/site_tools/mongo_benchmark.py
@@ -11,10 +11,10 @@ def register_benchmark(env, test):
env.Alias('$BENCHMARK_ALIAS', test)
def benchmark_list_builder_action(env, target, source):
- ofile = open(str(target[0]), 'wb')
+ ofile = open(str(target[0]), 'w')
try:
for s in _benchmarks:
- print '\t' + str(s)
+ print('\t' + str(s))
ofile.write('%s\n' % s)
finally:
ofile.close()
@@ -40,9 +40,10 @@ def build_benchmark(env, target, source, **kwargs):
bmEnv.Install("#/build/benchmark/", result[0])
return result
+
def generate(env):
env.Command('$BENCHMARK_LIST', env.Value(_benchmarks),
- Action(benchmark_list_builder_action, "Generating $TARGET"))
+ Action(benchmark_list_builder_action, "Generating $TARGET"))
env.AddMethod(register_benchmark, 'RegisterBenchmark')
env.AddMethod(build_benchmark, 'Benchmark')
env.Alias('$BENCHMARK_ALIAS', '$BENCHMARK_LIST')
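
The inverse of the gzip fix appears in the benchmark and test list builders here and below: they write str ('%s\n' % s), so the Python 2 'wb' mode becomes text mode 'w'. A standalone sketch with hypothetical entries:

    # Writing str requires a text-mode handle in Python 3; 'wb' would demand bytes.
    tests = ["build/benchmark/foo_bm", "build/benchmark/bar_bm"]  # hypothetical entries
    with open("benchmark_list.txt", "w") as ofile:
        for s in tests:
            ofile.write('%s\n' % s)
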
diff --git a/site_scons/site_tools/mongo_integrationtest.py b/site_scons/site_tools/mongo_integrationtest.py
index 0ced90c9493..8e830958960 100644
--- a/site_scons/site_tools/mongo_integrationtest.py
+++ b/site_scons/site_tools/mongo_integrationtest.py
@@ -12,10 +12,10 @@ def register_integration_test(env, test):
env.Alias('$INTEGRATION_TEST_ALIAS', installed_test)
def integration_test_list_builder_action(env, target, source):
- ofile = open(str(target[0]), 'wb')
+ ofile = open(str(target[0]), 'w')
try:
for s in _integration_tests:
- print '\t' + str(s)
+ print('\t' + str(s))
ofile.write('%s\n' % s)
finally:
ofile.close()
@@ -31,9 +31,10 @@ def build_cpp_integration_test(env, target, source, **kwargs):
env.RegisterIntegrationTest(result[0])
return result
+
def generate(env):
env.Command('$INTEGRATION_TEST_LIST', env.Value(_integration_tests),
- Action(integration_test_list_builder_action, "Generating $TARGET"))
+ Action(integration_test_list_builder_action, "Generating $TARGET"))
env.AddMethod(register_integration_test, 'RegisterIntegrationTest')
env.AddMethod(build_cpp_integration_test, 'CppIntegrationTest')
env.Alias('$INTEGRATION_TEST_ALIAS', '$INTEGRATION_TEST_LIST')
diff --git a/site_scons/site_tools/mongo_unittest.py b/site_scons/site_tools/mongo_unittest.py
index 2ad0f51bfd0..938be7f9741 100644
--- a/site_scons/site_tools/mongo_unittest.py
+++ b/site_scons/site_tools/mongo_unittest.py
@@ -11,10 +11,10 @@ def register_unit_test(env, test):
env.Alias('$UNITTEST_ALIAS', test)
def unit_test_list_builder_action(env, target, source):
- ofile = open(str(target[0]), 'wb')
+ ofile = open(str(target[0]), 'w')
try:
for s in _unittests:
- print '\t' + str(s)
+ print('\t' + str(s))
ofile.write('%s\n' % s)
finally:
ofile.close()
@@ -33,9 +33,10 @@ def build_cpp_unit_test(env, target, source, **kwargs):
env.Install("#/build/unittests/", result[0])
return result
+
def generate(env):
env.Command('$UNITTEST_LIST', env.Value(_unittests),
- Action(unit_test_list_builder_action, "Generating $TARGET"))
+ Action(unit_test_list_builder_action, "Generating $TARGET"))
env.AddMethod(register_unit_test, 'RegisterUnitTest')
env.AddMethod(build_cpp_unit_test, 'CppUnitTest')
env.Alias('$UNITTEST_ALIAS', '$UNITTEST_LIST')
diff --git a/site_scons/site_tools/separate_debug.py b/site_scons/site_tools/separate_debug.py
index 3edd50518e1..6e78796244b 100644
--- a/site_scons/site_tools/separate_debug.py
+++ b/site_scons/site_tools/separate_debug.py
@@ -14,6 +14,7 @@
import SCons
+
def _update_builder(env, builder, bitcode):
old_scanner = builder.target_scanner
@@ -25,7 +26,8 @@ def _update_builder(env, builder, bitcode):
if origin:
origin_results = old_scanner(origin, env, path)
for origin_result in origin_results:
- origin_result_debug_file = getattr(origin_result.attributes, "separate_debug_file", None)
+ origin_result_debug_file = getattr(origin_result.attributes, "separate_debug_file",
+ None)
if origin_result_debug_file:
results.append(origin_result_debug_file)
# TODO: Do we need to do the same sort of drag along for bcsymbolmap files?
@@ -52,38 +54,26 @@ def _update_builder(env, builder, bitcode):
base_action.list.append(
SCons.Action.Action(
"dsymutil $TARGET --symbol-map=${TARGET}.bcsymbolmap -o ${TARGET}.dSYM",
- "Generating debug info for $TARGET into ${TARGET}.dSYM"
- )
- )
+ "Generating debug info for $TARGET into ${TARGET}.dSYM"))
else:
base_action.list.append(
- SCons.Action.Action(
- "dsymutil $TARGET -o ${TARGET}.dSYM",
- "Generating debug info for $TARGET into ${TARGET}.dSYM"
- )
- )
- base_action.list.append(
- SCons.Action.Action(
- "strip -Sx ${TARGET}",
- "Stripping ${TARGET}"
- )
- )
+ SCons.Action.Action("dsymutil $TARGET -o ${TARGET}.dSYM",
+ "Generating debug info for $TARGET into ${TARGET}.dSYM"))
+ base_action.list.append(SCons.Action.Action("strip -Sx ${TARGET}", "Stripping ${TARGET}"))
elif env.TargetOSIs('posix'):
base_action.list.extend([
- SCons.Action.Action(
- "${OBJCOPY} --only-keep-debug $TARGET ${TARGET}.debug",
- "Generating debug info for $TARGET into ${TARGET}.debug"
- ),
+ SCons.Action.Action("${OBJCOPY} --only-keep-debug $TARGET ${TARGET}.debug",
+ "Generating debug info for $TARGET into ${TARGET}.debug"),
SCons.Action.Action(
"${OBJCOPY} --strip-debug --add-gnu-debuglink ${TARGET}.debug ${TARGET}",
- "Stripping debug info from ${TARGET} and adding .gnu.debuglink to ${TARGET}.debug"
- ),
+ "Stripping debug info from ${TARGET} and adding .gnu.debuglink to ${TARGET}.debug"),
])
else:
pass
base_emitter = builder.emitter
+
def new_emitter(target, source, env):
bitcode_file = None
@@ -111,6 +101,7 @@ def _update_builder(env, builder, bitcode):
new_emitter = SCons.Builder.ListEmitter([base_emitter, new_emitter])
builder.emitter = new_emitter
+
def generate(env):
if not exists(env):
return
@@ -132,12 +123,12 @@ def generate(env):
"-Wl,-bitcode_symbol_map,${TARGET}.bcsymbolmap",
])
-
# TODO: For now, not doing this for programs. Need to update
# auto_install_binaries to understand to install the debug symbol
# for target X to the same target location as X.
for builder in ['SharedLibrary', 'LoadableModule']:
_update_builder(env, env['BUILDERS'][builder], bitcode)
+
def exists(env):
return True
diff --git a/site_scons/site_tools/split_dwarf.py b/site_scons/site_tools/split_dwarf.py
index 95130c9e9a3..c57b9e96822 100644
--- a/site_scons/site_tools/split_dwarf.py
+++ b/site_scons/site_tools/split_dwarf.py
@@ -26,6 +26,7 @@ _CXXSuffixes = ['.cpp', '.cc', '.cxx', '.c++', '.C++']
if SCons.Util.case_sensitive_suffixes('.c', '.C'):
_CXXSuffixes.append('.C')
+
def _dwo_emitter(target, source, env):
new_targets = []
for t in target:
@@ -40,6 +41,7 @@ def _dwo_emitter(target, source, env):
targets = target + new_targets
return (targets, source)
+
def generate(env):
suffixes = []
if _splitDwarfFlag in env['CCFLAGS']:
@@ -52,7 +54,7 @@ def generate(env):
for object_builder in SCons.Tool.createObjBuilders(env):
emitterdict = object_builder.builder.emitter
- for suffix in emitterdict.iterkeys():
+ for suffix in emitterdict.keys():
if not suffix in suffixes:
continue
base = emitterdict[suffix]
@@ -61,5 +63,6 @@ def generate(env):
_dwo_emitter,
])
+
def exists(env):
return any(_splitDwarfFlag in env[f] for f in ['CCFLAGS', 'CFLAGS', 'CXXFLAGS'])
diff --git a/site_scons/site_tools/thin_archive.py b/site_scons/site_tools/thin_archive.py
index 15357874438..b4b5b91ad5e 100644
--- a/site_scons/site_tools/thin_archive.py
+++ b/site_scons/site_tools/thin_archive.py
@@ -17,6 +17,7 @@ import SCons
import re
import subprocess
+
def exists(env):
if not 'AR' in env:
return False
@@ -30,10 +31,9 @@ def exists(env):
if not "rc" in env['ARFLAGS']:
return False
- pipe = SCons.Action._subproc(env, SCons.Util.CLVar(ar) + ['--version'],
- stdin = 'devnull',
- stderr = 'devnull',
- stdout = subprocess.PIPE)
+ pipe = SCons.Action._subproc(env,
+ SCons.Util.CLVar(ar) + ['--version'], stdin='devnull',
+ stderr='devnull', stdout=subprocess.PIPE)
if pipe.wait() != 0:
return False
@@ -41,7 +41,7 @@ def exists(env):
for line in pipe.stdout:
if found:
continue # consume all data
- found = re.search(r'^GNU ar|^LLVM', line)
+ found = re.search(r'^GNU ar|^LLVM', line.decode('utf-8'))
return bool(found)
@@ -56,6 +56,7 @@ def _add_emitter(builder):
new_emitter = SCons.Builder.ListEmitter([base_emitter, new_emitter])
builder.emitter = new_emitter
+
def _add_scanner(builder):
old_scanner = builder.target_scanner
path_function = old_scanner.path_function
@@ -69,13 +70,16 @@ def _add_scanner(builder):
new_results.extend(base.children())
return new_results
- builder.target_scanner = SCons.Scanner.Scanner(function=new_scanner, path_function=path_function)
+ builder.target_scanner = SCons.Scanner.Scanner(function=new_scanner,
+ path_function=path_function)
+
def generate(env):
if not exists(env):
return
- env['ARFLAGS'] = SCons.Util.CLVar([arflag if arflag != "rc" else "rcsTD" for arflag in env['ARFLAGS']])
+ env['ARFLAGS'] = SCons.Util.CLVar(
+ [arflag if arflag != "rc" else "rcsTD" for arflag in env['ARFLAGS']])
def noop_action(env, target, source):
pass
diff --git a/site_scons/site_tools/xcode.py b/site_scons/site_tools/xcode.py
index 9ec68c35470..5ddebb2e003 100644
--- a/site_scons/site_tools/xcode.py
+++ b/site_scons/site_tools/xcode.py
@@ -9,4 +9,4 @@ def generate(env):
if 'DEVELOPER_DIR' in os.environ:
env['ENV']['DEVELOPER_DIR'] = os.environ['DEVELOPER_DIR']
- print "NOTE: Xcode detected; propagating DEVELOPER_DIR from shell environment to subcommands"
+ print("NOTE: Xcode detected; propagating DEVELOPER_DIR from shell environment to subcommands")
diff --git a/src/mongo/SConscript b/src/mongo/SConscript
index 6067fd412f8..dc0dc2b5628 100644
--- a/src/mongo/SConscript
+++ b/src/mongo/SConscript
@@ -160,9 +160,9 @@ js_engine_ver = get_option("js-engine") if get_option("server-js") == "on" else
# On windows, we need to escape the backslashes in the command-line
# so that windows paths look okay.
-cmd_line = " ".join(sys.argv).encode('string-escape')
+cmd_line = " ".join(sys.argv).encode('unicode_escape')
if env.TargetOSIs('windows'):
- cmd_line = cmd_line.replace('\\', r'\\')
+    cmd_line = cmd_line.replace(b'\\', b'\\\\')
module_list = '{ %s }' % ', '.join([ '"{0}"'.format(x) for x in env['MONGO_MODULES'] ])
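Two things interact in the hunk above: str.encode('unicode_escape') returns bytes in Python 3, so the Windows fix-up must operate on bytes literals, and doubling a backslash needs b'\\\\' (two backslash bytes), since b'\\' is a single backslash and replacing it with itself would be a no-op. A small sketch:

    import sys

    # encode() yields bytes under Python 3.
    cmd_line = " ".join(sys.argv).encode('unicode_escape')
    # Double each backslash so Windows paths survive re-interpretation.
    cmd_line = cmd_line.replace(b'\\', b'\\\\')
    print(cmd_line)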
@@ -751,7 +751,7 @@ env.Append(MODULE_BANNERS = [distsrc.File('README'),
# If no module has introduced a file named LICENSE-Enterprise.txt then this
# is a Community build, so inject the AGPL and the Community license
-if sum(itertools.imap(lambda x: x.name == "LICENSE-Enterprise.txt", env['MODULE_BANNERS'])) == 0:
+if sum(map(lambda x: x.name == "LICENSE-Enterprise.txt", env['MODULE_BANNERS'])) == 0:
env.Append(MODULE_BANNERS = [distsrc.File('LICENSE-Community.txt')])
# All module banners get staged to the top level of the tarfile, so we
@@ -770,7 +770,7 @@ module_banner_transforms = ["--transform %s=$SERVER_DIST_BASENAME" % d for d in
# Allow modules to map original file name directories to subdirectories
# within the archive (e.g. { "src/mongo/db/modules/enterprise/docs": "snmp"})
archive_addition_transforms = []
-for full_dir, archive_dir in env["ARCHIVE_ADDITION_DIR_MAP"].items():
+for full_dir, archive_dir in list(env["ARCHIVE_ADDITION_DIR_MAP"].items()):
archive_addition_transforms.append("--transform \"%s=$SERVER_DIST_BASENAME/%s\"" %
(full_dir, archive_dir))
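dict.items() also returns a view in Python 3; the list() wrapper takes a snapshot, which only matters if the dict is mutated during iteration, but is a harmless way to keep 2to3 output safe. A sketch with an illustrative map:

    dir_map = {"src/mongo/db/modules/enterprise/docs": "snmp"}
    transforms = []
    for full_dir, archive_dir in list(dir_map.items()):
        transforms.append('--transform "%s=$SERVER_DIST_BASENAME/%s"'
                          % (full_dir, archive_dir))
    print(transforms)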
diff --git a/src/mongo/base/generate_error_codes.py b/src/mongo/base/generate_error_codes.py
index 57b88957dde..bbc507fc39a 100644
--- a/src/mongo/base/generate_error_codes.py
+++ b/src/mongo/base/generate_error_codes.py
@@ -26,7 +26,6 @@
# delete this exception statement from your version. If you delete this
# exception statement from all source files in the program, then also delete
# it in the license file.
-
"""Generate error_codes.{h,cpp} from error_codes.err.
Format of error_codes.err:
@@ -46,21 +45,18 @@ from collections import namedtuple
from Cheetah.Template import Template
import sys
+
def render_template(template_path, **kw):
'''Renders the template file located at template_path, using the variables defined by kw, and
returns the result as a string'''
template = Template.compile(
- file=template_path,
- compilerSettings=dict(
- directiveStartToken="//#",
- directiveEndToken="//#",
- commentStartToken="//##"
- ),
- baseclass=dict,
- useCache=False)
+ file=template_path,
+ compilerSettings=dict(directiveStartToken="//#", directiveEndToken="//#",
+ commentStartToken="//##"), baseclass=dict, useCache=False)
return str(template(**kw))
+
class ErrorCode:
def __init__(self, name, code, extra=None):
self.name = name
@@ -69,11 +65,11 @@ class ErrorCode:
if extra:
split = extra.split('::')
if not split[0]:
- die("Error for %s with extra info %s: fully qualified namespaces aren't supported"
- % (name, extra))
+ die("Error for %s with extra info %s: fully qualified namespaces aren't supported" %
+ (name, extra))
if split[0] == "mongo":
- die("Error for %s with extra info %s: don't include the mongo namespace"
- % (name, extra))
+ die("Error for %s with extra info %s: don't include the mongo namespace" % (name,
+ extra))
if len(split) > 1:
self.extra_class = split.pop()
self.extra_ns = "::".join(split)
@@ -82,11 +78,13 @@ class ErrorCode:
self.extra_ns = None
self.categories = []
+
class ErrorClass:
def __init__(self, name, codes):
self.name = name
self.codes = codes
+
def main(argv):
# Parse and validate argv.
if len(sys.argv) < 2:
@@ -113,7 +111,7 @@ def main(argv):
categories=error_classes,
)
- with open(output, 'wb') as outfile:
+ with open(output, 'w') as outfile:
outfile.write(text)
def die(message=None):
@@ -145,6 +143,7 @@ def check_for_conflicts(error_codes, error_classes):
if failed:
die()
+
def has_duplicate_error_codes(error_codes):
sorted_by_name = sorted(error_codes, key=lambda x: x.name)
sorted_by_code = sorted(error_codes, key=lambda x: x.code)
@@ -153,21 +152,22 @@ def has_duplicate_error_codes(error_codes):
prev = sorted_by_name[0]
for curr in sorted_by_name[1:]:
if curr.name == prev.name:
- sys.stdout.write('Duplicate name %s with codes %s and %s\n'
- % (curr.name, curr.code, prev.code))
+ sys.stdout.write(
+ 'Duplicate name %s with codes %s and %s\n' % (curr.name, curr.code, prev.code))
failed = True
prev = curr
prev = sorted_by_code[0]
for curr in sorted_by_code[1:]:
if curr.code == prev.code:
- sys.stdout.write('Duplicate code %s with names %s and %s\n'
- % (curr.code, curr.name, prev.name))
+ sys.stdout.write(
+ 'Duplicate code %s with names %s and %s\n' % (curr.code, curr.name, prev.name))
failed = True
prev = curr
return failed
+
def has_duplicate_error_classes(error_classes):
names = sorted(ec.name for ec in error_classes)
@@ -180,6 +180,7 @@ def has_duplicate_error_classes(error_classes):
prev_name = name
return failed
+
def has_missing_error_codes(error_codes, error_classes):
code_names = dict((ec.name, ec) for ec in error_codes)
failed = False
@@ -193,5 +194,6 @@ def has_missing_error_codes(error_codes, error_classes):
return failed
+
if __name__ == '__main__':
main(sys.argv)
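The mode change from 'wb' to 'w' above follows from the rendered template being a str: Python 3 file objects in binary mode accept only bytes. A minimal sketch (filename is hypothetical):

    text = "// generated error codes\n"
    with open("error_codes.h", "w") as outfile:  # text mode accepts str
        outfile.write(text)
    # open("error_codes.h", "wb").write(text) would raise TypeError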
diff --git a/src/mongo/db/auth/generate_action_types.py b/src/mongo/db/auth/generate_action_types.py
index 3d3a36c0412..618669fdfee 100755
--- a/src/mongo/db/auth/generate_action_types.py
+++ b/src/mongo/db/auth/generate_action_types.py
@@ -26,7 +26,6 @@
# delete this exception statement from your version. If you delete this
# exception statement from all source files in the program, then also delete
# it in the license file.
-
"""Generate action_type.{h,cpp}
Usage:
@@ -35,7 +34,6 @@ Usage:
import sys
-
headerFileTemplate = """// AUTO-GENERATED FILE DO NOT EDIT
// See src/mongo/db/auth/generate_action_types.py
/**
@@ -194,14 +192,14 @@ namespace mongo {
} // namespace mongo
"""
+
def writeSourceFile(actionTypes, sourceOutputFile):
actionTypeConstants = ""
fromStringIfStatements = ""
toStringCaseStatements = ""
for actionType in actionTypes:
actionTypeConstants += (" const ActionType ActionType::%(actionType)s"
- "(%(actionType)sValue);\n" %
- dict(actionType=actionType))
+ "(%(actionType)sValue);\n" % dict(actionType=actionType))
fromStringIfStatements += """ if (action == "%(actionType)s") {
*result = %(actionType)s;
return Status::OK();
@@ -215,6 +213,7 @@ def writeSourceFile(actionTypes, sourceOutputFile):
pass
+
def writeHeaderFile(actionTypes, headerOutputFile):
actionTypeConstants = ""
actionTypeIdentifiers = ""
@@ -225,6 +224,7 @@ def writeHeaderFile(actionTypes, headerOutputFile):
actionTypeIdentifiers=actionTypeIdentifiers)
headerOutputFile.write(formattedHeaderFile)
+
def hasDuplicateActionTypes(actionTypes):
sortedActionTypes = sorted(actionTypes)
@@ -232,7 +232,7 @@ def hasDuplicateActionTypes(actionTypes):
prevActionType = sortedActionTypes[0]
for actionType in sortedActionTypes[1:]:
if actionType == prevActionType:
- print 'Duplicate actionType %s\n' % actionType
+ print('Duplicate actionType %s\n' % actionType)
didFail = True
prevActionType = actionType
@@ -245,7 +245,7 @@ def parseActionTypesFromFile(actionTypesFilename):
if __name__ == "__main__":
if len(sys.argv) != 4:
- print "Usage: generate_action_types.py <path to action_types.txt> <header file path> <source file path>"
+ print("Usage: generate_action_types.py <path to action_types.txt> <header file path> <source file path>")
sys.exit(-1)
actionTypes = parseActionTypesFromFile(sys.argv[1])
diff --git a/src/mongo/db/fts/generate_stop_words.py b/src/mongo/db/fts/generate_stop_words.py
index 31603eb92ed..0d356a2a351 100644
--- a/src/mongo/db/fts/generate_stop_words.py
+++ b/src/mongo/db/fts/generate_stop_words.py
@@ -1,7 +1,7 @@
import sys
def generate( header, source, language_files ):
- out = open( header, "wb" )
+ out = open( header, "w" )
out.write( """
#pragma once
#include <set>
@@ -18,8 +18,8 @@ namespace fts {
- out = open( source, "wb" )
- out.write( '#include "%s"' % header.rpartition( "/" )[2].rpartition( "\\" )[2] )
+ out = open( source, "w", encoding='utf-8')
+ out.write( '#include "{}"'.format(header.rpartition( "/" )[2].rpartition( "\\" )[2]) )
out.write( """
namespace mongo {
namespace fts {
@@ -35,12 +35,13 @@ namespace fts {
out.write( ' {\n' )
out.write( ' const char* const words[] = {\n' )
for word in open( l_file, "rb" ):
- out.write( ' "%s",\n' % word.strip() )
+ out.write( ' "%s",\n' % word.decode('utf-8').strip() )
out.write( ' };\n' )
out.write( ' const size_t wordcnt = sizeof(words) / sizeof(words[0]);\n' )
out.write( ' std::set< std::string >& l = (*m)["%s"];\n' % l )
out.write( ' l.insert(&words[0], &words[wordcnt]);\n' )
out.write( ' }\n' )
+
out.write( """
}
} // namespace fts
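The stop-word lists can contain non-ASCII words, so the new code reads them as bytes and decodes explicitly, while writing the generated source as UTF-8 text. A sketch under those assumptions (file names are illustrative):

    out = open("stop_words_list.cpp", "w", encoding="utf-8")
    for word in open("stop_words_french.txt", "rb"):
        out.write('        "%s",\n' % word.decode("utf-8").strip())
    out.close()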
diff --git a/src/mongo/db/fts/unicode/gen_casefold_map.py b/src/mongo/db/fts/unicode/gen_casefold_map.py
index 19003693a2f..98378d94fb1 100644
--- a/src/mongo/db/fts/unicode/gen_casefold_map.py
+++ b/src/mongo/db/fts/unicode/gen_casefold_map.py
@@ -6,6 +6,7 @@ import sys
from gen_helper import getCopyrightNotice, openNamespaces, closeNamespaces, \
include
+
def generate(unicode_casefold_file, target):
"""Generates a C++ source file that contains a Unicode case folding
function.
@@ -13,7 +14,7 @@ def generate(unicode_casefold_file, target):
The case folding function contains a switch statement with cases for every
Unicode codepoint that has a case folding mapping.
"""
- out = open(target, "w")
+ out = open(target, "w", encoding='utf-8')
out.write(getCopyrightNotice())
out.write(include("mongo/db/fts/unicode/codepoints.h"))
@@ -22,9 +23,10 @@ def generate(unicode_casefold_file, target):
case_mappings = {}
- cf_file = open(unicode_casefold_file, 'rU')
+ cf_file = open(unicode_casefold_file, 'rb')
for line in cf_file:
+ line = line.decode('utf-8')
# Filter out blank lines and lines that start with #
data = line[:line.find('#')]
if(data == ""):
@@ -76,18 +78,19 @@ def generate(unicode_casefold_file, target):
for mapping in sorted_mappings:
if mapping[0] <= 0x7f:
- continue # ascii is special cased above.
+ continue # ascii is special cased above.
if mapping[0] in turkishMapping:
- out.write("case 0x%x: return mode == CaseFoldMode::kTurkish ? 0x%x : 0x%x;\n"
- % (mapping[0], turkishMapping[mapping[0]], mapping[1]))
+ out.write("case 0x%x: return mode == CaseFoldMode::kTurkish ? 0x%x : 0x%x;\n" %
+ (mapping[0], turkishMapping[mapping[0]], mapping[1]))
else:
- out.write("case 0x%x: return 0x%x;\n"%mapping)
+ out.write("case 0x%x: return 0x%x;\n" % mapping)
out.write("\
default: return codepoint;\n }\n}")
out.write(closeNamespaces())
+
if __name__ == "__main__":
generate(sys.argv[1], sys.argv[2])
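The 'rU' universal-newline mode is deprecated in Python 3, so the casefold table is opened in binary and each line decoded before the comment stripping above. A standalone sketch of the parse (the input line is illustrative of the CaseFolding.txt format):

    line = b"0041; C; 0061; # LATIN CAPITAL LETTER A\n"
    line = line.decode("utf-8")
    data = line[:line.find("#")]  # drop the trailing comment
    if data != "":
        fields = [f.strip() for f in data.split(";")]
        print(int(fields[0], 16), fields[1], int(fields[2], 16))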
diff --git a/src/mongo/db/fts/unicode/gen_delimiter_list.py b/src/mongo/db/fts/unicode/gen_delimiter_list.py
index 6cb007ab52a..152fcd77993 100644
--- a/src/mongo/db/fts/unicode/gen_delimiter_list.py
+++ b/src/mongo/db/fts/unicode/gen_delimiter_list.py
@@ -5,6 +5,7 @@ import sys
from gen_helper import getCopyrightNotice, openNamespaces, closeNamespaces, \
include
+
def generate(unicode_proplist_file, target):
"""Generates a C++ source file that contains a delimiter checking function.
@@ -21,25 +22,22 @@ def generate(unicode_proplist_file, target):
delim_codepoints = set()
- proplist_file = open(unicode_proplist_file, 'rU')
+ proplist_file = open(unicode_proplist_file, 'r')
- delim_properties = ["White_Space",
- "Dash",
- "Hyphen",
- "Quotation_Mark",
- "Terminal_Punctuation",
- "Pattern_Syntax",
- "STerm"]
+ delim_properties = [
+ "White_Space", "Dash", "Hyphen", "Quotation_Mark", "Terminal_Punctuation", "Pattern_Syntax",
+ "STerm"
+ ]
for line in proplist_file:
# Filter out blank lines and lines that start with #
data = line[:line.find('#')]
- if(data == ""):
+ if (data == ""):
continue
# Parse the data on the line
values = data.split("; ")
- assert(len(values) == 2)
+ assert (len(values) == 2)
uproperty = values[1].strip()
if uproperty in delim_properties:
@@ -47,7 +45,7 @@ def generate(unicode_proplist_file, target):
codepoint_range = values[0].split('..')
start = int(codepoint_range[0], 16)
- end = int(codepoint_range[1], 16) + 1
+ end = int(codepoint_range[1], 16) + 1
for i in range(start, end):
if i not in delim_codepoints:
@@ -82,7 +80,7 @@ def generate(unicode_proplist_file, target):
switch (codepoint) {\n""")
for delim in sorted(delim_codepoints):
- if delim <= 0x7f: # ascii codepoints handled in lists above.
+ if delim <= 0x7f: # ascii codepoints handled in lists above.
continue
out.write("\
case " + str(hex(delim)) + ": return true;\n")
@@ -92,5 +90,6 @@ def generate(unicode_proplist_file, target):
out.write(closeNamespaces())
+
if __name__ == "__main__":
generate(sys.argv[1], sys.argv[2])
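A worked instance of the range handling above, assuming the PropList.txt layout of inclusive hex ranges:

    data = "0009..000D    ; White_Space "
    values = data.split("; ")
    codepoint_range = values[0].split("..")
    start = int(codepoint_range[0], 16)
    end = int(codepoint_range[1], 16) + 1  # '..' ranges are inclusive
    print(list(range(start, end)))  # [9, 10, 11, 12, 13]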
diff --git a/src/mongo/db/fts/unicode/gen_diacritic_list.py b/src/mongo/db/fts/unicode/gen_diacritic_list.py
index baab6e0b9b7..3859e0e7fe3 100644
--- a/src/mongo/db/fts/unicode/gen_diacritic_list.py
+++ b/src/mongo/db/fts/unicode/gen_diacritic_list.py
@@ -5,6 +5,7 @@ import sys
from gen_helper import getCopyrightNotice, openNamespaces, closeNamespaces, \
include
+
def generate(unicode_proplist_file, target):
"""Generates a C++ source file that contains a diacritic checking function.
@@ -20,17 +21,17 @@ def generate(unicode_proplist_file, target):
diacritics = set()
- proplist_file = open(unicode_proplist_file, 'rU')
+ proplist_file = open(unicode_proplist_file, 'r')
for line in proplist_file:
# Filter out blank lines and lines that start with #
data = line[:line.find('#')]
- if(data == ""):
+ if (data == ""):
continue
# Parse the data on the line
values = data.split("; ")
- assert(len(values) == 2)
+ assert (len(values) == 2)
uproperty = values[1].strip()
if uproperty in "Diacritic":
@@ -38,7 +39,7 @@ def generate(unicode_proplist_file, target):
codepoint_range = values[0].split('..')
start = int(codepoint_range[0], 16)
- end = int(codepoint_range[1], 16) + 1
+ end = int(codepoint_range[1], 16) + 1
for i in range(start, end):
if i not in diacritics:
@@ -59,5 +60,6 @@ def generate(unicode_proplist_file, target):
out.write(closeNamespaces())
+
if __name__ == "__main__":
generate(sys.argv[1], sys.argv[2])
diff --git a/src/mongo/db/fts/unicode/gen_diacritic_map.py b/src/mongo/db/fts/unicode/gen_diacritic_map.py
index d77a7d1dd16..bad8919c24c 100644
--- a/src/mongo/db/fts/unicode/gen_diacritic_map.py
+++ b/src/mongo/db/fts/unicode/gen_diacritic_map.py
@@ -8,18 +8,19 @@ from gen_helper import getCopyrightNotice, openNamespaces, closeNamespaces, \
diacritics = set()
+
def load_diacritics(unicode_proplist_file):
proplist_file = open(unicode_proplist_file, 'r')
for line in proplist_file:
# Filter out blank lines and lines that start with #
data = line[:line.find('#')]
- if(data == ""):
+ if (data == ""):
continue
# Parse the data on the line
values = data.split("; ")
- assert(len(values) == 2)
+ assert (len(values) == 2)
uproperty = values[1].strip()
if uproperty == "Diacritic":
@@ -27,7 +28,7 @@ def load_diacritics(unicode_proplist_file):
codepoint_range = values[0].split('..')
start = int(codepoint_range[0], 16)
- end = int(codepoint_range[1], 16) + 1
+ end = int(codepoint_range[1], 16) + 1
for i in range(start, end):
if i not in diacritics:
@@ -36,8 +37,10 @@ def load_diacritics(unicode_proplist_file):
if int(values[0], 16) not in diacritics:
diacritics.add(int(values[0], 16))
+
diacritic_mappings = {}
+
def add_diacritic_mapping(codepoint):
# a : original unicode character
# d : decomposed unicode character
@@ -45,7 +48,7 @@ def add_diacritic_mapping(codepoint):
# c : recomposed unicode character with diacritics removed
a = chr(codepoint)
d = normalize('NFD', a)
- r = u''
+ r = ''
for i in range(len(d)):
if ord(d[i]) not in diacritics:
@@ -55,14 +58,16 @@ def add_diacritic_mapping(codepoint):
# Only use mappings where the final recomposed form is a single codepoint
if (a != c and len(c) == 1):
- assert c != '\0' # This is used to indicate the codepoint is a pure diacritic.
+ assert c != '\0' # This is used to indicate the codepoint is a pure diacritic.
assert ord(c) not in diacritics
diacritic_mappings[codepoint] = ord(c[0])
+
def add_diacritic_range(start, end):
for x in range(start, end + 1):
add_diacritic_mapping(x)
+
def generate(target):
"""Generates a C++ source file that contains a diacritic removal mapping
function.
@@ -101,8 +106,9 @@ def generate(target):
out.write(closeNamespaces())
+
if __name__ == "__main__":
- if(unidata_version != '8.0.0'):
+ if (unidata_version != '8.0.0'):
print("""ERROR: This script must be run with a version of Python that \
contains the Unicode 8.0.0 Character Database.""")
sys.exit(1)
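The mapping logic above (chr(), NFD decompose, drop diacritic codepoints, NFC recompose) can be exercised on its own; the diacritics set here is a one-element stand-in for the real table:

    from unicodedata import normalize

    diacritics = {0x0301}          # combining acute accent, for illustration
    a = chr(0x00E9)                # 'e' with acute
    d = normalize('NFD', a)        # decompose into 'e' + U+0301
    r = ''.join(ch for ch in d if ord(ch) not in diacritics)
    c = normalize('NFC', r)        # recompose with the accent removed
    print(a, '->', c)              # prints the accented char, then 'e'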
diff --git a/src/mongo/db/fts/unicode/gen_helper.py b/src/mongo/db/fts/unicode/gen_helper.py
index 5825bea57de..9f470904c4a 100644
--- a/src/mongo/db/fts/unicode/gen_helper.py
+++ b/src/mongo/db/fts/unicode/gen_helper.py
@@ -30,11 +30,14 @@ def getCopyrightNotice():
* THIS IS A GENERATED FILE, DO NOT MODIFY.
*/\n\n"""
+
def openNamespaces():
return "namespace mongo {\nnamespace unicode {\n\n"
+
def closeNamespaces():
return "\n} // namespace unicode\n} // namespace mongo\n"
+
def include(header):
return '#include "' + header + '"\n'
diff --git a/src/mongo/installer/compass/install_compass.in b/src/mongo/installer/compass/install_compass.in
index 87f99dafd48..9f922bc8ce5 100755
--- a/src/mongo/installer/compass/install_compass.in
+++ b/src/mongo/installer/compass/install_compass.in
@@ -63,7 +63,7 @@ def download_pkg(link, pkg_format=''):
' to download the compass installer for your platform.'
try:
- out = subprocess.check_output(['file', filename])
+ out = subprocess.check_output(['file', filename]).decode('utf-8')
except subprocess.CalledProcessError as error:
print 'Got an unexpected error checking file type %s' % error
sys.exit(1)
@@ -80,8 +80,9 @@ def install_mac(dmg):
tmp = tempfile.mkdtemp()
with open(os.devnull, 'w') as fnull:
try:
- subprocess.check_call(['hdiutil', 'attach', '-nobrowse', '-noautoopen',
- '-mountpoint', tmp, dmg], stdout=fnull, stderr=fnull)
+ subprocess.check_call(
+ ['hdiutil', 'attach', '-nobrowse', '-noautoopen', '-mountpoint', tmp, dmg],
+ stdout=fnull, stderr=fnull)
except subprocess.CalledProcessError as error:
print 'Problem running hdiutil: %s' % error
@@ -98,16 +99,13 @@ def install_mac(dmg):
except IOError:
print 'Unknown error copying MongoDB Compass to /Applications/'
finally:
- subprocess.check_call(
- ['hdiutil', 'detach', tmp], stdout=fnull, stderr=fnull)
+ subprocess.check_call(['hdiutil', 'detach', tmp], stdout=fnull, stderr=fnull)
if path.isdir('/Applications/MongoDB Compass.app'):
- subprocess.check_call(
- ['open', '/Applications/MongoDB Compass.app'])
+ subprocess.check_call(['open', '/Applications/MongoDB Compass.app'])
return
if path.isdir('/Applications/MongoDB Compass Community.app'):
- subprocess.check_call(
- ['open', '/Applications/MongoDB Compass Community.app'])
+ subprocess.check_call(['open', '/Applications/MongoDB Compass Community.app'])
return
@@ -135,9 +133,8 @@ def is_supported_distro():
if (distro_name == 'Ubuntu' and float(version_number) >= 14.04):
return True
- if ((distro_name == 'Red Hat Enterprise Linux Server' or
- 'CentOS' in distro_name) and
- (float(version_number) >= 7.0)):
+ if ((distro_name == 'Red Hat Enterprise Linux Server' or 'CentOS' in distro_name)
+ and (float(version_number) >= 7.0)):
return True
return False
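subprocess.check_output() likewise returns bytes under Python 3, so the result is decoded before the substring checks that follow; a sketch (command and path are illustrative):

    import subprocess

    out = subprocess.check_output(['file', '/bin/sh']).decode('utf-8')
    print('ELF' in out or 'Mach-O' in out)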
diff --git a/src/mongo/installer/msi/SConscript b/src/mongo/installer/msi/SConscript
index 32efbc03893..84403c73c92 100644
--- a/src/mongo/installer/msi/SConscript
+++ b/src/mongo/installer/msi/SConscript
@@ -11,7 +11,7 @@ if not env.TargetOSIs('windows'):
import re
import subprocess
-import _winreg
+import winreg
env = env.Clone()
@@ -40,12 +40,12 @@ if programfilesx86 is None:
programfilesx86 = "C:\\Program Files (x86)"
# Use vswhere (it has a fixed stable path) to query where Visual Studio is installed.
-vsinstall_path = subprocess.check_output([os.path.join(programfilesx86, "Microsoft Visual Studio", "Installer", "vswhere.exe"), "-version", "[15.0,16.0)", "-property", "installationPath", "-nologo"]).strip()
+vsinstall_path = subprocess.check_output([os.path.join(programfilesx86, "Microsoft Visual Studio", "Installer", "vswhere.exe"), "-version", "[15.0,16.0)", "-property", "installationPath", "-nologo"]).decode('utf-8').strip()
# Check the registry key that has the runtime lib version
try:
- vsruntime_key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, "SOFTWARE\\Microsoft\\VisualStudio\\14.0\\VC\\Runtimes\\x64")
- vslib_version,vslib_version_type = _winreg.QueryValueEx(vsruntime_key, "Version")
+ vsruntime_key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, "SOFTWARE\\Microsoft\\VisualStudio\\14.0\\VC\\Runtimes\\x64")
+ vslib_version,vslib_version_type = winreg.QueryValueEx(vsruntime_key, "Version")
except WindowsError:
print("Visual Studio Runtime not found in registry, disabling msi installer")
Return()
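Python 3 renames _winreg to winreg with the same API; a minimal sketch of the runtime-version query (Windows only, key path as in the change above):

    import winreg

    key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE,
                         "SOFTWARE\\Microsoft\\VisualStudio\\14.0\\VC\\Runtimes\\x64")
    vslib_version, vslib_version_type = winreg.QueryValueEx(key, "Version")
    print(vslib_version)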
diff --git a/src/mongo/util/generate_icu_init_cpp.py b/src/mongo/util/generate_icu_init_cpp.py
index 56f11217eca..848291d1bb7 100755
--- a/src/mongo/util/generate_icu_init_cpp.py
+++ b/src/mongo/util/generate_icu_init_cpp.py
@@ -31,6 +31,7 @@ import optparse
import os
import sys
+
def main(argv):
parser = optparse.OptionParser()
parser.add_option('-o', '--output', action='store', dest='output_cpp_file',
@@ -46,6 +47,7 @@ def main(argv):
parser.error("input ICU data file unspecified")
generate_cpp_file(options.input_data_file, options.output_cpp_file)
+
def generate_cpp_file(data_file_path, cpp_file_path):
source_template = '''// AUTO-GENERATED FILE DO NOT EDIT
// See generate_icu_init_cpp.py.
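In the final hunk below, ord() is dropped because iterating a bytes object yields ints in Python 3 (in Python 2, iterating a str yielded one-character strings). A quick sketch:

    data = b'\x00\x01\xff'
    print(','.join(str(byte) for byte in data))  # 0,1,255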
@@ -113,8 +115,8 @@ MONGO_INITIALIZER_GENERAL(LoadICUData, MONGO_NO_PREREQUISITES, ("BeginStartupOpt
'''
decimal_encoded_data = ''
with open(data_file_path, 'rb') as data_file:
- decimal_encoded_data = ','.join([str(ord(byte)) for byte in data_file.read()])
- with open(cpp_file_path, 'wb') as cpp_file:
+ decimal_encoded_data = ','.join([str(byte) for byte in data_file.read()])
+ with open(cpp_file_path, 'w') as cpp_file:
cpp_file.write(source_template % dict(decimal_encoded_data=decimal_encoded_data))
if __name__ == '__main__':