summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--SConstruct1909
-rwxr-xr-xbuildscripts/pylinters.py45
-rw-r--r--docs/linting.md6
-rw-r--r--etc/evergreen_yml_components/definitions.yml19
-rw-r--r--site_scons/libdeps.py240
-rw-r--r--site_scons/mongo/__init__.py2
-rw-r--r--site_scons/mongo/generators.py16
-rw-r--r--site_scons/mongo/install_actions.py20
-rw-r--r--site_scons/mongo/pip_requirements.py12
-rw-r--r--site_scons/site_tools/abilink.py6
-rw-r--r--site_scons/site_tools/auto_archive.py24
-rw-r--r--site_scons/site_tools/auto_install_binaries.py120
-rw-r--r--site_scons/site_tools/ccache.py11
-rw-r--r--site_scons/site_tools/compilation_db.py22
-rw-r--r--site_scons/site_tools/distsrc.py50
-rw-r--r--site_scons/site_tools/forceincludes.py10
-rw-r--r--site_scons/site_tools/git_decider.py1
-rw-r--r--site_scons/site_tools/gziptool.py9
-rw-r--r--site_scons/site_tools/icecream.py130
-rwxr-xr-xsite_scons/site_tools/idl_tool.py29
-rw-r--r--site_scons/site_tools/incremental_link.py15
-rwxr-xr-xsite_scons/site_tools/jstoh.py6
-rw-r--r--site_scons/site_tools/mongo_benchmark.py7
-rw-r--r--site_scons/site_tools/mongo_integrationtest.py7
-rw-r--r--site_scons/site_tools/mongo_libfuzzer.py12
-rw-r--r--site_scons/site_tools/mongo_test_execution.py41
-rw-r--r--site_scons/site_tools/mongo_test_list.py7
-rw-r--r--site_scons/site_tools/mongo_unittest.py7
-rw-r--r--site_scons/site_tools/ninja.py242
-rw-r--r--site_scons/site_tools/separate_debug.py64
-rw-r--r--site_scons/site_tools/split_dwarf.py5
-rw-r--r--site_scons/site_tools/tapilink.py16
-rw-r--r--site_scons/site_tools/thin_archive.py6
-rw-r--r--site_scons/site_tools/validate_cache_dir.py51
-rw-r--r--site_scons/site_tools/vcredist.py51
-rw-r--r--site_scons/site_tools/xcode.py3
-rw-r--r--src/SConscript9
-rw-r--r--src/mongo/SConscript32
-rw-r--r--src/mongo/base/SConscript8
-rw-r--r--src/mongo/bson/SConscript2
-rw-r--r--src/mongo/bson/util/SConscript8
-rw-r--r--src/mongo/client/SConscript25
-rw-r--r--src/mongo/client/sdam/SConscript28
-rw-r--r--src/mongo/crypto/SConscript47
-rw-r--r--src/mongo/db/SConscript83
-rw-r--r--src/mongo/db/auth/SConscript14
-rw-r--r--src/mongo/db/catalog/SConscript22
-rw-r--r--src/mongo/db/catalog/util/SConscript7
-rw-r--r--src/mongo/db/commands/SConscript38
-rw-r--r--src/mongo/db/concurrency/SConscript5
-rw-r--r--src/mongo/db/cst/SConscript6
-rw-r--r--src/mongo/db/exec/SConscript34
-rw-r--r--src/mongo/db/exec/document_value/SConscript6
-rw-r--r--src/mongo/db/exec/sbe/SConscript62
-rw-r--r--src/mongo/db/free_mon/SConscript1
-rw-r--r--src/mongo/db/ftdc/SConscript8
-rw-r--r--src/mongo/db/fts/SConscript33
-rw-r--r--src/mongo/db/fts/unicode/SConscript17
-rw-r--r--src/mongo/db/geo/SConscript24
-rw-r--r--src/mongo/db/index/SConscript11
-rw-r--r--src/mongo/db/matcher/SConscript2
-rw-r--r--src/mongo/db/ops/SConscript2
-rw-r--r--src/mongo/db/pipeline/SConscript55
-rw-r--r--src/mongo/db/pipeline/process_interface/SConscript2
-rw-r--r--src/mongo/db/query/SConscript15
-rw-r--r--src/mongo/db/query/ce/SConscript4
-rw-r--r--src/mongo/db/query/collation/SConscript3
-rw-r--r--src/mongo/db/query/datetime/SConscript16
-rw-r--r--src/mongo/db/query/optimizer/SConscript6
-rw-r--r--src/mongo/db/query/optimizer/algebra/SConscript4
-rw-r--r--src/mongo/db/repl/SConscript98
-rw-r--r--src/mongo/db/s/SConscript18
-rw-r--r--src/mongo/db/serverless/SConscript16
-rw-r--r--src/mongo/db/sorter/SConscript2
-rw-r--r--src/mongo/db/stats/SConscript12
-rw-r--r--src/mongo/db/storage/SConscript26
-rw-r--r--src/mongo/db/storage/kv/SConscript1
-rw-r--r--src/mongo/db/storage/wiredtiger/SConscript6
-rw-r--r--src/mongo/db/timeseries/SConscript4
-rw-r--r--src/mongo/db/views/SConscript2
-rw-r--r--src/mongo/dbtests/SConscript2
-rw-r--r--src/mongo/embedded/SConscript13
-rw-r--r--src/mongo/embedded/mongo_embedded/SConscript28
-rw-r--r--src/mongo/embedded/mongoc_embedded/SConscript31
-rw-r--r--src/mongo/embedded/stitch_support/SConscript24
-rw-r--r--src/mongo/executor/SConscript45
-rw-r--r--src/mongo/idl/SConscript9
-rw-r--r--src/mongo/installer/SConscript4
-rw-r--r--src/mongo/installer/compass/SConscript2
-rw-r--r--src/mongo/installer/msi/SConscript85
-rw-r--r--src/mongo/installer/msi/ca/SConscript5
-rw-r--r--src/mongo/logv2/SConscript2
-rw-r--r--src/mongo/platform/SConscript9
-rw-r--r--src/mongo/resmoke/SConscript4
-rw-r--r--src/mongo/rpc/SConscript7
-rw-r--r--src/mongo/s/SConscript23
-rw-r--r--src/mongo/s/catalog/SConscript8
-rw-r--r--src/mongo/s/commands/SConscript8
-rw-r--r--src/mongo/s/query/SConscript6
-rw-r--r--src/mongo/scripting/SConscript11
-rw-r--r--src/mongo/shell/SConscript34
-rw-r--r--src/mongo/stdx/SConscript2
-rw-r--r--src/mongo/tools/SConscript2
-rw-r--r--src/mongo/transport/SConscript7
-rw-r--r--src/mongo/unittest/SConscript16
-rw-r--r--src/mongo/util/SConscript90
-rw-r--r--src/mongo/util/cmdline_utils/SConscript4
-rw-r--r--src/mongo/util/concurrency/SConscript28
-rw-r--r--src/mongo/util/net/SConscript18
-rw-r--r--src/mongo/util/options_parser/SConscript7
-rw-r--r--src/mongo/watchdog/SConscript2
-rw-r--r--src/third_party/IntelRDFPMathLib20U1/SConscript24
-rw-r--r--src/third_party/SConscript523
-rw-r--r--src/third_party/abseil-cpp-master/SConscript13
-rw-r--r--src/third_party/asio-master/SConscript2
-rw-r--r--src/third_party/benchmark/SConscript46
-rw-r--r--src/third_party/boost/SConscript68
-rw-r--r--src/third_party/fmt/SConscript3
-rw-r--r--src/third_party/gperftools/SConscript42
-rw-r--r--src/third_party/icu4c-57.1/source/SConscript30
-rw-r--r--src/third_party/kms-message/SConscript2
-rw-r--r--src/third_party/libstemmer_c/SConscript4
-rw-r--r--src/third_party/mozjs/SConscript100
-rw-r--r--src/third_party/pcre-8.42/SConscript13
-rw-r--r--src/third_party/pcre2/SConscript14
-rw-r--r--src/third_party/s2/SConscript59
-rwxr-xr-xsrc/third_party/s2/base/SConscript12
-rwxr-xr-xsrc/third_party/s2/strings/SConscript8
-rwxr-xr-xsrc/third_party/s2/util/coding/SConscript9
-rwxr-xr-xsrc/third_party/s2/util/math/SConscript19
-rw-r--r--src/third_party/snappy-1.1.7/SConscript7
-rw-r--r--src/third_party/tomcrypt-1.18.2/SConscript14
-rw-r--r--src/third_party/unwind/SConscript53
-rw-r--r--src/third_party/wiredtiger/SConscript73
-rw-r--r--src/third_party/yaml-cpp/SConscript3
135 files changed, 3035 insertions, 2687 deletions
diff --git a/SConstruct b/SConstruct
index 2350fd17714..5cd00221797 100644
--- a/SConstruct
+++ b/SConstruct
@@ -34,10 +34,13 @@ import mongo.install_actions as install_actions
EnsurePythonVersion(3, 6)
EnsureSConsVersion(3, 1, 1)
+
# Monkey patch SCons.FS.File.release_target_info to be a no-op.
# See https://github.com/SCons/scons/issues/3454
def release_target_info_noop(self):
pass
+
+
SCons.Node.FS.File.release_target_info = release_target_info_noop
from buildscripts import utils
@@ -50,6 +53,7 @@ print('scons: running with args {}'.format(scons_invocation))
atexit.register(mongo.print_build_failures)
+
def add_option(name, **kwargs):
if 'dest' not in kwargs:
@@ -60,18 +64,22 @@ def add_option(name, **kwargs):
AddOption('--' + name, **kwargs)
+
def get_option(name):
return GetOption(name)
+
def has_option(name):
optval = GetOption(name)
# Options with nargs=0 are true when their value is the empty tuple. Otherwise,
# if the value is falsish (empty string, None, etc.), coerce to False.
return True if optval == () else bool(optval)
+
def use_system_version_of_library(name):
return has_option('use-system-all') or has_option('use-system-' + name)
+
# Returns true if we have been configured to use a system version of any C++ library. If you
# add a new C++ library dependency that may be shimmed out to the system, add it to the below
# list.
@@ -79,12 +87,15 @@ def using_system_version_of_cxx_libraries():
cxx_library_names = ["tcmalloc", "boost"]
return True in [use_system_version_of_library(x) for x in cxx_library_names]
+
def make_variant_dir_generator():
memoized_variant_dir = [False]
+
def generate_variant_dir(target, source, env, for_signature):
if not memoized_variant_dir[0]:
memoized_variant_dir[0] = env.subst('$BUILD_ROOT/$VARIANT_DIR')
return memoized_variant_dir[0]
+
return generate_variant_dir
@@ -104,7 +115,8 @@ SetOption('random', 1)
# using the nargs='const' mechanism.
#
-add_option('ninja',
+add_option(
+ 'ninja',
choices=['enabled', 'disabled'],
default='disabled',
nargs='?',
@@ -113,19 +125,22 @@ add_option('ninja',
help='Enable the build.ninja generator tool stable or canary version',
)
-add_option('force-jobs',
+add_option(
+ 'force-jobs',
help='Allow more jobs than available cpu\'s when icecream is not enabled.',
- nargs=0
+ nargs=0,
)
-add_option('build-tools',
+add_option(
+ 'build-tools',
choices=['stable', 'next'],
default='stable',
type='choice',
help='Enable experimental build tools',
)
-add_option('legacy-tarball',
+add_option(
+ 'legacy-tarball',
choices=['true', 'false'],
default='false',
const='true',
@@ -134,14 +149,16 @@ add_option('legacy-tarball',
help='Build a tarball matching the old MongoDB dist targets',
)
-add_option('lint-scope',
+add_option(
+ 'lint-scope',
choices=['all', 'changed'],
default='all',
type='choice',
- help='Lint files in the current git diff instead of all files'
+ help='Lint files in the current git diff instead of all files',
)
-add_option('install-mode',
+add_option(
+ 'install-mode',
choices=['hygienic'],
default='hygienic',
help='select type of installation',
@@ -149,30 +166,36 @@ add_option('install-mode',
type='choice',
)
-add_option('install-action',
+add_option(
+ 'install-action',
choices=([*install_actions.available_actions] + ['default']),
default='default',
- help='select mechanism to use to install files (advanced option to reduce disk IO and utilization)',
+ help=
+ 'select mechanism to use to install files (advanced option to reduce disk IO and utilization)',
nargs=1,
type='choice',
)
-add_option('build-dir',
+add_option(
+ 'build-dir',
default='#build',
help='build output directory',
)
-add_option('release',
+add_option(
+ 'release',
help='release build',
nargs=0,
)
-add_option('lto',
+add_option(
+ 'lto',
help='enable link time optimizations (experimental, except with MSVC)',
nargs=0,
)
-add_option('endian',
+add_option(
+ 'endian',
choices=['big', 'little', 'auto'],
default='auto',
help='endianness of target platform',
@@ -180,12 +203,14 @@ add_option('endian',
type='choice',
)
-add_option('disable-minimum-compiler-version-enforcement',
+add_option(
+ 'disable-minimum-compiler-version-enforcement',
help='allow use of unsupported older compilers (NEVER for production builds)',
nargs=0,
)
-add_option('ssl',
+add_option(
+ 'ssl',
help='Enable or Disable SSL',
choices=['on', 'off'],
default='on',
@@ -194,7 +219,8 @@ add_option('ssl',
type='choice',
)
-add_option('wiredtiger',
+add_option(
+ 'wiredtiger',
choices=['on', 'off'],
const='on',
default='on',
@@ -203,7 +229,8 @@ add_option('wiredtiger',
type='choice',
)
-add_option('ocsp-stapling',
+add_option(
+ 'ocsp-stapling',
choices=['on', 'off'],
default='on',
help='Enable OCSP Stapling on servers',
@@ -212,36 +239,42 @@ add_option('ocsp-stapling',
)
js_engine_choices = ['mozjs', 'none']
-add_option('js-engine',
+add_option(
+ 'js-engine',
choices=js_engine_choices,
default=js_engine_choices[0],
help='JavaScript scripting engine implementation',
type='choice',
)
-add_option('server-js',
+add_option(
+ 'server-js',
choices=['on', 'off'],
default='on',
help='Build mongod without JavaScript support',
type='choice',
)
-add_option('libc++',
+add_option(
+ 'libc++',
help='use libc++ (experimental, requires clang)',
nargs=0,
)
-add_option('use-glibcxx-debug',
+add_option(
+ 'use-glibcxx-debug',
help='Enable the glibc++ debug implementations of the C++ standard libary',
nargs=0,
)
-add_option('noshell',
+add_option(
+ 'noshell',
help="don't build shell",
nargs=0,
)
-add_option('dbg',
+add_option(
+ 'dbg',
choices=['on', 'off'],
const='on',
default='off',
@@ -250,7 +283,8 @@ add_option('dbg',
type='choice',
)
-add_option('separate-debug',
+add_option(
+ 'separate-debug',
choices=['on', 'off'],
const='on',
default='off',
@@ -259,7 +293,8 @@ add_option('separate-debug',
type='choice',
)
-add_option('spider-monkey-dbg',
+add_option(
+ 'spider-monkey-dbg',
choices=['on', 'off'],
const='on',
default='off',
@@ -268,7 +303,8 @@ add_option('spider-monkey-dbg',
type='choice',
)
-add_option('opt',
+add_option(
+ 'opt',
choices=['on', 'size', 'off'],
const='on',
help='Enable compile-time optimization',
@@ -291,75 +327,87 @@ experimental_optimization_choices = ['*']
experimental_optimization_choices.extend("+" + opt for opt in experimental_optimizations)
experimental_optimization_choices.extend("-" + opt for opt in experimental_optimizations)
-add_option('experimental-optimization',
+add_option(
+ 'experimental-optimization',
action="append",
choices=experimental_optimization_choices,
const=experimental_optimization_choices[0],
default=['+sandybridge'],
help='Enable experimental optimizations',
nargs='?',
- type='choice'
+ type='choice',
)
-add_option('debug-compress',
+add_option(
+ 'debug-compress',
action="append",
choices=["off", "as", "ld"],
default=["auto"],
help="Compress debug sections",
)
-add_option('sanitize',
+add_option(
+ 'sanitize',
help='enable selected sanitizers',
metavar='san1,san2,...sanN',
)
-add_option('sanitize-coverage',
+add_option(
+ 'sanitize-coverage',
help='enable selected coverage sanitizers',
metavar='cov1,cov2,...covN',
)
-add_option('allocator',
+add_option(
+ 'allocator',
choices=["auto", "system", "tcmalloc", "tcmalloc-experimental"],
default="auto",
help='allocator to use (use "auto" for best choice for current platform)',
type='choice',
)
-add_option('gdbserver',
+add_option(
+ 'gdbserver',
help='build in gdb server support',
nargs=0,
)
-add_option('lldb-server',
+add_option(
+ 'lldb-server',
help='build in lldb server support',
nargs=0,
)
-add_option('gcov',
+add_option(
+ 'gcov',
help='compile with flags for gcov',
nargs=0,
)
-add_option('enable-free-mon',
+add_option(
+ 'enable-free-mon',
choices=["auto", "on", "off"],
default="auto",
help='Disable support for Free Monitoring to avoid HTTP client library dependencies',
type='choice',
)
-add_option('enable-http-client',
+add_option(
+ 'enable-http-client',
choices=["auto", "on", "off"],
default="auto",
help='Enable support for HTTP client requests (required WinHTTP or cURL)',
type='choice',
)
-add_option('use-sasl-client',
+add_option(
+ 'use-sasl-client',
help='Support SASL authentication in the client library',
nargs=0,
)
-add_option('use-diagnostic-latches',
+add_option(
+ 'use-diagnostic-latches',
choices=['on', 'off'],
default='on',
help='Enable annotated Mutex types',
@@ -368,38 +416,45 @@ add_option('use-diagnostic-latches',
# Most of the "use-system-*" options follow a simple form.
for pack in [
- ('asio', 'ASIO',),
- ('boost',),
- ('fmt',),
+ (
+ 'asio',
+ 'ASIO',
+ ),
+ ('boost', ),
+ ('fmt', ),
('google-benchmark', 'Google benchmark'),
('icu', 'ICU'),
('intel_decimal128', 'intel decimal128'),
- ('kms-message',),
- ('pcre',),
- ('pcre2',),
- ('snappy',),
- ('stemmer',),
- ('tcmalloc',),
- ('libunwind',),
- ('valgrind',),
- ('wiredtiger',),
- ('yaml',),
- ('zlib',),
+ ('kms-message', ),
+ ('pcre', ),
+ ('pcre2', ),
+ ('snappy', ),
+ ('stemmer', ),
+ ('tcmalloc', ),
+ ('libunwind', ),
+ ('valgrind', ),
+ ('wiredtiger', ),
+ ('yaml', ),
+ ('zlib', ),
('zstd', 'Zstandard'),
- ]:
+]:
name = pack[0]
pretty = name
if len(pack) == 2:
pretty = pack[1]
- add_option(f'use-system-{name}',
- help=f'use system version of {pretty} library',
- nargs=0)
+ add_option(
+ f'use-system-{name}',
+ help=f'use system version of {pretty} library',
+ nargs=0,
+ )
-add_option('system-boost-lib-search-suffixes',
+add_option(
+ 'system-boost-lib-search-suffixes',
help='Comma delimited sequence of boost library suffixes to search',
)
-add_option('use-system-mongo-c',
+add_option(
+ 'use-system-mongo-c',
choices=['on', 'off', 'auto'],
const='on',
default="auto",
@@ -408,12 +463,14 @@ add_option('use-system-mongo-c',
type='choice',
)
-add_option('use-system-all',
+add_option(
+ 'use-system-all',
help='use all system libraries',
nargs=0,
)
-add_option('build-fast-and-loose',
+add_option(
+ 'build-fast-and-loose',
choices=['on', 'off', 'auto'],
const='on',
default='auto',
@@ -422,69 +479,80 @@ add_option('build-fast-and-loose',
type='choice',
)
-add_option("disable-warnings-as-errors",
+add_option(
+ "disable-warnings-as-errors",
action="append",
choices=["configure", "source"],
const="source",
default=[],
- help="Don't add a warnings-as-errors flag to compiler command lines in selected contexts; defaults to 'source' if no argument is provided",
+ help=
+ "Don't add a warnings-as-errors flag to compiler command lines in selected contexts; defaults to 'source' if no argument is provided",
nargs="?",
type="choice",
)
-add_option('detect-odr-violations',
+add_option(
+ 'detect-odr-violations',
help="Have the linker try to detect ODR violations, if supported",
nargs=0,
)
-add_option('variables-help',
+add_option(
+ 'variables-help',
help='Print the help text for SCons variables',
nargs=0,
)
-add_option('osx-version-min',
+add_option(
+ 'osx-version-min',
help='minimum OS X version to support',
)
# https://docs.microsoft.com/en-us/cpp/porting/modifying-winver-and-win32-winnt?view=vs-2017
# https://docs.microsoft.com/en-us/windows-server/get-started/windows-server-release-info
win_version_min_choices = {
- 'win10' : ('0A00', '0000'),
- 'ws2016' : ('0A00', '1607'),
- 'ws2019' : ('0A00', '1809')
+ 'win10': ('0A00', '0000'),
+ 'ws2016': ('0A00', '1607'),
+ 'ws2019': ('0A00', '1809'),
}
-add_option('win-version-min',
+add_option(
+ 'win-version-min',
choices=list(win_version_min_choices.keys()),
default=None,
help='minimum Windows version to support',
type='choice',
)
-add_option('cache',
+add_option(
+ 'cache',
choices=["all", "nolinked"],
const='all',
help='Use an object cache rather than a per-build variant directory (experimental)',
nargs='?',
)
-add_option('cache-dir',
+add_option(
+ 'cache-dir',
default='$BUILD_ROOT/scons/cache',
help='Specify the directory to use for caching objects if --cache is in use',
)
-add_option('cache-signature-mode',
+add_option(
+ 'cache-signature-mode',
choices=['none', 'validate'],
default="none",
help='Extra check to validate integrity of cache files after pulling from cache',
)
-add_option("cxx-std",
+add_option(
+ "cxx-std",
choices=["17", "20"],
default="17",
help="Select the C++ language standard to build with",
)
+
def find_mongo_custom_variables():
files = []
paths = [path for path in sys.path if 'site_scons' in path]
@@ -494,40 +562,47 @@ def find_mongo_custom_variables():
files.append(probe)
return files
-add_option('variables-files',
+
+add_option(
+ 'variables-files',
default=[],
action="append",
help="Specify variables files to load.",
)
link_model_choices = ['auto', 'object', 'static', 'dynamic', 'dynamic-strict', 'dynamic-sdk']
-add_option('link-model',
+add_option(
+ 'link-model',
choices=link_model_choices,
default='auto',
help='Select the linking model for the project',
- type='choice'
+ type='choice',
)
-add_option('linker',
+add_option(
+ 'linker',
choices=['auto', 'gold', 'lld', 'bfd'],
default='auto',
help='Specify the type of linker to use.',
- type='choice'
+ type='choice',
)
-variable_parse_mode_choices=['auto', 'posix', 'other']
-add_option('variable-parse-mode',
+variable_parse_mode_choices = ['auto', 'posix', 'other']
+add_option(
+ 'variable-parse-mode',
choices=variable_parse_mode_choices,
default=variable_parse_mode_choices[0],
help='Select which parsing mode is used to interpret command line variables',
type='choice',
)
-add_option('modules',
+add_option(
+ 'modules',
help="Comma-separated list of modules to build. Empty means none. Default is all.",
)
-add_option('runtime-hardening',
+add_option(
+ 'runtime-hardening',
choices=["on", "off"],
default="on",
help="Enable runtime hardening features (e.g. stack smash protection)",
@@ -543,24 +618,27 @@ experimental_runtime_hardening_choices = ['*']
experimental_runtime_hardening_choices.extend("+" + opt for opt in experimental_runtime_hardenings)
experimental_runtime_hardening_choices.extend("-" + opt for opt in experimental_runtime_hardenings)
-add_option('experimental-runtime-hardening',
+add_option(
+ 'experimental-runtime-hardening',
action="append",
choices=experimental_runtime_hardening_choices,
const=experimental_runtime_hardening_choices[0],
default=[],
help='Enable experimental runtime hardenings',
nargs='?',
- type='choice'
+ type='choice',
)
-add_option('use-hardware-crc32',
+add_option(
+ 'use-hardware-crc32',
choices=["on", "off"],
default="on",
help="Enable CRC32 hardware acceleration",
type='choice',
)
-add_option('git-decider',
+add_option(
+ 'git-decider',
choices=["on", "off"],
const='on',
default="off",
@@ -569,19 +647,23 @@ add_option('git-decider',
type="choice",
)
-add_option('toolchain-root',
+add_option(
+ 'toolchain-root',
default=None,
help="Name a toolchain root for use with toolchain selection Variables files in etc/scons",
)
-add_option('msvc-debugging-format',
+add_option(
+ 'msvc-debugging-format',
choices=["codeview", "pdb"],
default="codeview",
- help='Debugging format in debug builds using msvc. Codeview (/Z7) or Program database (/Zi). Default is codeview.',
+ help=
+ 'Debugging format in debug builds using msvc. Codeview (/Z7) or Program database (/Zi). Default is codeview.',
type='choice',
)
-add_option('use-libunwind',
+add_option(
+ 'use-libunwind',
choices=["on", "off", "auto"],
const="on",
default="auto",
@@ -590,26 +672,31 @@ add_option('use-libunwind',
type='choice',
)
-add_option('jlink',
- help="Limit link concurrency. Takes either an integer to limit to or a"
- " float between 0 and 1.0 whereby jobs will be multiplied to get the final"
- " jlink value."
- "\n\nExample: --jlink=0.75 --jobs 8 will result in a jlink value of 6",
- const=0.5,
- default=None,
- nargs='?',
- type=float)
-
-add_option('enable-usdt-probes',
- choices=["on", "off", "auto"],
- default="auto",
- help='Enable USDT probes. Default is auto, which is enabled only on Linux with SystemTap headers',
- type='choice',
+add_option(
+ 'jlink',
+ help="Limit link concurrency. Takes either an integer to limit to or a"
+ " float between 0 and 1.0 whereby jobs will be multiplied to get the final"
+ " jlink value."
+ "\n\nExample: --jlink=0.75 --jobs 8 will result in a jlink value of 6",
+ const=0.5,
+ default=None,
+ nargs='?',
+ type=float,
+)
+
+add_option(
+ 'enable-usdt-probes',
+ choices=["on", "off", "auto"],
+ default="auto",
+ help=
+ 'Enable USDT probes. Default is auto, which is enabled only on Linux with SystemTap headers',
+ type='choice',
nargs='?',
const='on',
)
-add_option('libdeps-debug',
+add_option(
+ 'libdeps-debug',
choices=['on', 'off'],
const='off',
help='Print way too much debugging information on how libdeps is handling dependencies.',
@@ -617,7 +704,8 @@ add_option('libdeps-debug',
type='choice',
)
-add_option('libdeps-linting',
+add_option(
+ 'libdeps-linting',
choices=['on', 'off', 'print'],
const='on',
default='on',
@@ -626,7 +714,8 @@ add_option('libdeps-linting',
type='choice',
)
-add_option('visibility-support',
+add_option(
+ 'visibility-support',
choices=['auto', 'on', 'off'],
const='auto',
default='auto',
@@ -660,6 +749,7 @@ except ValueError as e:
print(("Error decoding version.json: {0}".format(e)))
Exit(1)
+
# Setup the command-line variables
def variable_shlex_converter(val):
# If the argument is something other than a string, propagate
@@ -671,12 +761,13 @@ def variable_shlex_converter(val):
parse_mode = 'other' if mongo_platform.is_running_os('windows') else 'posix'
return shlex.split(val, posix=(parse_mode == 'posix'))
+
def variable_arch_converter(val):
arches = {
'x86_64': 'x86_64',
- 'amd64': 'x86_64',
- 'emt64': 'x86_64',
- 'x86': 'i386',
+ 'amd64': 'x86_64',
+ 'emt64': 'x86_64',
+ 'x86': 'i386',
}
val = val.lower()
@@ -691,6 +782,7 @@ def variable_arch_converter(val):
# Return whatever val is passed in - hopefully it's legit
return val
+
# The Scons 'default' tool enables a lot of tools that we don't actually need to enable.
# On platforms like Solaris, it actually does the wrong thing by enabling the sunstudio
# toolchain first. As such it is simpler and more efficient to manually load the precise
@@ -708,6 +800,7 @@ def decide_platform_tools():
else:
return ["default"]
+
def variable_tools_converter(val):
tool_list = shlex.split(val)
# This list is intentionally not sorted; the order of tool loading
@@ -726,15 +819,18 @@ def variable_tools_converter(val):
"textfile",
]
+
def variable_distsrc_converter(val):
if not val.endswith("/"):
return val + "/"
return val
+
def fatal_error(env, msg, *args):
print(msg.format(*args))
Exit(1)
+
# Apply the default variables files, and walk the provided
# arguments. Interpret any falsy argument (like the empty string) as
# resetting any prior state. This makes the argument
@@ -754,70 +850,94 @@ for vf in variables_files:
env_vars = Variables(
files=variables_files,
- args=ARGUMENTS
+ args=ARGUMENTS,
)
sconsflags = os.environ.get('SCONSFLAGS', None)
if sconsflags:
print(("Using SCONSFLAGS environment variable arguments: %s" % sconsflags))
-env_vars.Add('ABIDW',
- help="Configures the path to the 'abidw' (a libabigail) utility")
+env_vars.Add(
+ 'ABIDW',
+ help="Configures the path to the 'abidw' (a libabigail) utility",
+)
-env_vars.Add('AR',
- help='Sets path for the archiver')
+env_vars.Add(
+ 'AR',
+ help='Sets path for the archiver',
+)
-env_vars.Add('ARFLAGS',
+env_vars.Add(
+ 'ARFLAGS',
help='Sets flags for the archiver',
- converter=variable_shlex_converter)
+ converter=variable_shlex_converter,
+)
-env_vars.Add('CCACHE',
- help='Tells SCons where the ccache binary is')
+env_vars.Add(
+ 'CCACHE',
+ help='Tells SCons where the ccache binary is',
+)
env_vars.Add(
'CACHE_SIZE',
help='Maximum size of the SCons cache (in gigabytes)',
default=32,
- converter=lambda x:int(x)
+ converter=lambda x: int(x),
)
env_vars.Add(
'CACHE_PRUNE_TARGET',
help='Maximum percent in-use in SCons cache after pruning',
default=66,
- converter=lambda x:int(x)
+ converter=lambda x: int(x),
)
-env_vars.Add('CC',
- help='Selects the C compiler to use')
+env_vars.Add(
+ 'CC',
+ help='Selects the C compiler to use',
+)
-env_vars.Add('CCFLAGS',
+env_vars.Add(
+ 'CCFLAGS',
help='Sets flags for the C and C++ compiler',
- converter=variable_shlex_converter)
+ converter=variable_shlex_converter,
+)
-env_vars.Add('ASFLAGS',
+env_vars.Add(
+ 'ASFLAGS',
help='Sets assembler specific flags',
- converter=variable_shlex_converter)
+ converter=variable_shlex_converter,
+)
-env_vars.Add('CFLAGS',
+env_vars.Add(
+ 'CFLAGS',
help='Sets flags for the C compiler',
- converter=variable_shlex_converter)
+ converter=variable_shlex_converter,
+)
-env_vars.Add('CPPDEFINES',
+env_vars.Add(
+ 'CPPDEFINES',
help='Sets pre-processor definitions for C and C++',
converter=variable_shlex_converter,
- default=[])
+ default=[],
+)
-env_vars.Add('CPPPATH',
+env_vars.Add(
+ 'CPPPATH',
help='Adds paths to the preprocessor search path',
- converter=variable_shlex_converter)
+ converter=variable_shlex_converter,
+)
-env_vars.Add('CXX',
- help='Selects the C++ compiler to use')
+env_vars.Add(
+ 'CXX',
+ help='Selects the C++ compiler to use',
+)
-env_vars.Add('CXXFLAGS',
+env_vars.Add(
+ 'CXXFLAGS',
help='Sets flags for the C++ compiler',
- converter=variable_shlex_converter)
+ converter=variable_shlex_converter,
+)
default_destdir = '$BUILD_ROOT/install'
if get_option('ninja') != 'disabled':
@@ -826,142 +946,207 @@ if get_option('ninja') != 'disabled':
# ninja files need to build to different install dirs.
default_destdir = '$BUILD_DIR/install'
-env_vars.Add('DESTDIR',
+env_vars.Add(
+ 'DESTDIR',
help='Where builds will install files',
- default=default_destdir)
+ default=default_destdir,
+)
-env_vars.Add('DSYMUTIL',
+env_vars.Add(
+ 'DSYMUTIL',
help='Path to the dsymutil utility',
)
-env_vars.Add('GITDIFFFLAGS',
+env_vars.Add(
+ 'GITDIFFFLAGS',
help='Sets flags for git diff',
- default='')
+ default='',
+)
-env_vars.Add('REVISION',
+env_vars.Add(
+ 'REVISION',
help='Base git revision',
- default='')
+ default='',
+)
-env_vars.Add('ENTERPRISE_REV',
+env_vars.Add(
+ 'ENTERPRISE_REV',
help='Base git revision of enterprise modules',
- default='')
+ default='',
+)
# Note: This probably is only really meaningful when configured via a variables file. It will
# also override whatever the SCons platform defaults would be.
-env_vars.Add('ENV',
- help='Sets the environment for subprocesses')
+env_vars.Add(
+ 'ENV',
+ help='Sets the environment for subprocesses',
+)
-env_vars.Add('FRAMEWORKPATH',
+env_vars.Add(
+ 'FRAMEWORKPATH',
help='Adds paths to the linker search path for darwin frameworks',
- converter=variable_shlex_converter)
+ converter=variable_shlex_converter,
+)
-env_vars.Add('FRAMEWORKS',
+env_vars.Add(
+ 'FRAMEWORKS',
help='Adds extra darwin frameworks to link against',
- converter=variable_shlex_converter)
+ converter=variable_shlex_converter,
+)
-env_vars.Add('HOST_ARCH',
+env_vars.Add(
+ 'HOST_ARCH',
help='Sets the native architecture of the compiler',
converter=variable_arch_converter,
- default=None)
+ default=None,
+)
-env_vars.Add('ICECC',
- help='Tells SCons where icecream icecc tool is')
+env_vars.Add(
+ 'ICECC',
+ help='Tells SCons where icecream icecc tool is',
+)
-env_vars.Add('ICERUN',
- help='Tells SCons where icecream icerun tool is')
+env_vars.Add(
+ 'ICERUN',
+ help='Tells SCons where icecream icerun tool is',
+)
-env_vars.Add('ICECC_CREATE_ENV',
+env_vars.Add(
+ 'ICECC_CREATE_ENV',
help='Tells SCons where icecc-create-env tool is',
- default='icecc-create-env')
+ default='icecc-create-env',
+)
-env_vars.Add('ICECC_DEBUG',
+env_vars.Add(
+ 'ICECC_DEBUG',
help='Tell ICECC to create debug logs (auto, on/off true/false 1/0)',
- default=False)
+ default=False,
+)
-env_vars.Add('ICECC_SCHEDULER',
- help='Tells ICECC where the scheduler daemon is running')
+env_vars.Add(
+ 'ICECC_SCHEDULER',
+ help='Tells ICECC where the scheduler daemon is running',
+)
-env_vars.Add('ICECC_VERSION',
- help='Tells ICECC where the compiler package is')
+env_vars.Add(
+ 'ICECC_VERSION',
+ help='Tells ICECC where the compiler package is',
+)
-env_vars.Add('ICECC_VERSION_ARCH',
- help='Tells ICECC the target architecture for the compiler package, if non-native')
+env_vars.Add(
+ 'ICECC_VERSION_ARCH',
+ help='Tells ICECC the target architecture for the compiler package, if non-native',
+)
-env_vars.Add('LIBPATH',
+env_vars.Add(
+ 'LIBPATH',
help='Adds paths to the linker search path',
- converter=variable_shlex_converter)
+ converter=variable_shlex_converter,
+)
-env_vars.Add('LIBS',
+env_vars.Add(
+ 'LIBS',
help='Adds extra libraries to link against',
- converter=variable_shlex_converter)
+ converter=variable_shlex_converter,
+)
-env_vars.Add('LINKFLAGS',
+env_vars.Add(
+ 'LINKFLAGS',
help='Sets flags for the linker',
- converter=variable_shlex_converter)
+ converter=variable_shlex_converter,
+)
-env_vars.Add('LLVM_SYMBOLIZER',
- help='Name of or path to the LLVM symbolizer')
+env_vars.Add(
+ 'LLVM_SYMBOLIZER',
+ help='Name of or path to the LLVM symbolizer',
+)
-env_vars.Add('MAXLINELENGTH',
+env_vars.Add(
+ 'MAXLINELENGTH',
help='Maximum line length before using temp files',
# This is very small, but appears to be the least upper bound
# across our platforms.
#
# See https://support.microsoft.com/en-us/help/830473/command-prompt-cmd.-exe-command-line-string-limitation
- default=4095)
+ default=4095,
+)
# Note: This is only really meaningful when configured via a variables file. See the
# default_buildinfo_environment_data() function for examples of how to use this.
-env_vars.Add('MONGO_BUILDINFO_ENVIRONMENT_DATA',
+env_vars.Add(
+ 'MONGO_BUILDINFO_ENVIRONMENT_DATA',
help='Sets the info returned from the buildInfo command and --version command-line flag',
- default=mongo_generators.default_buildinfo_environment_data())
+ default=mongo_generators.default_buildinfo_environment_data(),
+)
-env_vars.Add('MONGO_DIST_SRC_PREFIX',
+env_vars.Add(
+ 'MONGO_DIST_SRC_PREFIX',
help='Sets the prefix for files in the source distribution archive',
converter=variable_distsrc_converter,
- default="mongodb-src-r${MONGO_VERSION}")
+ default="mongodb-src-r${MONGO_VERSION}",
+)
-env_vars.Add('MONGO_DISTARCH',
+env_vars.Add(
+ 'MONGO_DISTARCH',
help='Adds a string representing the target processor architecture to the dist archive',
- default='$TARGET_ARCH')
+ default='$TARGET_ARCH',
+)
-env_vars.Add('MONGO_DISTMOD',
+env_vars.Add(
+ 'MONGO_DISTMOD',
help='Adds a string that will be embedded in the dist archive naming',
- default='')
+ default='',
+)
-env_vars.Add('MONGO_DISTNAME',
+env_vars.Add(
+ 'MONGO_DISTNAME',
help='Sets the version string to be used in dist archive naming',
- default='$MONGO_VERSION')
+ default='$MONGO_VERSION',
+)
+
def validate_mongo_version(key, val, env):
valid_version_re = re.compile(r'^(\d+)\.(\d+)\.(\d+)-?((?:(rc)(\d+))?.*)?$', re.MULTILINE)
invalid_version_re = re.compile(r'^0\.0\.0(?:-.*)?', re.MULTILINE)
if not valid_version_re.match(val) or invalid_version_re.match(val):
- print(("Invalid MONGO_VERSION '{}', or could not derive from version.json or git metadata. Please add a conforming MONGO_VERSION=x.y.z[-extra] as an argument to SCons".format(val)))
+ print((
+ "Invalid MONGO_VERSION '{}', or could not derive from version.json or git metadata. Please add a conforming MONGO_VERSION=x.y.z[-extra] as an argument to SCons"
+ .format(val)))
Exit(1)
-env_vars.Add('MONGO_VERSION',
+
+env_vars.Add(
+ 'MONGO_VERSION',
help='Sets the version string for MongoDB',
default=version_data['version'],
- validator=validate_mongo_version)
+ validator=validate_mongo_version,
+)
-env_vars.Add('MONGO_GIT_HASH',
+env_vars.Add(
+ 'MONGO_GIT_HASH',
help='Sets the githash to store in the MongoDB version information',
- default=version_data['githash'])
+ default=version_data['githash'],
+)
-env_vars.Add('MSVC_USE_SCRIPT',
- help='Sets the script used to setup Visual Studio.')
+env_vars.Add(
+ 'MSVC_USE_SCRIPT',
+ help='Sets the script used to setup Visual Studio.',
+)
-env_vars.Add('MSVC_VERSION',
+env_vars.Add(
+ 'MSVC_VERSION',
help='Sets the version of Visual C++ to use (e.g. 14.2 for VS2019, 14.3 for VS2022)',
- default="14.3")
+ default="14.3",
+)
-env_vars.Add('NINJA_BUILDDIR',
+env_vars.Add(
+ 'NINJA_BUILDDIR',
help="Location for shared Ninja state",
default="$BUILD_DIR/ninja",
)
-env_vars.Add('NINJA_PREFIX',
+env_vars.Add(
+ 'NINJA_PREFIX',
default="build",
help="""A prefix to add to the beginning of generated ninja
files. Useful for when compiling multiple build ninja files for
@@ -977,11 +1162,11 @@ Will generate the files (respectively):
Defaults to build. Best used with the generate-ninja alias so you don't have to
reiterate the prefix in the target name and variable.
-""")
-
+""",
+)
-env_vars.Add('NINJA_SUFFIX',
- help="""A suffix to add to the end of generated build.ninja
+env_vars.Add(
+ 'NINJA_SUFFIX', help="""A suffix to add to the end of generated build.ninja
files. Useful for when compiling multiple build ninja files for
different configurations, for instance:
@@ -994,97 +1179,134 @@ Will generate the files (respectively):
build.ninja.tsan
""")
-env_vars.Add('__NINJA_NO',
+env_vars.Add(
+ '__NINJA_NO',
help="Disables the Ninja tool unconditionally. Not intended for human use.",
- default=0)
-
+ default=0,
+)
-env_vars.Add('OBJCOPY',
+env_vars.Add(
+ 'OBJCOPY',
help='Sets the path to objcopy',
- default=WhereIs('objcopy'))
-
+ default=WhereIs('objcopy'),
+)
-env_vars.Add('PKGDIR',
+env_vars.Add(
+ 'PKGDIR',
help='Directory in which to build packages and archives',
- default='$BUILD_DIR/pkgs')
+ default='$BUILD_DIR/pkgs',
+)
-env_vars.Add('PREFIX',
+env_vars.Add(
+ 'PREFIX',
help='Final installation location of files. Will be made into a sub dir of $DESTDIR',
- default='.')
+ default='.',
+)
# Exposed to be able to cross compile Android/*nix from Windows without ending up with the .exe suffix.
-env_vars.Add('PROGSUFFIX',
- help='Sets the suffix for built executable files')
+env_vars.Add(
+ 'PROGSUFFIX',
+ help='Sets the suffix for built executable files',
+)
-env_vars.Add('RPATH',
+env_vars.Add(
+ 'RPATH',
help='Set the RPATH for dynamic libraries and executables',
- converter=variable_shlex_converter)
+ converter=variable_shlex_converter,
+)
-env_vars.Add('SHCCFLAGS',
+env_vars.Add(
+ 'SHCCFLAGS',
help='Sets flags for the C and C++ compiler when building shared libraries',
- converter=variable_shlex_converter)
+ converter=variable_shlex_converter,
+)
-env_vars.Add('SHCFLAGS',
+env_vars.Add(
+ 'SHCFLAGS',
help='Sets flags for the C compiler when building shared libraries',
- converter=variable_shlex_converter)
+ converter=variable_shlex_converter,
+)
-env_vars.Add('SHCXXFLAGS',
+env_vars.Add(
+ 'SHCXXFLAGS',
help='Sets flags for the C++ compiler when building shared libraries',
- converter=variable_shlex_converter)
+ converter=variable_shlex_converter,
+)
-env_vars.Add('SHELL',
- help='Picks the shell to use when spawning commands')
+env_vars.Add(
+ 'SHELL',
+ help='Picks the shell to use when spawning commands',
+)
-env_vars.Add('SHLINKFLAGS',
+env_vars.Add(
+ 'SHLINKFLAGS',
help='Sets flags for the linker when building shared libraries',
- converter=variable_shlex_converter)
+ converter=variable_shlex_converter,
+)
-env_vars.Add('SHLINKFLAGS_EXTRA',
- help='Adds additional flags for shared links without overwriting tool configured SHLINKFLAGS values',
- converter=variable_shlex_converter)
+env_vars.Add(
+ 'SHLINKFLAGS_EXTRA',
+ help=
+ 'Adds additional flags for shared links without overwriting tool configured SHLINKFLAGS values',
+ converter=variable_shlex_converter,
+)
-env_vars.Add('STRIP',
+env_vars.Add(
+ 'STRIP',
help='Path to the strip utility (non-darwin platforms probably use OBJCOPY for this)',
)
-env_vars.Add('TAPI',
- help="Configures the path to the 'tapi' (an Xcode) utility")
+env_vars.Add(
+ 'TAPI',
+ help="Configures the path to the 'tapi' (an Xcode) utility",
+)
-env_vars.Add('TARGET_ARCH',
+env_vars.Add(
+ 'TARGET_ARCH',
help='Sets the architecture to build for',
converter=variable_arch_converter,
- default=None)
+ default=None,
+)
-env_vars.Add('TARGET_OS',
+env_vars.Add(
+ 'TARGET_OS',
help='Sets the target OS to build for',
- default=mongo_platform.get_running_os_name())
+ default=mongo_platform.get_running_os_name(),
+)
-env_vars.Add('TOOLS',
+env_vars.Add(
+ 'TOOLS',
help='Sets the list of SCons tools to add to the environment',
converter=variable_tools_converter,
- default=decide_platform_tools())
+ default=decide_platform_tools(),
+)
-env_vars.Add('VARIANT_DIR',
+env_vars.Add(
+ 'VARIANT_DIR',
help='Sets the name (or generator function) for the variant directory',
default=mongo_generators.default_variant_dir_generator,
)
-env_vars.Add('VERBOSE',
+env_vars.Add(
+ 'VERBOSE',
help='Controls build verbosity (auto, on/off true/false 1/0)',
default='auto',
)
-env_vars.Add('WINDOWS_OPENSSL_BIN',
+env_vars.Add(
+ 'WINDOWS_OPENSSL_BIN',
help='Sets the path to the openssl binaries for packaging',
- default='c:/openssl/bin')
+ default='c:/openssl/bin',
+)
# TODO SERVER-42170 switch to PathIsDirCreate validator
-env_vars.Add(PathVariable(
+env_vars.Add(
+ PathVariable(
"LOCAL_TMPDIR",
help='Set the TMPDIR when running tests.',
default='$BUILD_ROOT/tmp_test_data',
- validator=PathVariable.PathAccept
-))
+ validator=PathVariable.PathAccept,
+ ), )
# -- Validate user provided options --
# A dummy environment that should *only* have the variables we have set. In practice it has
@@ -1106,7 +1328,9 @@ if GetOption('help'):
try:
Help('\nThe following variables may also be set like scons VARIABLE=value\n', append=True)
Help(env_vars.GenerateHelpText(variables_only_env), append=True)
- Help('\nThe \'list-targets\' target can be built to list useful comprehensive build targets\n', append=True)
+ Help(
+ '\nThe \'list-targets\' target can be built to list useful comprehensive build targets\n',
+ append=True)
except TypeError:
# The append=true kwarg is only supported in scons>=2.4. Without it, calls to Help() clobber
# the automatically generated options help, which we don't want. Users on older scons
@@ -1140,29 +1364,32 @@ if cacheDir[0] not in ['$', '#']:
sconsDataDir = Dir(buildDir).Dir('scons')
SConsignFile(str(sconsDataDir.File('sconsign.py3')))
+
def printLocalInfo():
import sys, SCons
- print(( "scons version: " + SCons.__version__ ))
- print(( "python version: " + " ".join( [ repr(i) for i in sys.version_info ] ) ))
+ print(("scons version: " + SCons.__version__))
+ print(("python version: " + " ".join([repr(i) for i in sys.version_info])))
+
printLocalInfo()
-boostLibs = [ "filesystem", "program_options", "system", "iostreams", "thread", "log" ]
+boostLibs = ["filesystem", "program_options", "system", "iostreams", "thread", "log"]
-onlyServer = len( COMMAND_LINE_TARGETS ) == 0 or ( len( COMMAND_LINE_TARGETS ) == 1 and str( COMMAND_LINE_TARGETS[0] ) in [ "mongod" , "mongos" , "test" ] )
+onlyServer = len(COMMAND_LINE_TARGETS) == 0 or (len(COMMAND_LINE_TARGETS) == 1 and str(
+ COMMAND_LINE_TARGETS[0]) in ["mongod", "mongos", "test"])
releaseBuild = has_option("release")
dbg_opt_mapping = {
# --dbg, --opt : dbg opt
- ( "on", None ) : ( True, False ), # special case interaction
- ( "on", "on" ) : ( True, True ),
- ( "on", "off" ) : ( True, False ),
- ( "off", None ) : ( False, True ),
- ( "off", "on" ) : ( False, True ),
- ( "off", "off" ) : ( False, False ),
- ( "on", "size" ) : ( True, True ),
- ( "off", "size" ) : ( False, True ),
+ ("on", None): (True, False), # special case interaction
+ ("on", "on"): (True, True),
+ ("on", "off"): (True, False),
+ ("off", None): (False, True),
+ ("off", "on"): (False, True),
+ ("off", "off"): (False, False),
+ ("on", "size"): (True, True),
+ ("off", "size"): (False, True),
}
debugBuild, optBuild = dbg_opt_mapping[(get_option('dbg'), get_option('opt'))]
optBuildForSize = True if optBuild and get_option('opt') == "size" else False
@@ -1171,11 +1398,11 @@ if releaseBuild and (debugBuild or not optBuild):
print("Error: A --release build may not have debugging, and must have optimization")
Exit(1)
-noshell = has_option( "noshell" )
+noshell = has_option("noshell")
-jsEngine = get_option( "js-engine")
+jsEngine = get_option("js-engine")
-serverJs = get_option( "server-js" ) == "on"
+serverJs = get_option("server-js") == "on"
if not serverJs and not jsEngine:
print("Warning: --server-js=off is not needed with --js-engine=none")
@@ -1189,28 +1416,29 @@ if not serverJs and not jsEngine:
# also need an Environment variable for the argument to --build-dir, which is the parent of all
# variant dirs. For now, we call that BUILD_ROOT. If and when we s/BUILD_DIR/VARIANT_DIR/g,
# then also s/BUILD_ROOT/BUILD_DIR/g.
-envDict = dict(BUILD_ROOT=buildDir,
- BUILD_DIR=make_variant_dir_generator(),
- DIST_ARCHIVE_SUFFIX='.tgz',
- MODULE_BANNERS=[],
- MODULE_INJECTORS=dict(),
- PYTHON="$( {} $)".format(sys.executable),
- SERVER_ARCHIVE='${SERVER_DIST_BASENAME}${DIST_ARCHIVE_SUFFIX}',
- UNITTEST_ALIAS='install-unittests',
- # TODO: Move unittests.txt to $BUILD_DIR, but that requires
- # changes to MCI.
- UNITTEST_LIST='$BUILD_ROOT/unittests.txt',
- LIBFUZZER_TEST_ALIAS='install-fuzzertests',
- LIBFUZZER_TEST_LIST='$BUILD_ROOT/libfuzzer_tests.txt',
- INTEGRATION_TEST_ALIAS='install-integration-tests',
- INTEGRATION_TEST_LIST='$BUILD_ROOT/integration_tests.txt',
- BENCHMARK_ALIAS='install-benchmarks',
- BENCHMARK_LIST='$BUILD_ROOT/benchmarks.txt',
- CONFIGUREDIR='$BUILD_ROOT/scons/$VARIANT_DIR/sconf_temp',
- CONFIGURELOG='$BUILD_ROOT/scons/config.log',
- CONFIG_HEADER_DEFINES={},
- LIBDEPS_TAG_EXPANSIONS=[],
- )
+envDict = dict(
+ BUILD_ROOT=buildDir,
+ BUILD_DIR=make_variant_dir_generator(),
+ DIST_ARCHIVE_SUFFIX='.tgz',
+ MODULE_BANNERS=[],
+ MODULE_INJECTORS=dict(),
+ PYTHON="$( {} $)".format(sys.executable),
+ SERVER_ARCHIVE='${SERVER_DIST_BASENAME}${DIST_ARCHIVE_SUFFIX}',
+ UNITTEST_ALIAS='install-unittests',
+ # TODO: Move unittests.txt to $BUILD_DIR, but that requires
+ # changes to MCI.
+ UNITTEST_LIST='$BUILD_ROOT/unittests.txt',
+ LIBFUZZER_TEST_ALIAS='install-fuzzertests',
+ LIBFUZZER_TEST_LIST='$BUILD_ROOT/libfuzzer_tests.txt',
+ INTEGRATION_TEST_ALIAS='install-integration-tests',
+ INTEGRATION_TEST_LIST='$BUILD_ROOT/integration_tests.txt',
+ BENCHMARK_ALIAS='install-benchmarks',
+ BENCHMARK_LIST='$BUILD_ROOT/benchmarks.txt',
+ CONFIGUREDIR='$BUILD_ROOT/scons/$VARIANT_DIR/sconf_temp',
+ CONFIGURELOG='$BUILD_ROOT/scons/config.log',
+ CONFIG_HEADER_DEFINES={},
+ LIBDEPS_TAG_EXPANSIONS=[],
+)
# By default, we will get the normal SCons tool search. But if the
# user has opted into the next gen tools, add our experimental tool
@@ -1238,6 +1466,7 @@ if get_option('cache-signature-mode') == 'validate':
if sys.stdout.isatty():
Progress(['-\r', '\\\r', '|\r', '/\r'], interval=50)
+
# We are going to start running conf tests soon, so setup
# --disable-warnings-as-errors as soon as possible.
def create_werror_generator(flagname):
@@ -1255,6 +1484,7 @@ def create_werror_generator(flagname):
return generator
+
env.Append(
CCFLAGS=['$CCFLAGS_GENERATE_WERROR'],
CCFLAGS_GENERATE_WERROR=create_werror_generator('$CCFLAGS_WERROR'),
@@ -1264,7 +1494,6 @@ env.Append(
LINKFLAGS_GENERATE_WERROR=create_werror_generator('$LINKFLAGS_WERROR'),
)
-
for var in ['CC', 'CXX']:
if var not in env:
continue
@@ -1291,9 +1520,11 @@ def conf_error(env, msg, *args):
print("See {0} for details".format(env.File('$CONFIGURELOG').abspath))
Exit(1)
+
env.AddMethod(fatal_error, 'FatalError')
env.AddMethod(conf_error, 'ConfError')
+
def to_boolean(s):
if isinstance(s, bool):
return s
@@ -1303,6 +1534,7 @@ def to_boolean(s):
return False
raise ValueError(f'Invalid value {s}, must be a boolean-like string')
+
# Normalize the VERBOSE Option, and make its value available as a
# function.
if env['VERBOSE'] == "auto":
@@ -1332,8 +1564,11 @@ if get_option('install-action') != 'default' and get_option('ninja') != "disable
env.FatalError("Cannot use non-default install actions when generating Ninja.")
install_actions.setup(env, get_option('install-action'))
-def set_config_header_define(env, varname, varval = 1):
+
+def set_config_header_define(env, varname, varval=1):
env['CONFIG_HEADER_DEFINES'][varname] = varval
+
+
env.AddMethod(set_config_header_define, 'SetConfigHeaderDefine')
detectEnv = env.Clone()
@@ -1344,9 +1579,10 @@ detectEnv = env.Clone()
toolchain_macros = {
'GCC': 'defined(__GNUC__) && !defined(__clang__)',
'clang': 'defined(__clang__)',
- 'MSVC': 'defined(_MSC_VER)'
+ 'MSVC': 'defined(_MSC_VER)',
}
+
def CheckForToolchain(context, toolchain, lang_name, compiler_var, source_suffix):
test_body = textwrap.dedent("""
#if {0}
@@ -1367,20 +1603,22 @@ def CheckForToolchain(context, toolchain, lang_name, compiler_var, source_suffix
context.Result(result)
return result
-endian = get_option( "endian" )
+
+endian = get_option("endian")
if endian == "auto":
endian = sys.byteorder
processor_macros = {
- 'aarch64' : { 'endian': 'little', 'check': '(defined(__arm64__) || defined(__aarch64__))' },
- 'emscripten' : { 'endian': 'little', 'check': '(defined(__EMSCRIPTEN__))' },
- 'ppc64le' : { 'endian': 'little', 'check': '(defined(__powerpc64__))' },
- 'riscv64' : { 'endian': 'little', 'check': '(defined(__riscv)) && (__riscv_xlen == 64)' },
- 's390x' : { 'endian': 'big', 'check': '(defined(__s390x__))' },
- 'x86_64' : { 'endian': 'little', 'check': '(defined(__x86_64) || defined(_M_AMD64))' },
+ 'aarch64': {'endian': 'little', 'check': '(defined(__arm64__) || defined(__aarch64__))'},
+ 'emscripten': {'endian': 'little', 'check': '(defined(__EMSCRIPTEN__))'},
+ 'ppc64le': {'endian': 'little', 'check': '(defined(__powerpc64__))'},
+ 'riscv64': {'endian': 'little', 'check': '(defined(__riscv)) && (__riscv_xlen == 64)'},
+ 's390x': {'endian': 'big', 'check': '(defined(__s390x__))'},
+ 'x86_64': {'endian': 'little', 'check': '(defined(__x86_64) || defined(_M_AMD64))'},
}
+
def CheckForProcessor(context, which_arch):
def run_compile_check(arch):
if not endian == processor_macros[arch]['endian']:
@@ -1411,6 +1649,7 @@ def CheckForProcessor(context, which_arch):
context.Result('Could not detect processor model/architecture')
return False
+
# Taken from http://nadeausoftware.com/articles/2012/01/c_c_tip_how_use_compiler_predefined_macros_detect_operating_system
os_macros = {
"windows": "defined(_WIN32)",
@@ -1433,6 +1672,7 @@ os_macros = {
"emscripten": "defined(__EMSCRIPTEN__)",
}
+
def CheckForOS(context, which_os):
test_body = """
#if defined(__APPLE__)
@@ -1449,6 +1689,7 @@ def CheckForOS(context, which_os):
context.Result(ret)
return ret
+
def CheckForCXXLink(context):
test_body = """
#include <iostream>
@@ -1464,16 +1705,20 @@ def CheckForCXXLink(context):
context.Result(ret)
return ret
-detectSystem = Configure(detectEnv, help=False, custom_tests = {
- 'CheckForToolchain' : CheckForToolchain,
- 'CheckForProcessor': CheckForProcessor,
- 'CheckForOS': CheckForOS,
-})
+detectSystem = Configure(
+ detectEnv,
+ help=False,
+ custom_tests={
+ 'CheckForToolchain': CheckForToolchain,
+ 'CheckForProcessor': CheckForProcessor,
+ 'CheckForOS': CheckForOS,
+ },
+)
-toolchain_search_sequence = [ "GCC", "clang" ]
+toolchain_search_sequence = ["GCC", "clang"]
if mongo_platform.is_running_os('windows'):
- toolchain_search_sequence = [ 'MSVC', 'clang', 'GCC' ]
+ toolchain_search_sequence = ['MSVC', 'clang', 'GCC']
detected_toolchain = None
for candidate_toolchain in toolchain_search_sequence:
@@ -1487,11 +1732,14 @@ if not detected_toolchain:
if not detectSystem.CheckForToolchain(detected_toolchain, "C", "CC", ".c"):
env.ConfError("C compiler does not match identified C++ compiler")
+
# Now that we've detected the toolchain, we add methods to the env
# to get the canonical name of the toolchain and to test whether
# scons is using a particular toolchain.
def get_toolchain_name(self):
return detected_toolchain.lower()
+
+
def is_toolchain(self, *args):
actual_toolchain = self.ToolchainName()
for v in args:
@@ -1499,6 +1747,7 @@ def is_toolchain(self, *args):
return True
return False
+
env.AddMethod(get_toolchain_name, 'ToolchainName')
env.AddMethod(is_toolchain, 'ToolchainIs')
@@ -1523,17 +1772,19 @@ if env.TargetOSIs('posix'):
env.Append(
CCFLAGS_WERROR=["-Werror"],
CXXFLAGS_WERROR=['-Werror=unused-result'] if env.ToolchainIs('clang') else [],
- LINKFLAGS_WERROR=['-Wl,-fatal_warnings' if env.TargetOSIs('darwin') else "-Wl,--fatal-warnings"],
+ LINKFLAGS_WERROR=[
+ '-Wl,-fatal_warnings' if env.TargetOSIs('darwin') else "-Wl,--fatal-warnings"
+ ],
)
elif env.TargetOSIs('windows'):
- env.Append(
- CCFLAGS_WERROR=["/WX"]
- )
+ env.Append(CCFLAGS_WERROR=["/WX"])
if env.ToolchainIs('clang'):
+
def assembler_with_cpp_gen(target, source, env, for_signature):
if source[0].get_suffix() == '.sx':
return '-x assembler-with-cpp'
+
env['CLANG_ASSEMBLER_WITH_CPP'] = assembler_with_cpp_gen
env.Append(ASFLAGS=['$CLANG_ASSEMBLER_WITH_CPP'])
@@ -1579,7 +1830,9 @@ if link_model == "auto":
link_model = "static"
if link_model.startswith('dynamic') and get_option('install-action') == 'symlink':
- env.FatalError(f"Options '--link-model={link_model}' not supported with '--install-action={get_option('install-action')}'.")
+ env.FatalError(
+ f"Options '--link-model={link_model}' not supported with '--install-action={get_option('install-action')}'."
+ )
# libunwind configuration.
# In which the following globals are set and normalized to bool:
@@ -1590,8 +1843,8 @@ use_libunwind = get_option("use-libunwind")
use_system_libunwind = use_system_version_of_library("libunwind")
# Assume system libunwind works if it's installed and selected.
-can_use_libunwind = (use_system_libunwind or
- env.TargetOSIs('linux') and (env['TARGET_ARCH'] in ('x86_64', 'aarch64', 'ppc64le', 's390x')))
+can_use_libunwind = (use_system_libunwind or env.TargetOSIs('linux') and
+ (env['TARGET_ARCH'] in ('x86_64', 'aarch64', 'ppc64le', 's390x')))
if use_libunwind == "off":
use_libunwind = False
@@ -1612,7 +1865,8 @@ if use_libunwind == True:
env.SetConfigHeaderDefine("MONGO_CONFIG_USE_LIBUNWIND")
if get_option('visibility-support') == 'auto':
- visibility_annotations_enabled = (not env.TargetOSIs('windows') and link_model.startswith("dynamic"))
+ visibility_annotations_enabled = (not env.TargetOSIs('windows')
+ and link_model.startswith("dynamic"))
else:
visibility_annotations_enabled = get_option('visibility-support') == 'on'
@@ -1620,7 +1874,8 @@ else:
# we have annotated functions for export.
if env.TargetOSIs('windows') and not visibility_annotations_enabled:
if link_model not in ['object', 'static', 'dynamic-sdk']:
- env.FatalError("Windows builds must use the 'object', 'dynamic-sdk', or 'static' link models")
+ env.FatalError(
+ "Windows builds must use the 'object', 'dynamic-sdk', or 'static' link models")
# The 'object' mode for libdeps is enabled by setting _LIBDEPS to $_LIBDEPS_OBJS. The other two
# modes operate in library mode, enabled by setting _LIBDEPS to $_LIBDEPS_LIBS.
@@ -1658,6 +1913,7 @@ if link_model.startswith("dynamic"):
if env.get('MONGO_API_NAME'):
return "-fvisibility=hidden"
return None
+
if not env.TargetOSIs('windows'):
env['MONGO_VISIBILITY_SHCCFLAGS_GENERATOR'] = visibility_shccflags_generator
@@ -1742,26 +1998,30 @@ if link_model.startswith("dynamic"):
# Darwin is strict by default
pass
else:
+
def libdeps_tags_expand_incomplete(source, target, env, for_signature):
# On darwin, since it is strict by default, we need to add a flag
# when libraries are tagged incomplete.
- if ('illegal_cyclic_or_unresolved_dependencies_allowlisted'
- in target[0].get_env().get("LIBDEPS_TAGS", [])):
+ if ('illegal_cyclic_or_unresolved_dependencies_allowlisted' in
+ target[0].get_env().get("LIBDEPS_TAGS", [])):
return ["-Wl,-undefined,dynamic_lookup"]
return []
+
env['LIBDEPS_TAG_EXPANSIONS'].append(libdeps_tags_expand_incomplete)
elif env.TargetOSIs('windows'):
if link_model == "dynamic-strict":
# Windows is strict by default
pass
else:
+
def libdeps_tags_expand_incomplete(source, target, env, for_signature):
# On windows, since it is strict by default, we need to add a flag
# when libraries are tagged incomplete.
- if ('illegal_cyclic_or_unresolved_dependencies_allowlisted'
- in target[0].get_env().get("LIBDEPS_TAGS", [])):
+ if ('illegal_cyclic_or_unresolved_dependencies_allowlisted' in
+ target[0].get_env().get("LIBDEPS_TAGS", [])):
return ["/FORCE:UNRESOLVED"]
return []
+
env['LIBDEPS_TAG_EXPANSIONS'].append(libdeps_tags_expand_incomplete)
else:
env.AppendUnique(LINKFLAGS=["-Wl,--no-as-needed"])
@@ -1776,28 +2036,21 @@ if link_model.startswith("dynamic"):
# default, we need to add a flag when libraries are not
# tagged incomplete.
def libdeps_tags_expand_incomplete(source, target, env, for_signature):
- if ('illegal_cyclic_or_unresolved_dependencies_allowlisted'
- not in target[0].get_env().get("LIBDEPS_TAGS", [])):
+ if ('illegal_cyclic_or_unresolved_dependencies_allowlisted' not in
+ target[0].get_env().get("LIBDEPS_TAGS", [])):
return ["-Wl,-z,defs"]
return []
- env['LIBDEPS_TAG_EXPANSIONS'].append(libdeps_tags_expand_incomplete)
+ env['LIBDEPS_TAG_EXPANSIONS'].append(libdeps_tags_expand_incomplete)
if optBuild:
env.SetConfigHeaderDefine("MONGO_CONFIG_OPTIMIZED_BUILD")
-
# Enable the fast decider if explicitly requested or if in 'auto' mode
# and not in conflict with other options like the ninja option which
# sets its own decider.
-if (
- get_option('ninja') == 'disabled' and
- get_option('build-fast-and-loose') == 'on' or
- (
- get_option('build-fast-and-loose') == 'auto' and
- not has_option('release')
- )
-):
+if (get_option('ninja') == 'disabled' and get_option('build-fast-and-loose') == 'on'
+ or (get_option('build-fast-and-loose') == 'auto' and not has_option('release'))):
# See http://www.scons.org/wiki/GoFastButton for details
env.Decider('MD5-timestamp')
env.SetOption('max_drift', 1)
@@ -1830,11 +2083,13 @@ if not env.TargetOSIs('windows'):
# following appends contents of SHLINKFLAGS_EXTRA variable to the linker command
env.AppendUnique(SHLINKFLAGS=['$SHLINKFLAGS_EXTRA'])
+
class ForceVerboseConftest():
"""
This class allows for configurable substitution calls to enable forcing
the conftest to use verbose logs even when verbose mode is not specified.
"""
+
def __init__(self, msg):
self.msg = msg
@@ -1845,17 +2100,18 @@ class ForceVerboseConftest():
return None
return self.msg
+
if not env.Verbose():
# Even though we are not in Verbose mode, conftest logs should
# always be verbose, because they go to a file and not seen
# by the user anyways.
- env.Append( CCCOMSTR = ForceVerboseConftest("Compiling $TARGET") )
- env.Append( CXXCOMSTR = ForceVerboseConftest(env["CCCOMSTR"] ) )
- env.Append( SHCCCOMSTR = ForceVerboseConftest("Compiling $TARGET" ) )
- env.Append( SHCXXCOMSTR = ForceVerboseConftest(env["SHCCCOMSTR"] ) )
- env.Append( LINKCOMSTR = ForceVerboseConftest("Linking $TARGET" ) )
- env.Append( SHLINKCOMSTR = ForceVerboseConftest(env["LINKCOMSTR"] ) )
- env.Append( ARCOMSTR = ForceVerboseConftest("Generating library $TARGET" ) )
+ env.Append(CCCOMSTR=ForceVerboseConftest("Compiling $TARGET"))
+ env.Append(CXXCOMSTR=ForceVerboseConftest(env["CCCOMSTR"]))
+ env.Append(SHCCCOMSTR=ForceVerboseConftest("Compiling $TARGET"))
+ env.Append(SHCXXCOMSTR=ForceVerboseConftest(env["SHCCCOMSTR"]))
+ env.Append(LINKCOMSTR=ForceVerboseConftest("Linking $TARGET"))
+ env.Append(SHLINKCOMSTR=ForceVerboseConftest(env["LINKCOMSTR"]))
+ env.Append(ARCOMSTR=ForceVerboseConftest("Generating library $TARGET"))
# Link tools other than mslink don't setup TEMPFILE in LINKCOM,
# disabling SCons automatically falling back to a temp file when
@@ -1904,7 +2160,8 @@ if env['_LIBDEPS'] == '$_LIBDEPS_OBJS':
# we're expecting a Python function to execute here instead of
# pretending to be a CommandAction that just happens to not run a
# command but instead runs a function.
- env["BUILDERS"]["StaticLibrary"].action = SCons.Action.Action(write_uuid_to_file, "Generating placeholder library $TARGET")
+ env["BUILDERS"]["StaticLibrary"].action = SCons.Action.Action(
+ write_uuid_to_file, "Generating placeholder library $TARGET")
import libdeps
@@ -1912,7 +2169,8 @@ libdeps.setup_environment(
env,
emitting_shared=(link_model.startswith("dynamic")),
debug=get_option('libdeps-debug'),
- linting=get_option('libdeps-linting'))
+ linting=get_option('libdeps-linting'),
+)
# Both the abidw tool and the thin archive tool must be loaded after
# libdeps, so that the scanners they inject can see the library
@@ -1954,7 +2212,7 @@ elif env.TargetOSIs('solaris'):
elif env.TargetOSIs('windows'):
env['LINK_WHOLE_ARCHIVE_LIB_START'] = '/WHOLEARCHIVE'
env['LINK_WHOLE_ARCHIVE_LIB_END'] = ''
- env['LIBDEPS_FLAG_SEPARATORS'] = {env['LINK_WHOLE_ARCHIVE_LIB_START']:{'suffix':':'}}
+ env['LIBDEPS_FLAG_SEPARATORS'] = {env['LINK_WHOLE_ARCHIVE_LIB_START']: {'suffix': ':'}}
if env.TargetOSIs('darwin') and link_model.startswith('dynamic'):
@@ -1971,16 +2229,22 @@ if env.TargetOSIs('darwin') and link_model.startswith('dynamic'):
env['LIBDEPS_TAG_EXPANSIONS'].append(init_no_global_libdeps_tag_expansion)
+
def init_no_global_add_flags(target, start_flag, end_flag):
""" Helper function for init_no_global_libdeps_tag_expand"""
setattr(target[0].attributes, "libdeps_prefix_flags", [start_flag])
setattr(target[0].attributes, "libdeps_postfix_flags", [end_flag])
if env.TargetOSIs('linux', 'freebsd', 'openbsd'):
- setattr(target[0].attributes, "libdeps_switch_flags", [{
- 'on':start_flag,
- 'off':end_flag
- }])
+ setattr(
+ target[0].attributes,
+ "libdeps_switch_flags",
+ [{
+ 'on': start_flag,
+ 'off': end_flag,
+ }],
+ )
+
def init_no_global_libdeps_tag_emitter(target, source, env):
"""
@@ -1997,7 +2261,7 @@ def init_no_global_libdeps_tag_emitter(target, source, env):
# initializers can supply the flag and be wrapped in --as-needed linking,
# allowing the linker to be smart about linking libraries it may not need.
if ("init-no-global-side-effects" in env.get(libdeps.Constants.LibdepsTags, [])
- and not env.TargetOSIs('darwin')):
+ and not env.TargetOSIs('darwin')):
init_no_global_add_flags(target, start_flag, end_flag)
else:
init_no_global_add_flags(target, "", "")
@@ -2016,6 +2280,7 @@ def init_no_global_libdeps_tag_emitter(target, source, env):
init_no_global_add_flags(target, "", "")
return target, source
+
for target_builder in ['SharedLibrary', 'SharedArchive', 'StaticLibrary']:
builder = env['BUILDERS'][target_builder]
base_emitter = builder.emitter
@@ -2023,11 +2288,13 @@ for target_builder in ['SharedLibrary', 'SharedArchive', 'StaticLibrary']:
builder.emitter = new_emitter
link_guard_rules = {
- "test" : ["dist"]
+ "test": ["dist", ],
}
+
class LibdepsLinkGuard(SCons.Errors.UserError):
- pass
+ pass
+
def checkComponentType(target_comps, comp, target, lib):
"""
@@ -2036,14 +2303,15 @@ def checkComponentType(target_comps, comp, target, lib):
"""
for target_comp in target_comps:
for link_guard_rule in link_guard_rules:
- if (target_comp in link_guard_rules[link_guard_rule]
- and link_guard_rule in comp):
- raise LibdepsLinkGuard(textwrap.dedent(f"""\n
+ if (target_comp in link_guard_rules[link_guard_rule] and link_guard_rule in comp):
+ raise LibdepsLinkGuard(
+ textwrap.dedent(f"""\n
LibdepsLinkGuard:
\tTarget '{target[0]}' links LIBDEP '{lib}'
\tbut is listed as AIB_COMPONENT '{target_comp}' which is not allowed link libraries
\twith AIB_COMPONENTS that include the word '{link_guard_rule}'\n"""))
+
def get_comps(env):
"""util function for extracting all AIB_COMPONENTS as a list"""
comps = env.get("AIB_COMPONENTS_EXTRA", [])
@@ -2052,6 +2320,7 @@ def get_comps(env):
comps += [comp]
return comps
+
def link_guard_libdeps_tag_expand(source, target, env, for_signature):
"""
Callback function called on all binaries to check if a certain binary
@@ -2068,6 +2337,7 @@ def link_guard_libdeps_tag_expand(source, target, env, for_signature):
return []
+
env['LIBDEPS_TAG_EXPANSIONS'].append(link_guard_libdeps_tag_expand)
env.Tool('forceincludes')
@@ -2076,8 +2346,7 @@ env.Tool('forceincludes')
if debugBuild:
env.SetConfigHeaderDefine("MONGO_CONFIG_DEBUG_BUILD")
else:
- env.AppendUnique( CPPDEFINES=[ 'NDEBUG' ] )
-
+ env.AppendUnique(CPPDEFINES=['NDEBUG'])
# Normalize our experimental optimiation and hardening flags
selected_experimental_optimizations = set()
@@ -2099,22 +2368,22 @@ for suboption in get_option('experimental-runtime-hardening'):
selected_experimental_runtime_hardenings.add(suboption[1:])
if env.TargetOSIs('linux'):
- env.Append( LIBS=["m"] )
+ env.Append(LIBS=["m"])
if not env.TargetOSIs('android'):
- env.Append( LIBS=["resolv"] )
+ env.Append(LIBS=["resolv"])
elif env.TargetOSIs('solaris'):
- env.Append( LIBS=["socket","resolv","lgrp"] )
+ env.Append(LIBS=["socket", "resolv", "lgrp"])
elif env.TargetOSIs('freebsd'):
- env.Append( LIBS=[ "kvm" ] )
- env.Append( CCFLAGS=[ "-fno-omit-frame-pointer" ] )
+ env.Append(LIBS=["kvm"])
+ env.Append(CCFLAGS=["-fno-omit-frame-pointer"])
elif env.TargetOSIs('darwin'):
- env.Append( LIBS=["resolv"] )
+ env.Append(LIBS=["resolv"])
elif env.TargetOSIs('openbsd'):
- env.Append( LIBS=[ "kvm" ] )
+ env.Append(LIBS=["kvm"])
elif env.TargetOSIs('windows'):
env['DIST_ARCHIVE_SUFFIX'] = '.zip'
@@ -2130,13 +2399,13 @@ elif env.TargetOSIs('windows'):
env['ENV'] = dict(os.environ)
env.Append(CPPDEFINES=[
- # This tells the Windows compiler not to link against the .lib files
- # and to use boost as a bunch of header-only libraries
+ # This tells the Windows compiler not to link against the .lib files
+ # and to use boost as a bunch of header-only libraries
"BOOST_ALL_NO_LIB",
])
- env.Append( CPPDEFINES=[ "_UNICODE" ] )
- env.Append( CPPDEFINES=[ "UNICODE" ] )
+ env.Append(CPPDEFINES=["_UNICODE"])
+ env.Append(CPPDEFINES=["UNICODE"])
# Temporary fixes to allow compilation with VS2017
env.Append(CPPDEFINES=[
@@ -2153,7 +2422,7 @@ elif env.TargetOSIs('windows'):
# /EHsc exception handling style for visual studio
# /W3 warning level
- env.Append(CCFLAGS=["/EHsc","/W3"])
+ env.Append(CCFLAGS=["/EHsc", "/W3"])
# Suppress some warnings we don't like, or find necessary to
# suppress. Please keep this list alphabetized and commented.
@@ -2221,7 +2490,7 @@ elif env.TargetOSIs('windows'):
# mozjs requires the following
# 'declaration' : no matching operator delete found; memory will not be freed if
# initialization throws an exception
- env.Append( CCFLAGS=["/wd4291"] )
+ env.Append(CCFLAGS=["/wd4291"])
# some warnings we should treat as errors:
# c4013
@@ -2238,7 +2507,7 @@ elif env.TargetOSIs('windows'):
# was probably intended as a variable definition. A common example is accidentally
# declaring a function called lock that takes a mutex when one meant to create a guard
# object called lock on the stack.
- env.Append( CCFLAGS=["/we4013", "/we4099", "/we4930"] )
+ env.Append(CCFLAGS=["/we4013", "/we4099", "/we4930"])
env.Append(CPPDEFINES=[
"_CONSOLE",
@@ -2251,7 +2520,7 @@ elif env.TargetOSIs('windows'):
#env.Append( CCFLAGS=['/Yu"pch.h"'] )
# Don't send error reports in case of internal compiler error
- env.Append( CCFLAGS= ["/errorReport:none"] )
+ env.Append(CCFLAGS=["/errorReport:none"])
# Select debugging format. /Zi gives faster links but seems to use more memory.
if get_option('msvc-debugging-format') == "codeview":
@@ -2259,7 +2528,6 @@ elif env.TargetOSIs('windows'):
elif get_option('msvc-debugging-format') == "pdb":
env['CCPDBFLAGS'] = '/Zi /Fd${TARGET}.pdb'
-
# The SCons built-in pdbGenerator always adds /DEBUG, but we would like
# control over that flag so that users can override with /DEBUG:fastlink
# for better local builds. So we overwrite the builtin.
@@ -2268,6 +2536,7 @@ elif env.TargetOSIs('windows'):
return ['/PDB:%s' % target[0].attributes.pdb]
except (AttributeError, IndexError):
return None
+
env['_PDB'] = pdbGenerator
# /DEBUG will tell the linker to create a .pdb file
@@ -2295,42 +2564,42 @@ elif env.TargetOSIs('windows'):
# calls
optStr = "/O2" if not optBuildForSize else "/O1"
- env.Append( CCFLAGS=[optStr, "/Oy-"] )
- env.Append( LINKFLAGS=["/INCREMENTAL:NO"])
+ env.Append(CCFLAGS=[optStr, "/Oy-"])
+ env.Append(LINKFLAGS=["/INCREMENTAL:NO"])
else:
- env.Append( CCFLAGS=["/Od"] )
+ env.Append(CCFLAGS=["/Od"])
if debugBuild and not optBuild:
# /RTC1: - Enable Stack Frame Run-Time Error Checking; Reports when a variable is used
# without having been initialized (implies /Od: no optimizations)
- env.Append( CCFLAGS=["/RTC1"] )
+ env.Append(CCFLAGS=["/RTC1"])
# Support large object files since some unit-test sources contain a lot of code
- env.Append( CCFLAGS=["/bigobj"] )
+ env.Append(CCFLAGS=["/bigobj"])
# Set Source and Executable character sets to UTF-8, this will produce a warning C4828 if the
# file contains invalid UTF-8.
- env.Append( CCFLAGS=["/utf-8" ])
+ env.Append(CCFLAGS=["/utf-8"])
# Specify standards conformance mode to the compiler.
- env.Append( CCFLAGS=["/permissive-"] )
+ env.Append(CCFLAGS=["/permissive-"])
# Enables the __cplusplus preprocessor macro to report an updated value for recent C++ language
# standards support.
- env.Append( CCFLAGS=["/Zc:__cplusplus"] )
+ env.Append(CCFLAGS=["/Zc:__cplusplus"])
# Tells the compiler to preferentially call global operator delete or operator delete[]
# functions that have a second parameter of type size_t when the size of the object is available.
- env.Append( CCFLAGS=["/Zc:sizedDealloc"] )
+ env.Append(CCFLAGS=["/Zc:sizedDealloc"])
# Treat volatile according to the ISO standard and do not guarantee acquire/release semantics.
- env.Append( CCFLAGS=["/volatile:iso"] )
+ env.Append(CCFLAGS=["/volatile:iso"])
# Tell CL to produce more useful error messages.
- env.Append( CCFLAGS=["/diagnostics:caret"] )
+ env.Append(CCFLAGS=["/diagnostics:caret"])
# This gives 32-bit programs 4 GB of user address space in WOW64, ignored in 64-bit builds.
- env.Append( LINKFLAGS=["/LARGEADDRESSAWARE"] )
+ env.Append(LINKFLAGS=["/LARGEADDRESSAWARE"])
env.Append(
LIBS=[
@@ -2348,8 +2617,7 @@ elif env.TargetOSIs('windows'):
'winmm',
'ws2_32',
'secur32',
- ],
- )
+ ], )
# When building on visual studio, this sets the name of the debug symbols file
if env.ToolchainIs('msvc'):
@@ -2385,12 +2653,10 @@ if env.TargetOSIs('posix'):
# prudent to explicitly add that too, so that C language checks
# see a consistent set of definitions.
if env.TargetOSIs('linux'):
- env.AppendUnique(
- CPPDEFINES=[
- ('_XOPEN_SOURCE', 700),
- '_GNU_SOURCE',
- ],
- )
+ env.AppendUnique(CPPDEFINES=[
+ ('_XOPEN_SOURCE', 700),
+ '_GNU_SOURCE',
+ ], )
# If shared and static object files stripped of their rightmost
# dot-delimited suffix would collide, modify the shared library
@@ -2414,12 +2680,15 @@ if env.TargetOSIs('posix'):
)
# -Winvalid-pch Warn if a precompiled header (see Precompiled Headers) is found in the search path but can't be used.
- env.Append( CCFLAGS=["-fasynchronous-unwind-tables",
- "-ggdb" if not env.TargetOSIs('emscripten') else "-g",
- "-Wall",
- "-Wsign-compare",
- "-Wno-unknown-pragmas",
- "-Winvalid-pch"] )
+ env.Append(
+ CCFLAGS=[
+ "-fasynchronous-unwind-tables",
+ "-ggdb" if not env.TargetOSIs('emscripten') else "-g",
+ "-Wall",
+ "-Wsign-compare",
+ "-Wno-unknown-pragmas",
+ "-Winvalid-pch",
+ ], )
# TODO: At least on x86, glibc as of 2.3.4 will consult the
# .eh_frame info via _Unwind_Backtrace to do backtracing without
@@ -2444,7 +2713,8 @@ if env.TargetOSIs('posix'):
# Enabling hidden visibility on non-darwin requires that we have
# libunwind in play, since glibc backtrace will not work
# correctly.
- if "vishidden" in selected_experimental_optimizations and (env.TargetOSIs('darwin') or use_libunwind):
+ if "vishidden" in selected_experimental_optimizations and (env.TargetOSIs('darwin')
+ or use_libunwind):
if link_model.startswith('dynamic'):
# In dynamic mode, we can't make the default visibility
# hidden because not all libraries have export tags. But
@@ -2464,27 +2734,26 @@ if env.TargetOSIs('posix'):
if 'DISALLOW_VISHIDDEN' in env:
return
return "-fvisibility=hidden"
+
env.Append(
CCFLAGS_VISIBILITY_HIDDEN_GENERATOR=conditional_visibility_generator,
CCFLAGS='$CCFLAGS_VISIBILITY_HIDDEN_GENERATOR',
)
# env.Append( " -Wconversion" ) TODO: this doesn't really work yet
- env.Append( CXXFLAGS=["-Woverloaded-virtual"] )
+ env.Append(CXXFLAGS=["-Woverloaded-virtual"])
# On OS X, clang doesn't want the pthread flag at link time, or it
# issues warnings which make it impossible for us to declare link
# warnings as errors. See http://stackoverflow.com/a/19382663.
if not (env.TargetOSIs('darwin') and env.ToolchainIs('clang')):
- env.Append( LINKFLAGS=["-pthread"] )
+ env.Append(LINKFLAGS=["-pthread"])
# SERVER-9761: Ensure early detection of missing symbols in dependent libraries at program
# startup.
- env.Append(
- LINKFLAGS=[
- "-Wl,-bind_at_load" if env.TargetOSIs('macOS') else "-Wl,-z,now",
- ],
- )
+ env.Append(LINKFLAGS=[
+ "-Wl,-bind_at_load" if env.TargetOSIs('macOS') else "-Wl,-z,now",
+ ], )
# We need to use rdynamic for backtraces with glibc unless we have libunwind.
nordyn = (env.TargetOSIs('darwin') or use_libunwind)
@@ -2493,6 +2762,7 @@ if env.TargetOSIs('posix'):
nordyn &= ("nordyn" in selected_experimental_optimizations)
if nordyn:
+
def export_symbol_generator(source, target, env, for_signature):
symbols = copy.copy(env.get('EXPORT_SYMBOLS', []))
for lib in libdeps.get_libdeps(source, target, env, for_signature):
@@ -2500,6 +2770,7 @@ if env.TargetOSIs('posix'):
symbols.extend(lib.env.get('EXPORT_SYMBOLS', []))
export_expansion = '${EXPORT_SYMBOL_FLAG}'
return [f'-Wl,{export_expansion}{symbol}' for symbol in symbols]
+
env['EXPORT_SYMBOL_GEN'] = export_symbol_generator
# For darwin, we need the leading underscore but for others we
@@ -2510,17 +2781,13 @@ if env.TargetOSIs('posix'):
else:
env['EXPORT_SYMBOL_FLAG'] = "--export-dynamic-symbol,"
- env.Append(
- PROGLINKFLAGS=[
- '$EXPORT_SYMBOL_GEN'
- ],
- )
+ env.Append(PROGLINKFLAGS=[
+ '$EXPORT_SYMBOL_GEN',
+ ], )
elif not env.TargetOSIs('darwin'):
- env.Append(
- PROGLINKFLAGS=[
- "-rdynamic",
- ],
- )
+ env.Append(PROGLINKFLAGS=[
+ "-rdynamic",
+ ], )
#make scons colorgcc friendly
for key in ('HOME', 'TERM'):
@@ -2529,10 +2796,12 @@ if env.TargetOSIs('posix'):
except KeyError:
pass
- if has_option( "gcov" ):
+ if has_option("gcov"):
if not (env.TargetOSIs('linux') and (env.ToolchainIs('gcc', 'clang'))):
# TODO: This should become supported under: https://jira.mongodb.org/browse/SERVER-49877
- env.FatalError("Coverage option 'gcov' is currently only supported on linux with gcc and clang. See SERVER-49877.")
+ env.FatalError(
+ "Coverage option 'gcov' is currently only supported on linux with gcc and clang. See SERVER-49877."
+ )
env.AppendUnique(
CCFLAGS=['--coverage'],
@@ -2540,11 +2809,11 @@ if env.TargetOSIs('posix'):
)
if optBuild and not optBuildForSize:
- env.Append( CCFLAGS=["-O3" if "O3" in selected_experimental_optimizations else "-O2"] )
+ env.Append(CCFLAGS=["-O3" if "O3" in selected_experimental_optimizations else "-O2"])
elif optBuild and optBuildForSize:
- env.Append( CCFLAGS=["-Os"] )
+ env.Append(CCFLAGS=["-Os"])
else:
- env.Append( CCFLAGS=["-O0"] )
+ env.Append(CCFLAGS=["-O0"])
if optBuild and "treevec" in selected_experimental_optimizations:
env.Append(CCFLAGS=["-ftree-vectorize"])
@@ -2555,7 +2824,7 @@ if get_option('wiredtiger') == 'on':
# so disable WiredTiger automatically on 32-bit since wiredtiger is on by default
if env['TARGET_ARCH'] == 'i386':
env.FatalError("WiredTiger is not supported on 32-bit platforms\n"
- "Re-run scons with --wiredtiger=off to build on 32-bit platforms")
+ "Re-run scons with --wiredtiger=off to build on 32-bit platforms")
else:
wiredtiger = True
env.SetConfigHeaderDefine("MONGO_CONFIG_WIREDTIGER_ENABLED")
@@ -2567,7 +2836,6 @@ if get_option('ocsp-stapling') == 'on':
# disable OCSP Stapling on Ubuntu 18.04 machines. See SERVER-51364 for more details.
env.SetConfigHeaderDefine("MONGO_CONFIG_OCSP_STAPLING_ENABLED")
-
if not env.TargetOSIs('windows', 'macOS') and (env.ToolchainIs('GCC', 'clang')):
# By default, apply our current microarchitecture minima. If the
@@ -2583,10 +2851,10 @@ if not env.TargetOSIs('windows', 'macOS') and (env.ToolchainIs('GCC', 'clang')):
# CXXFLAGS.
default_targeting_flags_for_architecture = {
- "aarch64" : { "-march=" : "armv8.2-a", "-mtune=" : "generic" },
- "i386" : { "-march=" : "nocona", "-mtune=" : "generic" },
- "ppc64le" : { "-mcpu=" : "power8", "-mtune=" : "power8", "-mcmodel=" : "medium" },
- "s390x" : { "-march=" : "z196", "-mtune=" : "zEC12" },
+ "aarch64": {"-march=": "armv8.2-a", "-mtune=": "generic"},
+ "i386": {"-march=": "nocona", "-mtune=": "generic"},
+ "ppc64le": {"-mcpu=": "power8", "-mtune=": "power8", "-mcmodel=": "medium"},
+ "s390x": {"-march=": "z196", "-mtune=": "zEC12"},
}
# If we are enabling vectorization in sandybridge mode, we'd
@@ -2594,23 +2862,25 @@ if not env.TargetOSIs('windows', 'macOS') and (env.ToolchainIs('GCC', 'clang')):
# heavy versions can cause clock speed reductions.
if "sandybridge" in selected_experimental_optimizations:
default_targeting_flags_for_architecture["x86_64"] = {
- "-march=" : "sandybridge",
- "-mtune=" : "generic",
- "-mprefer-vector-width=" : "128",
+ "-march=": "sandybridge",
+ "-mtune=": "generic",
+ "-mprefer-vector-width=": "128",
}
default_targeting_flags = default_targeting_flags_for_architecture.get(env['TARGET_ARCH'])
if default_targeting_flags:
search_variables = ['CCFLAGS', 'CFLAGS', 'CXXFLAGS']
for targeting_flag, targeting_flag_value in default_targeting_flags.items():
- if not any(flag_value.startswith(targeting_flag) for search_variable in search_variables for flag_value in env[search_variable]):
+ if not any(
+ flag_value.startswith(targeting_flag) for search_variable in search_variables
+ for flag_value in env[search_variable]):
env.Append(CCFLAGS=[f'{targeting_flag}{targeting_flag_value}'])
# Needed for auth tests since key files are stored in git with mode 644.
if not env.TargetOSIs('windows'):
- for keysuffix in [ "1" , "2", "ForRollover" ]:
+ for keysuffix in ["1", "2", "ForRollover"]:
keyfile = "jstests/libs/key%s" % keysuffix
- os.chmod( keyfile , stat.S_IWUSR|stat.S_IRUSR )
+ os.chmod(keyfile, stat.S_IWUSR | stat.S_IRUSR)
# boostSuffixList is used when using system boost to select a search sequence
# for boost libraries.
@@ -2618,7 +2888,7 @@ boostSuffixList = ["-mt", ""]
if get_option("system-boost-lib-search-suffixes") is not None:
if not use_system_version_of_library("boost"):
env.FatalError("The --system-boost-lib-search-suffixes option is only valid "
- "with --use-system-boost")
+ "with --use-system-boost")
boostSuffixList = get_option("system-boost-lib-search-suffixes")
if boostSuffixList == "":
boostSuffixList = []
@@ -2631,16 +2901,19 @@ mongo_modules = moduleconfig.discover_modules('src/mongo/db/modules', get_option
if get_option('ninja') != 'disabled':
for module in mongo_modules:
if hasattr(module, 'NinjaFile'):
- env.FatalError(textwrap.dedent("""\
+ env.FatalError(
+ textwrap.dedent("""\
ERROR: Ninja tool option '--ninja' should not be used with the ninja module.
Remove the ninja module directory or use '--modules= ' to select no modules.
- If using enterprise module, explicitly set '--modules=<name-of-enterprise-module>' to exclude the ninja module."""))
+ If using enterprise module, explicitly set '--modules=<name-of-enterprise-module>' to exclude the ninja module."""
+ ))
# --- check system ---
ssl_provider = None
free_monitoring = get_option("enable-free-mon")
http_client = get_option("enable-http-client")
+
def isSanitizerEnabled(self, sanitizerName):
if 'SANITIZERS_ENABLED' not in self:
return False
@@ -2648,8 +2921,10 @@ def isSanitizerEnabled(self, sanitizerName):
return 'fuzzer-no-link' in self['SANITIZERS_ENABLED']
return sanitizerName in self['SANITIZERS_ENABLED']
+
env.AddMethod(isSanitizerEnabled, 'IsSanitizerEnabled')
+
def doConfigure(myenv):
global wiredtiger
global ssl_provider
@@ -2663,8 +2938,7 @@ def doConfigure(myenv):
# work with the flags we have selected.
if myenv.ToolchainIs('msvc'):
compiler_minimum_string = "Microsoft Visual Studio 2022 17.0"
- compiler_test_body = textwrap.dedent(
- """
+ compiler_test_body = textwrap.dedent("""
#if !defined(_MSC_VER)
#error
#endif
@@ -2680,8 +2954,7 @@ def doConfigure(myenv):
elif myenv.ToolchainIs('gcc'):
if get_option('cxx-std') == "20":
compiler_minimum_string = "GCC 11.2"
- compiler_test_body = textwrap.dedent(
- """
+ compiler_test_body = textwrap.dedent("""
#if !defined(__GNUC__) || defined(__clang__)
#error
#endif
@@ -2696,8 +2969,7 @@ def doConfigure(myenv):
""" % compiler_minimum_string)
else:
compiler_minimum_string = "GCC 8.2"
- compiler_test_body = textwrap.dedent(
- """
+ compiler_test_body = textwrap.dedent("""
#if !defined(__GNUC__) || defined(__clang__)
#error
#endif
@@ -2713,8 +2985,7 @@ def doConfigure(myenv):
elif myenv.ToolchainIs('clang'):
if get_option('cxx-std') == "20":
compiler_minimum_string = "clang 12.0 (or Apple XCode 13.0)"
- compiler_test_body = textwrap.dedent(
- """
+ compiler_test_body = textwrap.dedent("""
#if !defined(__clang__)
#error
#endif
@@ -2733,8 +3004,7 @@ def doConfigure(myenv):
""" % (compiler_minimum_string, compiler_minimum_string))
else:
compiler_minimum_string = "clang 7.0 (or Apple XCode 13.0)"
- compiler_test_body = textwrap.dedent(
- """
+ compiler_test_body = textwrap.dedent("""
#if !defined(__clang__)
#error
#endif
@@ -2757,18 +3027,22 @@ def doConfigure(myenv):
def CheckForMinimumCompiler(context, language):
extension_for = {
- "C" : ".c",
- "C++" : ".cpp",
+ "C": ".c",
+ "C++": ".cpp",
}
- context.Message("Checking if %s compiler is %s or newer..." %
- (language, compiler_minimum_string))
+ context.Message(
+ "Checking if %s compiler is %s or newer..." % (language, compiler_minimum_string))
result = context.TryCompile(compiler_test_body, extension_for[language])
context.Result(result)
- return result;
+ return result
- conf = Configure(myenv, help=False, custom_tests = {
- 'CheckForMinimumCompiler' : CheckForMinimumCompiler,
- })
+ conf = Configure(
+ myenv,
+ help=False,
+ custom_tests={
+ 'CheckForMinimumCompiler': CheckForMinimumCompiler,
+ },
+ )
c_compiler_validated = conf.CheckForMinimumCompiler('C')
cxx_compiler_validated = conf.CheckForMinimumCompiler('C++')
@@ -2794,9 +3068,9 @@ def doConfigure(myenv):
env['WIN_VERSION_MIN'] = win_version_min
win_version_min = win_version_min_choices[win_version_min]
- env.Append( CPPDEFINES=[("_WIN32_WINNT", "0x" + win_version_min[0])] )
- env.Append( CPPDEFINES=[("BOOST_USE_WINAPI_VERSION", "0x" + win_version_min[0])] )
- env.Append( CPPDEFINES=[("NTDDI_VERSION", "0x" + win_version_min[0] + win_version_min[1])] )
+ env.Append(CPPDEFINES=[("_WIN32_WINNT", "0x" + win_version_min[0])])
+ env.Append(CPPDEFINES=[("BOOST_USE_WINAPI_VERSION", "0x" + win_version_min[0])])
+ env.Append(CPPDEFINES=[("NTDDI_VERSION", "0x" + win_version_min[0] + win_version_min[1])])
conf.Finish()
@@ -2816,14 +3090,19 @@ def doConfigure(myenv):
#endif
"""
- context.Message("Checking for sufficient {0} target version minimum... ".format(context.env['TARGET_OS']))
+ context.Message("Checking for sufficient {0} target version minimum... ".format(
+ context.env['TARGET_OS']))
ret = context.TryCompile(textwrap.dedent(test_body), ".c")
context.Result(ret)
return ret
- conf = Configure(myenv, help=False, custom_tests={
- "CheckDarwinMinima" : CheckDarwinMinima,
- })
+ conf = Configure(
+ myenv,
+ help=False,
+ custom_tests={
+ "CheckDarwinMinima": CheckDarwinMinima,
+ },
+ )
if not conf.CheckDarwinMinima():
conf.env.ConfError("Required target minimum of macOS 10.14 not found")
@@ -2897,9 +3176,13 @@ def doConfigure(myenv):
LINKFLAGS=["$LINKFLAGS_WERROR"],
)
- conf = Configure(cloned, help=False, custom_tests = {
- 'CheckFlag' : lambda ctx : CheckFlagTest(ctx, tool, extension, flag)
- })
+ conf = Configure(
+ cloned,
+ help=False,
+ custom_tests={
+ 'CheckFlag': lambda ctx: CheckFlagTest(ctx, tool, extension, flag),
+ },
+ )
available = conf.CheckFlag()
conf.Finish()
if available:
@@ -2908,22 +3191,27 @@ def doConfigure(myenv):
def AddToCFLAGSIfSupported(env, flag):
return AddFlagIfSupported(env, 'C', '.c', flag, False, CFLAGS=[flag])
+
env.AddMethod(AddToCFLAGSIfSupported)
def AddToCCFLAGSIfSupported(env, flag):
return AddFlagIfSupported(env, 'C', '.c', flag, False, CCFLAGS=[flag])
+
env.AddMethod(AddToCCFLAGSIfSupported)
def AddToCXXFLAGSIfSupported(env, flag):
return AddFlagIfSupported(env, 'C++', '.cpp', flag, False, CXXFLAGS=[flag])
+
env.AddMethod(AddToCXXFLAGSIfSupported)
def AddToLINKFLAGSIfSupported(env, flag):
return AddFlagIfSupported(env, 'C', '.c', flag, True, LINKFLAGS=[flag])
+
env.AddMethod(AddToLINKFLAGSIfSupported)
def AddToSHLINKFLAGSIfSupported(env, flag):
return AddFlagIfSupported(env, 'C', '.c', flag, True, SHLINKFLAGS=[flag])
+
env.AddMethod(AddToSHLINKFLAGSIfSupported)
if myenv.ToolchainIs('gcc', 'clang'):
@@ -2931,7 +3219,8 @@ def doConfigure(myenv):
# because it is much faster. Don't use it if the user has already configured another linker
# selection manually.
if any(flag.startswith('-fuse-ld=') for flag in env['LINKFLAGS']):
- myenv.FatalError(f"Use the '--linker' option instead of modifying the LINKFLAGS directly.")
+ myenv.FatalError(
+ f"Use the '--linker' option instead of modifying the LINKFLAGS directly.")
linker_ld = get_option('linker')
if linker_ld == 'auto':
@@ -2963,28 +3252,34 @@ def doConfigure(myenv):
if has_option('gcov') and AddToCCFLAGSIfSupported(myenv, '-fprofile-update=single'):
myenv.AppendUnique(LINKFLAGS=['-fprofile-update=single'])
- detectCompiler = Configure(myenv, help=False, custom_tests = {
- 'CheckForCXXLink': CheckForCXXLink,
- })
+ detectCompiler = Configure(
+ myenv,
+ help=False,
+ custom_tests={
+ 'CheckForCXXLink': CheckForCXXLink,
+ },
+ )
if not detectCompiler.CheckCC():
env.ConfError(
"C compiler {0} doesn't work",
- detectEnv['CC'])
+ detectEnv['CC'],
+ )
if not detectCompiler.CheckCXX():
env.ConfError(
"C++ compiler {0} doesn't work",
- detectEnv['CXX'])
+ detectEnv['CXX'],
+ )
if not detectCompiler.CheckForCXXLink():
env.ConfError(
"C++ compiler {0} can't link C++ programs",
- detectEnv['CXX'])
+ detectEnv['CXX'],
+ )
detectCompiler.Finish()
-
if myenv.ToolchainIs('clang', 'gcc'):
# This warning was added in g++-4.8.
AddToCCFLAGSIfSupported(myenv, '-Wno-unused-local-typedefs')
@@ -3112,17 +3407,19 @@ def doConfigure(myenv):
return ret
myenvClone = myenv.Clone()
- myenvClone.Append(
- CCFLAGS=[
- '$CCFLAGS_WERROR',
- '-Wnon-virtual-dtor',
- ]
+ myenvClone.Append(CCFLAGS=[
+ '$CCFLAGS_WERROR',
+ '-Wnon-virtual-dtor',
+ ], )
+ conf = Configure(
+ myenvClone,
+ help=False,
+ custom_tests={
+ 'CheckNonVirtualDtor': CheckNonVirtualDtor,
+ },
)
- conf = Configure(myenvClone, help=False, custom_tests = {
- 'CheckNonVirtualDtor' : CheckNonVirtualDtor,
- })
if conf.CheckNonVirtualDtor():
- myenv.Append( CXXFLAGS=["-Wnon-virtual-dtor"] )
+ myenv.Append(CXXFLAGS=["-Wnon-virtual-dtor"])
conf.Finish()
# As of XCode 9, this flag must be present (it is not enabled
@@ -3137,22 +3434,18 @@ def doConfigure(myenv):
# might not link libssp when we need to (see SERVER-12456).
if myenv.ToolchainIs('gcc', 'clang'):
if AddToCCFLAGSIfSupported(myenv, '-fstack-protector-strong'):
- myenv.Append(
- LINKFLAGS=[
- '-fstack-protector-strong',
- ]
- )
+ myenv.Append(LINKFLAGS=[
+ '-fstack-protector-strong',
+ ], )
elif AddToCCFLAGSIfSupported(myenv, '-fstack-protector-all'):
- myenv.Append(
- LINKFLAGS=[
- '-fstack-protector-all',
- ]
- )
+ myenv.Append(LINKFLAGS=[
+ '-fstack-protector-all',
+ ], )
if 'cfex' in selected_experimental_runtime_hardenings:
- myenv.Append(
- CFLAGS=["-fexceptions"]
- )
+ myenv.Append(CFLAGS=[
+ "-fexceptions",
+ ], )
if 'stackclash' in selected_experimental_runtime_hardenings:
AddToCCFLAGSIfSupported(myenv, "-fstack-clash-protection")
@@ -3185,7 +3478,7 @@ def doConfigure(myenv):
pass
if has_option('osx-version-min'):
- message="""
+ message = """
The --osx-version-min option is no longer supported.
To specify a target minimum for Darwin platforms, please explicitly add the appropriate options
@@ -3204,8 +3497,9 @@ def doConfigure(myenv):
if AddToCXXFLAGSIfSupported(myenv, '-stdlib=libc++'):
myenv.Append(LINKFLAGS=['-stdlib=libc++'])
else:
- myenv.ConfError('libc++ requested, but compiler does not support -stdlib=libc++' )
+ myenv.ConfError('libc++ requested, but compiler does not support -stdlib=libc++')
else:
+
def CheckLibStdCxx(context):
test_body = """
#include <vector>
@@ -3219,9 +3513,13 @@ def doConfigure(myenv):
context.Result(ret)
return ret
- conf = Configure(myenv, help=False, custom_tests = {
- 'CheckLibStdCxx' : CheckLibStdCxx,
- })
+ conf = Configure(
+ myenv,
+ help=False,
+ custom_tests={
+ 'CheckLibStdCxx': CheckLibStdCxx,
+ },
+ )
usingLibStdCxx = conf.CheckLibStdCxx()
conf.Finish()
@@ -3238,12 +3536,11 @@ def doConfigure(myenv):
if not AddToCXXFLAGSIfSupported(myenv, '-std=c++20'):
myenv.ConfError('Compiler does not honor -std=c++20')
-
if not AddToCFLAGSIfSupported(myenv, '-std=c11'):
myenv.ConfError("C++17 mode selected for C++ files, but can't enable C11 for C files")
if using_system_version_of_cxx_libraries():
- print( 'WARNING: System versions of C++ libraries must be compiled with C++17 support' )
+ print('WARNING: System versions of C++ libraries must be compiled with C++17 support')
def CheckCxx17(context):
test_body = """
@@ -3272,10 +3569,14 @@ def doConfigure(myenv):
context.Result(ret)
return ret
- conf = Configure(myenv, help=False, custom_tests = {
- 'CheckCxx17' : CheckCxx17,
- 'CheckCxx20' : CheckCxx20,
- })
+ conf = Configure(
+ myenv,
+ help=False,
+ custom_tests={
+ 'CheckCxx17': CheckCxx17,
+ 'CheckCxx20': CheckCxx20,
+ },
+ )
if get_option('cxx-std') == "17" and not conf.CheckCxx17():
myenv.ConfError('C++17 support is required to build MongoDB')
@@ -3300,9 +3601,12 @@ def doConfigure(myenv):
context.Result(ret)
return ret
- conf = Configure(env, custom_tests = {
- 'CheckMemset_s' : CheckMemset_s,
- })
+ conf = Configure(
+ env,
+ custom_tests={
+ 'CheckMemset_s': CheckMemset_s,
+ },
+ )
if conf.CheckMemset_s():
conf.env.SetConfigHeaderDefine("MONGO_CONFIG_HAVE_MEMSET_S")
@@ -3325,6 +3629,7 @@ def doConfigure(myenv):
# wasn't introduced until libstdc++ 5.3.0. Yes, this is a terrible
# hack.
if usingLibStdCxx:
+
def CheckModernLibStdCxx(context):
test_body = """
#if !__has_include(<experimental/filesystem>)
@@ -3337,13 +3642,18 @@ def doConfigure(myenv):
context.Result(ret)
return ret
- conf = Configure(myenv, help=False, custom_tests = {
- 'CheckModernLibStdCxx' : CheckModernLibStdCxx,
- })
+ conf = Configure(
+ myenv,
+ help=False,
+ custom_tests={
+ 'CheckModernLibStdCxx': CheckModernLibStdCxx,
+ },
+ )
suppress_invalid = has_option("disable-minimum-compiler-version-enforcement")
if not conf.CheckModernLibStdCxx() and not suppress_invalid:
- myenv.ConfError("When using libstdc++, MongoDB requires libstdc++ from GCC 5.3.0 or newer")
+ myenv.ConfError(
+ "When using libstdc++, MongoDB requires libstdc++ from GCC 5.3.0 or newer")
conf.Finish()
@@ -3355,14 +3665,15 @@ def doConfigure(myenv):
myenv.FatalError("--use-glibcxx-debug requires --dbg=on")
if not usingLibStdCxx:
myenv.FatalError("--use-glibcxx-debug is only compatible with the GNU implementation "
- "of the C++ standard libary")
+ "of the C++ standard libary")
if using_system_version_of_cxx_libraries():
myenv.FatalError("--use-glibcxx-debug not compatible with system versions of "
- "C++ libraries.")
- myenv.Append(CPPDEFINES=["_GLIBCXX_DEBUG"]);
+ "C++ libraries.")
+ myenv.Append(CPPDEFINES=["_GLIBCXX_DEBUG"])
# Check if we have a modern Windows SDK
if env.TargetOSIs('windows'):
+
def CheckWindowsSDKVersion(context):
test_body = """
@@ -3377,9 +3688,13 @@ def doConfigure(myenv):
context.Result(ret)
return ret
- conf = Configure(myenv, help=False, custom_tests = {
- 'CheckWindowsSDKVersion' : CheckWindowsSDKVersion,
- })
+ conf = Configure(
+ myenv,
+ help=False,
+ custom_tests={
+ 'CheckWindowsSDKVersion': CheckWindowsSDKVersion,
+ },
+ )
if not conf.CheckWindowsSDKVersion():
myenv.ConfError('Windows SDK Version 8.1 or higher is required to build MongoDB')
@@ -3403,9 +3718,13 @@ def doConfigure(myenv):
context.Result(ret)
return ret
- conf = Configure(myenv, help=False, custom_tests = {
- 'CheckPosixSystem' : CheckPosixSystem,
- })
+ conf = Configure(
+ myenv,
+ help=False,
+ custom_tests={
+ 'CheckPosixSystem': CheckPosixSystem,
+ },
+ )
posix_system = conf.CheckPosixSystem()
conf.Finish()
@@ -3414,6 +3733,7 @@ def doConfigure(myenv):
# and the "monotonic" clock.
posix_monotonic_clock = False
if posix_system:
+
def CheckPosixMonotonicClock(context):
test_body = """
@@ -3430,9 +3750,13 @@ def doConfigure(myenv):
context.Result(ret)
return ret
- conf = Configure(myenv, help=False, custom_tests = {
- 'CheckPosixMonotonicClock' : CheckPosixMonotonicClock,
- })
+ conf = Configure(
+ myenv,
+ help=False,
+ custom_tests={
+ 'CheckPosixMonotonicClock': CheckPosixMonotonicClock,
+ },
+ )
posix_monotonic_clock = conf.CheckPosixMonotonicClock()
# On 32-bit systems, we need to define this in order to get access to
@@ -3470,6 +3794,7 @@ def doConfigure(myenv):
env.FatalError("Cannot use --sanitize=address with tcmalloc")
if using_fsan:
+
def CheckForFuzzerCompilerSupport(context):
test_body = """
@@ -3484,19 +3809,30 @@ def doConfigure(myenv):
context.Message("Checking if libfuzzer is supported by the compiler... ")
- context.env.AppendUnique(LINKFLAGS=['-fprofile-instr-generate',
- '-fcoverage-mapping',
- '-fsanitize=fuzzer'],
- CCFLAGS=['-fprofile-instr-generate','-fcoverage-mapping'])
+ context.env.AppendUnique(
+ LINKFLAGS=[
+ '-fprofile-instr-generate',
+ '-fcoverage-mapping',
+ '-fsanitize=fuzzer',
+ ],
+ CCFLAGS=[
+ '-fprofile-instr-generate',
+ '-fcoverage-mapping',
+ ],
+ )
ret = context.TryLink(textwrap.dedent(test_body), ".cpp")
context.Result(ret)
return ret
confEnv = myenv.Clone()
- fuzzerConf = Configure(confEnv, help=False, custom_tests = {
+ fuzzerConf = Configure(
+ confEnv,
+ help=False,
+ custom_tests={
'CheckForFuzzerCompilerSupport': CheckForFuzzerCompilerSupport,
- })
+ },
+ )
if not fuzzerConf.CheckForFuzzerCompilerSupport():
myenv.FatalError("libfuzzer is not supported by the compiler")
fuzzerConf.Finish()
@@ -3507,8 +3843,14 @@ def doConfigure(myenv):
sanitizer_list.remove('fuzzer')
sanitizer_list.append('fuzzer-no-link')
# These flags are needed to generate a coverage report
- myenv.Append(LINKFLAGS=['-fprofile-instr-generate','-fcoverage-mapping'])
- myenv.Append(CCFLAGS=['-fprofile-instr-generate','-fcoverage-mapping'])
+ myenv.Append(LINKFLAGS=[
+ '-fprofile-instr-generate',
+ '-fcoverage-mapping',
+ ], )
+ myenv.Append(CCFLAGS=[
+ '-fprofile-instr-generate',
+ '-fcoverage-mapping',
+ ], )
sanitizer_option = '-fsanitize=' + ','.join(sanitizer_list)
@@ -3516,23 +3858,23 @@ def doConfigure(myenv):
myenv.Append(LINKFLAGS=[sanitizer_option])
myenv.Append(CCFLAGS=['-fno-omit-frame-pointer'])
else:
- myenv.ConfError('Failed to enable sanitizers with flag: {0}', sanitizer_option )
+ myenv.ConfError('Failed to enable sanitizers with flag: {0}', sanitizer_option)
myenv['SANITIZERS_ENABLED'] = sanitizer_list
if has_option('sanitize-coverage') and using_fsan:
sanitize_coverage_list = get_option('sanitize-coverage')
sanitize_coverage_option = '-fsanitize-coverage=' + sanitize_coverage_list
- if AddToCCFLAGSIfSupported(myenv,sanitize_coverage_option):
+ if AddToCCFLAGSIfSupported(myenv, sanitize_coverage_option):
myenv.Append(LINKFLAGS=[sanitize_coverage_option])
else:
- myenv.ConfError('Failed to enable -fsanitize-coverage with flag: {0}', sanitize_coverage_option )
-
+ myenv.ConfError('Failed to enable -fsanitize-coverage with flag: {0}',
+ sanitize_coverage_option)
denyfiles_map = {
- "address" : myenv.File("#etc/asan.denylist"),
- "thread" : myenv.File("#etc/tsan.denylist"),
- "undefined" : myenv.File("#etc/ubsan.denylist"),
+ "address": myenv.File("#etc/asan.denylist"),
+ "thread": myenv.File("#etc/tsan.denylist"),
+ "undefined": myenv.File("#etc/ubsan.denylist"),
}
# Select those unique deny files that are associated with the
@@ -3557,9 +3899,8 @@ def doConfigure(myenv):
if supportedDenyfiles:
# Unconditionally using the full path can affect SCons cached builds, so we only do
# this in cases where we know it's going to matter.
- denylist_options=[
- f"-fsanitize-blacklist={denyfile.path}"
- for denyfile in supportedDenyfiles
+ denylist_options = [
+ f"-fsanitize-blacklist={denyfile.path}" for denyfile in supportedDenyfiles
]
if 'ICECC' in env and env['ICECC']:
@@ -3569,11 +3910,9 @@ def doConfigure(myenv):
# toolchain archive. Local builds remain relative.
local_denylist_options = denylist_options[:]
denylist_options = [
- f"-fsanitize-blacklist={denyfile.abspath}"
- for denyfile in supportedDenyfiles
+ f"-fsanitize-blacklist={denyfile.abspath}" for denyfile in supportedDenyfiles
]
-
# Build a regex of all the regexes in the denylist
# the regex in the denylist are a shell wildcard format
# https://clang.llvm.org/docs/SanitizerSpecialCaseList.html#format
@@ -3586,7 +3925,7 @@ def doConfigure(myenv):
regex_line = re.escape(regex_line)
icecc_denylist_regexes += [regex_line.replace('\\*', ".*")]
- icecc_denylist_regex = re.compile('^(?:' + '|'.join(icecc_denylist_regexes) + ')$')
+ icecc_denylist_regex = re.compile('^(?:' + '|'.join(icecc_denylist_regexes) + ')$')
def is_local_compile(env, target, source, for_signature):
return icecc_denylist_regex.match(str(source[0])) is not None
@@ -3622,7 +3961,8 @@ def doConfigure(myenv):
# Check if the denylist gets a match and if so it will be local
# build and should use the non-abspath.
# NOTE: in non icecream builds denylist_options becomes relative paths.
- if env.subst('$ICECC_LOCAL_COMPILATION_FILTER', target=target, source=source) == 'True':
+ if env.subst('$ICECC_LOCAL_COMPILATION_FILTER', target=target,
+ source=source) == 'True':
return local_denylist_options
return denylist_options
@@ -3658,7 +3998,9 @@ def doConfigure(myenv):
symbolizer_option = f":external_symbolizer_path=\"{llvm_symbolizer}\""
elif using_asan or using_tsan or using_ubsan:
- myenv.FatalError("The address, thread, and undefined behavior sanitizers require llvm-symbolizer for meaningful reports. Please set LLVM_SYMBOLIZER to the path to llvm-symbolizer in your SCons invocation")
+ myenv.FatalError(
+ "The address, thread, and undefined behavior sanitizers require llvm-symbolizer for meaningful reports. Please set LLVM_SYMBOLIZER to the path to llvm-symbolizer in your SCons invocation"
+ )
if using_asan:
# Unfortunately, abseil requires that we make these macros
@@ -3688,7 +4030,9 @@ def doConfigure(myenv):
# logic is already complex, and it feels better to
# make it explicit that using TSAN means you won't get
# the benefits of libunwind. Fixing this is:
- env.FatalError("Cannot use libunwind with TSAN, please add --use-libunwind=off to your compile flags")
+ env.FatalError(
+ "Cannot use libunwind with TSAN, please add --use-libunwind=off to your compile flags"
+ )
# If anything is changed, added, or removed in
# tsan_options, be sure to make the corresponding changes
@@ -3741,14 +4085,12 @@ def doConfigure(myenv):
if myenv.ToolchainIs('msvc') and optBuild:
# http://blogs.msdn.com/b/vcblog/archive/2013/09/11/introducing-gw-compiler-switch.aspx
#
- myenv.Append( CCFLAGS=["/Gw", "/Gy"] )
- myenv.Append( LINKFLAGS=["/OPT:REF"])
+ myenv.Append(CCFLAGS=["/Gw", "/Gy"])
+ myenv.Append(LINKFLAGS=["/OPT:REF"])
# http://blogs.msdn.com/b/vcblog/archive/2014/03/25/linker-enhancements-in-visual-studio-2013-update-2-ctp2.aspx
#
- myenv.Append( CCFLAGS=["/Zc:inline"])
-
-
+ myenv.Append(CCFLAGS=["/Zc:inline"])
if myenv.ToolchainIs('gcc', 'clang'):
# Usually, --gdb-index is too expensive in big static binaries, but for dynamic
@@ -3777,9 +4119,12 @@ def doConfigure(myenv):
# additional details.
if has_option('detect-odr-violations'):
if myenv.ToolchainIs('clang') and usingLibStdCxx:
- env.FatalError('The --detect-odr-violations flag does not work with clang and libstdc++')
+ env.FatalError(
+ 'The --detect-odr-violations flag does not work with clang and libstdc++')
if optBuild:
- env.FatalError('The --detect-odr-violations flag is expected to only be reliable with --opt=off')
+ env.FatalError(
+ 'The --detect-odr-violations flag is expected to only be reliable with --opt=off'
+ )
AddToLINKFLAGSIfSupported(myenv, '-Wl,--detect-odr-violations')
# Disallow an executable stack. Also, issue a warning if any files are found that would
@@ -3820,12 +4165,14 @@ def doConfigure(myenv):
if link_model.startswith("dynamic"):
debug_compress.append("ld")
- compress_type="zlib-gabi"
- compress_flag="compress-debug-sections"
+ compress_type = "zlib-gabi"
+ compress_flag = "compress-debug-sections"
AddToCCFLAGSIfSupported(
myenv,
- f"-Wa,--{compress_flag}={compress_type}" if "as" in debug_compress else f"-Wa,--no{compress_flag}")
+ f"-Wa,--{compress_flag}={compress_type}"
+ if "as" in debug_compress else f"-Wa,--no{compress_flag}",
+ )
# We shouldn't enable debug compression in the linker
# (meaning our final binaries contain compressed debug
@@ -3849,9 +4196,13 @@ def doConfigure(myenv):
context.Result(ret)
return ret
- conf = Configure(myenv, help=False, custom_tests = {
- 'CheckElfHForSHF_COMPRESSED' : CheckElfHForSHF_COMPRESSED,
- })
+ conf = Configure(
+ myenv,
+ help=False,
+ custom_tests={
+ 'CheckElfHForSHF_COMPRESSED': CheckElfHForSHF_COMPRESSED,
+ },
+ )
have_shf_compressed = conf.CheckElfHForSHF_COMPRESSED()
conf.Finish()
@@ -3859,11 +4210,13 @@ def doConfigure(myenv):
if have_shf_compressed and 'ld' in debug_compress:
AddToLINKFLAGSIfSupported(
myenv,
- f"-Wl,--{compress_flag}={compress_type}")
+ f"-Wl,--{compress_flag}={compress_type}",
+ )
else:
AddToLINKFLAGSIfSupported(
myenv,
- f"-Wl,--{compress_flag}=none")
+ f"-Wl,--{compress_flag}=none",
+ )
if "fnsi" in selected_experimental_optimizations:
AddToCCFLAGSIfSupported(myenv, "-fno-semantic-interposition")
@@ -3890,7 +4243,7 @@ def doConfigure(myenv):
if not AddToCCFLAGSIfSupported(myenv, '-flto') or \
not AddToLINKFLAGSIfSupported(myenv, '-flto'):
myenv.ConfError("Link time optimization requested, "
- "but selected compiler does not honor -flto" )
+ "but selected compiler does not honor -flto")
if myenv.TargetOSIs('darwin'):
AddToLINKFLAGSIfSupported(myenv, '-Wl,-object_path_lto,${TARGET}.lto')
@@ -3906,7 +4259,7 @@ def doConfigure(myenv):
# https://gforge.inria.fr/tracker/?func=detail&group_id=131&atid=607&aid=14070
# https://github.com/jedisct1/libsodium/issues/202
def CheckForGlibcKnownToSupportFortify(context):
- test_body="""
+ test_body = """
#include <features.h>
#if !__GLIBC_PREREQ(2, 11)
#error
@@ -3917,9 +4270,13 @@ def doConfigure(myenv):
context.Result(ret)
return ret
- conf = Configure(myenv, help=False, custom_tests = {
- 'CheckForFortify': CheckForGlibcKnownToSupportFortify,
- })
+ conf = Configure(
+ myenv,
+ help=False,
+ custom_tests={
+ 'CheckForFortify': CheckForGlibcKnownToSupportFortify,
+ },
+ )
# Fortify only possibly makes sense on POSIX systems, and we know that clang is not a valid
# combination:
@@ -3927,11 +4284,9 @@ def doConfigure(myenv):
# http://lists.llvm.org/pipermail/cfe-dev/2015-November/045852.html
#
if env.TargetOSIs('posix') and not env.ToolchainIs('clang') and conf.CheckForFortify():
- conf.env.Append(
- CPPDEFINES=[
- ('_FORTIFY_SOURCE', 2),
- ],
- )
+ conf.env.Append(CPPDEFINES=[
+ ('_FORTIFY_SOURCE', 2),
+ ], )
myenv = conf.Finish()
@@ -3945,8 +4300,9 @@ def doConfigure(myenv):
# These headers are only fully standards-compliant on POSIX platforms. Windows
# in particular doesn't implement inttypes.h
if env.TargetOSIs('posix'):
+
def NeedStdCLimitMacros(context):
- test_body="""
+ test_body = """
#undef __STDC_LIMIT_MACROS
#include <stdint.h>
#if defined(INT64_MAX)
@@ -3959,7 +4315,7 @@ def doConfigure(myenv):
return ret
def NeedStdCConstantMacros(context):
- test_body="""
+ test_body = """
#undef __STDC_CONSTANT_MACROS
#include <stdint.h>
#if defined(INTMAX_C)
@@ -3972,7 +4328,7 @@ def doConfigure(myenv):
return ret
def NeedStdCFormatMacros(context):
- test_body="""
+ test_body = """
#undef __STDC_FORMAT_MACROS
#include <inttypes.h>
#if defined(PRIx64)
@@ -3984,20 +4340,22 @@ def doConfigure(myenv):
context.Result(ret)
return ret
- conf = Configure(myenv, help=False, custom_tests = {
- 'NeedStdCLimitMacros': NeedStdCLimitMacros,
- 'NeedStdCConstantMacros': NeedStdCConstantMacros,
- 'NeedStdCFormatMacros': NeedStdCFormatMacros,
- })
-
- conf.env.AppendUnique(
- CPPDEFINES=[
- '__STDC_LIMIT_MACROS' if conf.NeedStdCLimitMacros() else '',
- '__STDC_CONSTANT_MACROS' if conf.NeedStdCConstantMacros() else '',
- '__STDC_FORMAT_MACROS' if conf.NeedStdCFormatMacros() else '',
- ]
+ conf = Configure(
+ myenv,
+ help=False,
+ custom_tests={
+ 'NeedStdCLimitMacros': NeedStdCLimitMacros,
+ 'NeedStdCConstantMacros': NeedStdCConstantMacros,
+ 'NeedStdCFormatMacros': NeedStdCFormatMacros,
+ },
)
+ conf.env.AppendUnique(CPPDEFINES=[
+ '__STDC_LIMIT_MACROS' if conf.NeedStdCLimitMacros() else '',
+ '__STDC_CONSTANT_MACROS' if conf.NeedStdCConstantMacros() else '',
+ '__STDC_FORMAT_MACROS' if conf.NeedStdCFormatMacros() else '',
+ ])
+
myenv = conf.Finish()
# We set this with GCC on x86 platforms to work around
@@ -4027,9 +4385,12 @@ def doConfigure(myenv):
context.Result(result)
return result
- conf = Configure(myenv, custom_tests = {
- 'CheckPThreadSetNameNP': CheckPThreadSetNameNP,
- })
+ conf = Configure(
+ myenv,
+ custom_tests={
+ 'CheckPThreadSetNameNP': CheckPThreadSetNameNP,
+ },
+ )
if conf.CheckPThreadSetNameNP():
conf.env.SetConfigHeaderDefine("MONGO_CONFIG_HAVE_PTHREAD_SETNAME_NP")
@@ -4050,9 +4411,12 @@ def doConfigure(myenv):
context.Result(result)
return result
- conf = Configure(myenv, custom_tests = {
- 'CheckBoostMinVersion': CheckBoostMinVersion,
- })
+ conf = Configure(
+ myenv,
+ custom_tests={
+ 'CheckBoostMinVersion': CheckBoostMinVersion,
+ },
+ )
libdeps.setup_conftests(conf)
@@ -4078,8 +4442,7 @@ def doConfigure(myenv):
def maybeIssueDarwinSSLAdvice(env):
if env.TargetOSIs('macOS'):
- advice = textwrap.dedent(
- """\
+ advice = textwrap.dedent("""\
NOTE: Recent versions of macOS no longer ship headers for the system OpenSSL libraries.
NOTE: Either build without the --ssl flag, or describe how to find OpenSSL.
NOTE: Set the include path for the OpenSSL headers with the CPPPATH SCons variable.
@@ -4087,7 +4450,7 @@ def doConfigure(myenv):
NOTE: If you are using HomeBrew, and have installed OpenSSL, this might look like:
\tscons CPPPATH=/usr/local/opt/openssl/include LIBPATH=/usr/local/opt/openssl/lib ...
NOTE: Consult the output of 'brew info openssl' for details on the correct paths."""
- )
+ )
print(advice)
brew = env.WhereIs('brew')
if brew:
@@ -4096,11 +4459,10 @@ def doConfigure(myenv):
# we could give a better message here, but brew info's machine readable output
# doesn't seem to include the whole 'caveats' section.
message = subprocess.check_output([brew, "info", "openssl"]).decode('utf-8')
- advice = textwrap.dedent(
- """\
+ advice = textwrap.dedent("""\
NOTE: HomeBrew installed to {0} appears to have OpenSSL installed.
NOTE: Consult the output from '{0} info openssl' to determine CPPPATH and LIBPATH."""
- ).format(brew, message)
+ ).format(brew, message)
print(advice)
except:
@@ -4108,29 +4470,32 @@ def doConfigure(myenv):
if not conf.CheckLibWithHeader(
cryptoLibName,
- ["openssl/crypto.h"],
+ ["openssl/crypto.h"],
"C",
"SSLeay_version(0);",
- autoadd=True):
+ autoadd=True,
+ ):
maybeIssueDarwinSSLAdvice(conf.env)
conf.env.ConfError("Couldn't find OpenSSL crypto.h header and library")
def CheckLibSSL(context):
- res = SCons.Conftest.CheckLib(context,
- libs=[sslLibName],
- extra_libs=sslLinkDependencies,
- header='#include "openssl/ssl.h"',
- language="C",
- call="SSL_version(NULL);",
- autoadd=True)
+ res = SCons.Conftest.CheckLib(
+ context,
+ libs=[sslLibName],
+ extra_libs=sslLinkDependencies,
+ header='#include "openssl/ssl.h"',
+ language="C",
+ call="SSL_version(NULL);",
+ autoadd=True,
+ )
context.did_show_result = 1
return not res
conf.AddTest("CheckLibSSL", CheckLibSSL)
if not conf.CheckLibSSL():
- maybeIssueDarwinSSLAdvice(conf.env)
- conf.env.ConfError("Couldn't find OpenSSL ssl.h header and library")
+ maybeIssueDarwinSSLAdvice(conf.env)
+ conf.env.ConfError("Couldn't find OpenSSL ssl.h header and library")
def CheckLinkSSL(context):
test_body = """
@@ -4161,18 +4526,20 @@ def doConfigure(myenv):
conf.env.ConfError("SSL is enabled, but is unavailable")
if conf.CheckDeclaration(
- "FIPS_mode_set",
- includes="""
+ "FIPS_mode_set",
+ includes="""
#include <openssl/crypto.h>
#include <openssl/evp.h>
- """):
+ """,
+ ):
conf.env.SetConfigHeaderDefine('MONGO_CONFIG_HAVE_FIPS_MODE_SET')
if conf.CheckDeclaration(
- "d2i_ASN1_SEQUENCE_ANY",
- includes="""
+ "d2i_ASN1_SEQUENCE_ANY",
+ includes="""
#include <openssl/asn1.h>
- """):
+ """,
+ ):
conf.env.SetConfigHeaderDefine('MONGO_CONFIG_HAVE_ASN1_ANY_DEFINITIONS')
def CheckOpenSSL_EC_DH(context):
@@ -4229,26 +4596,35 @@ def doConfigure(myenv):
if conf.env.TargetOSIs('windows'):
# SChannel on Windows
ssl_provider = 'windows'
- conf.env.SetConfigHeaderDefine("MONGO_CONFIG_SSL_PROVIDER", "MONGO_CONFIG_SSL_PROVIDER_WINDOWS")
- conf.env.Append( MONGO_CRYPTO=["windows"] )
+ conf.env.SetConfigHeaderDefine(
+ "MONGO_CONFIG_SSL_PROVIDER",
+ "MONGO_CONFIG_SSL_PROVIDER_WINDOWS",
+ )
+ conf.env.Append(MONGO_CRYPTO=["windows"])
elif conf.env.TargetOSIs('darwin', 'macOS'):
# SecureTransport on macOS
ssl_provider = 'apple'
- conf.env.SetConfigHeaderDefine("MONGO_CONFIG_SSL_PROVIDER", "MONGO_CONFIG_SSL_PROVIDER_APPLE")
- conf.env.Append( MONGO_CRYPTO=["apple"] )
+ conf.env.SetConfigHeaderDefine(
+ "MONGO_CONFIG_SSL_PROVIDER",
+ "MONGO_CONFIG_SSL_PROVIDER_APPLE",
+ )
+ conf.env.Append(MONGO_CRYPTO=["apple"])
conf.env.AppendUnique(FRAMEWORKS=['CoreFoundation', 'Security'])
elif require_ssl:
checkOpenSSL(conf)
# Working OpenSSL available, use it.
- conf.env.SetConfigHeaderDefine("MONGO_CONFIG_SSL_PROVIDER", "MONGO_CONFIG_SSL_PROVIDER_OPENSSL")
- conf.env.Append( MONGO_CRYPTO=["openssl"] )
+ conf.env.SetConfigHeaderDefine(
+ "MONGO_CONFIG_SSL_PROVIDER",
+ "MONGO_CONFIG_SSL_PROVIDER_OPENSSL",
+ )
+ conf.env.Append(MONGO_CRYPTO=["openssl"])
ssl_provider = 'openssl'
else:
# If we don't need an SSL build, we can get by with TomCrypt.
- conf.env.Append( MONGO_CRYPTO=["tom"] )
+ conf.env.Append(MONGO_CRYPTO=["tom"])
if require_ssl:
# Either crypto engine is native,
@@ -4265,10 +4641,12 @@ def doConfigure(myenv):
# libcurl on all other platforms
if conf.CheckLibWithHeader(
- "curl",
- ["curl/curl.h"], "C",
- "curl_global_init(0);",
- autoadd=False):
+ "curl",
+ ["curl/curl.h"],
+ "C",
+ "curl_global_init(0);",
+ autoadd=False,
+ ):
return True
if required:
@@ -4327,33 +4705,29 @@ def doConfigure(myenv):
conf.env['LIBDEPS_ICUUC_SYSLIBDEP'] = 'icuuc'
if wiredtiger and use_system_version_of_library("wiredtiger"):
- if not conf.CheckCXXHeader( "wiredtiger.h" ):
+ if not conf.CheckCXXHeader("wiredtiger.h"):
myenv.ConfError("Cannot find wiredtiger headers")
conf.FindSysLibDep("wiredtiger", ["wiredtiger"])
- conf.env.Append(
- CPPDEFINES=[
- ("BOOST_THREAD_VERSION", "5"),
- "BOOST_THREAD_USES_DATETIME",
- "BOOST_SYSTEM_NO_DEPRECATED",
- "BOOST_MATH_NO_LONG_DOUBLE_MATH_FUNCTIONS",
- "BOOST_ENABLE_ASSERT_DEBUG_HANDLER",
- "BOOST_LOG_NO_SHORTHAND_NAMES",
- "BOOST_LOG_USE_NATIVE_SYSLOG",
- "BOOST_LOG_WITHOUT_THREAD_ATTR",
- "ABSL_FORCE_ALIGNED_ACCESS",
- ]
- )
+ conf.env.Append(CPPDEFINES=[
+ ("BOOST_THREAD_VERSION", "5"),
+ "BOOST_THREAD_USES_DATETIME",
+ "BOOST_SYSTEM_NO_DEPRECATED",
+ "BOOST_MATH_NO_LONG_DOUBLE_MATH_FUNCTIONS",
+ "BOOST_ENABLE_ASSERT_DEBUG_HANDLER",
+ "BOOST_LOG_NO_SHORTHAND_NAMES",
+ "BOOST_LOG_USE_NATIVE_SYSLOG",
+ "BOOST_LOG_WITHOUT_THREAD_ATTR",
+ "ABSL_FORCE_ALIGNED_ACCESS",
+ ])
if link_model.startswith("dynamic") and not link_model == 'dynamic-sdk':
- conf.env.AppendUnique(
- CPPDEFINES=[
- "BOOST_LOG_DYN_LINK",
- ]
- )
+ conf.env.AppendUnique(CPPDEFINES=[
+ "BOOST_LOG_DYN_LINK",
+ ])
if use_system_version_of_library("boost"):
- if not conf.CheckCXXHeader( "boost/filesystem/operations.hpp" ):
+ if not conf.CheckCXXHeader("boost/filesystem/operations.hpp"):
myenv.ConfError("can't find boost headers")
if not conf.CheckBoostMinVersion():
myenv.ConfError("system's version of boost is too old. version 1.49 or better required")
@@ -4369,7 +4743,8 @@ def doConfigure(myenv):
conf.FindSysLibDep(
boostlib,
[boostlib + suffix for suffix in boostSuffixList],
- language='C++')
+ language='C++',
+ )
if posix_system:
conf.env.SetConfigHeaderDefine("MONGO_CONFIG_HAVE_HEADER_UNISTD_H")
conf.CheckLib('rt')
@@ -4381,25 +4756,26 @@ def doConfigure(myenv):
if get_option('use-diagnostic-latches') == 'off':
conf.env.SetConfigHeaderDefine("MONGO_CONFIG_USE_RAW_LATCHES")
- if (conf.CheckCXXHeader( "execinfo.h" ) and
- conf.CheckDeclaration('backtrace', includes='#include <execinfo.h>') and
- conf.CheckDeclaration('backtrace_symbols', includes='#include <execinfo.h>') and
- conf.CheckDeclaration('backtrace_symbols_fd', includes='#include <execinfo.h>')):
+ if (conf.CheckCXXHeader("execinfo.h")
+ and conf.CheckDeclaration('backtrace', includes='#include <execinfo.h>')
+ and conf.CheckDeclaration('backtrace_symbols', includes='#include <execinfo.h>')
+ and conf.CheckDeclaration('backtrace_symbols_fd', includes='#include <execinfo.h>')):
conf.env.SetConfigHeaderDefine("MONGO_CONFIG_HAVE_EXECINFO_BACKTRACE")
- conf.env["_HAVEPCAP"] = conf.CheckLib( ["pcap", "wpcap"], autoadd=False )
+ conf.env["_HAVEPCAP"] = conf.CheckLib(["pcap", "wpcap"], autoadd=False)
if env.TargetOSIs('solaris'):
- conf.CheckLib( "nsl" )
+ conf.CheckLib("nsl")
conf.env['MONGO_BUILD_SASL_CLIENT'] = bool(has_option("use-sasl-client"))
if conf.env['MONGO_BUILD_SASL_CLIENT'] and not conf.CheckLibWithHeader(
"sasl2",
- ["stddef.h","sasl/sasl.h"],
+ ["stddef.h", "sasl/sasl.h"],
"C",
"sasl_version_info(0, 0, 0, 0, 0, 0);",
- autoadd=False ):
+ autoadd=False,
+ ):
myenv.ConfError("Couldn't find SASL header/libraries")
# requires ports devel/libexecinfo to be installed
@@ -4436,15 +4812,13 @@ def doConfigure(myenv):
}}
""".format(base_type)
- context.Message(
- "Checking if std::atomic<{0}> works{1}... ".format(
- base_type, extra_message
- )
- )
+ context.Message("Checking if std::atomic<{0}> works{1}... ".format(
+ base_type, extra_message))
ret = context.TryLink(textwrap.dedent(test_body), ".cpp")
context.Result(ret)
return ret
+
conf.AddTest("CheckStdAtomic", CheckStdAtomic)
def check_all_atomics(extra_message=''):
@@ -4456,7 +4830,7 @@ def doConfigure(myenv):
if not check_all_atomics():
if not conf.CheckLib('atomic', symbol=None, header=None, language='C', autoadd=1):
myenv.ConfError("Some atomic ops are not intrinsically supported, but "
- "no libatomic found")
+ "no libatomic found")
if not check_all_atomics(' with libatomic'):
myenv.ConfError("The toolchain does not support std::atomic, cannot continue")
@@ -4490,7 +4864,8 @@ def doConfigure(myenv):
}} ha;
""".format(size)
- context.Message('Checking for extended alignment {0} for concurrency types... '.format(size))
+ context.Message(
+ 'Checking for extended alignment {0} for concurrency types... '.format(size))
ret = context.TryCompile(textwrap.dedent(test_body), ".cpp")
context.Result(ret)
return ret
@@ -4501,7 +4876,7 @@ def doConfigure(myenv):
# architecture, assume 64 byte cache lines, which is pretty
# standard. If for some reason the compiler can't offer that, try
# 32.
- default_alignment_search_sequence = [ 64, 32 ]
+ default_alignment_search_sequence = [64, 32]
# The following are the target architectures for which we have
# some knowledge that they have larger cache line sizes. In
@@ -4509,11 +4884,12 @@ def doConfigure(myenv):
# start at the goal state, and work down until we find something
# the compiler can actualy do for us.
extended_alignment_search_sequence = {
- 'ppc64le' : [ 128, 64, 32 ],
- 's390x' : [ 256, 128, 64, 32 ],
+ 'ppc64le': [128, 64, 32],
+ 's390x': [256, 128, 64, 32],
}
- for size in extended_alignment_search_sequence.get(env['TARGET_ARCH'], default_alignment_search_sequence):
+ for size in extended_alignment_search_sequence.get(env['TARGET_ARCH'],
+ default_alignment_search_sequence):
if conf.CheckExtendedAlignment(size):
conf.env.SetConfigHeaderDefine("MONGO_CONFIG_MAX_EXTENDED_ALIGNMENT", size)
break
@@ -4538,11 +4914,12 @@ def doConfigure(myenv):
conf.env['MONGO_HAVE_LIBMONGOC'] = False
if mongoc_mode != 'off':
if conf.CheckLibWithHeader(
- ["mongoc-1.0"],
- ["mongoc/mongoc.h"],
+ ["mongoc-1.0"],
+ ["mongoc/mongoc.h"],
"C",
"mongoc_get_major_version();",
- autoadd=False ):
+ autoadd=False,
+ ):
conf.env['MONGO_HAVE_LIBMONGOC'] = True
if not conf.env['MONGO_HAVE_LIBMONGOC'] and mongoc_mode == 'on':
myenv.ConfError("Failed to find the required C driver headers")
@@ -4629,11 +5006,14 @@ def doConfigure(myenv):
conf.AddTest('CheckAltivecVbpermqOutput', CheckAltivecVbpermqOutput)
- outputIndex = next((idx for idx in [0,1] if conf.CheckAltivecVbpermqOutput(idx)), None)
+ outputIndex = next((idx for idx in [0, 1] if conf.CheckAltivecVbpermqOutput(idx)), None)
if outputIndex is not None:
- conf.env.SetConfigHeaderDefine("MONGO_CONFIG_ALTIVEC_VEC_VBPERMQ_OUTPUT_INDEX", outputIndex)
+ conf.env.SetConfigHeaderDefine("MONGO_CONFIG_ALTIVEC_VEC_VBPERMQ_OUTPUT_INDEX",
+ outputIndex)
else:
- myenv.ConfError("Running on ppc64le, but can't find a correct vec_vbpermq output index. Compiler or platform not supported")
+ myenv.ConfError(
+ "Running on ppc64le, but can't find a correct vec_vbpermq output index. Compiler or platform not supported"
+ )
myenv = conf.Finish()
@@ -4649,7 +5029,8 @@ def doConfigure(myenv):
usdt_provider = 'SDT'
# can put other OS targets here
if usdt_enabled == 'on' and not usdt_provider:
- myenv.ConfError("enable-usdt-probes flag was set to on, but no USDT provider could be found")
+ myenv.ConfError(
+ "enable-usdt-probes flag was set to on, but no USDT provider could be found")
elif usdt_provider:
conf.env.SetConfigHeaderDefine("MONGO_CONFIG_USDT_ENABLED")
conf.env.SetConfigHeaderDefine("MONGO_CONFIG_USDT_PROVIDER", usdt_provider)
@@ -4658,11 +5039,9 @@ def doConfigure(myenv):
return myenv
-
-env = doConfigure( env )
+env = doConfigure(env)
env["NINJA_SYNTAX"] = "#site_scons/third_party/ninja_syntax.py"
-
if env.ToolchainIs("clang"):
env["ICECC_COMPILER_TYPE"] = "clang"
elif env.ToolchainIs("gcc"):
@@ -4724,9 +5103,13 @@ if env.GetOption('num_jobs') == altered_num_jobs:
# give them a very slow build.
if cpu_count is None:
if get_option("ninja") != "disabled":
- env.FatalError("Cannot auto-determine the appropriate size for the Ninja local_job pool. Please regenerate with an explicit -j argument to SCons")
+ env.FatalError(
+ "Cannot auto-determine the appropriate size for the Ninja local_job pool. Please regenerate with an explicit -j argument to SCons"
+ )
else:
- env.FatalError("Cannot auto-determine the appropriate build parallelism on this platform. Please build with an explicit -j argument to SCons")
+ env.FatalError(
+ "Cannot auto-determine the appropriate build parallelism on this platform. Please build with an explicit -j argument to SCons"
+ )
if 'ICECC' in env and env['ICECC'] and get_option("ninja") == "disabled":
# If SCons is driving and we are using icecream, scale up the
@@ -4740,35 +5123,39 @@ if env.GetOption('num_jobs') == altered_num_jobs:
# pool. Scale that up to the number of local CPUs.
env.SetOption('num_jobs', cpu_count)
else:
- if (not has_option('force-jobs')
- and ('ICECC' not in env or not env['ICECC'])
- and env.GetOption('num_jobs') > cpu_count):
+ if (not has_option('force-jobs') and ('ICECC' not in env or not env['ICECC'])
+ and env.GetOption('num_jobs') > cpu_count):
env.FatalError("ERROR: Icecream not enabled while using -j higher than available cpu's. " +
- "Use --force-jobs to override.")
+ "Use --force-jobs to override.")
-if (get_option('ninja') != "disabled"
- and ('ICECC' not in env or not env['ICECC'])
- and not has_option('force-jobs')):
+if (get_option('ninja') != "disabled" and ('ICECC' not in env or not env['ICECC'])
+ and not has_option('force-jobs')):
print(f"WARNING: Icecream not enabled - Ninja concurrency will be capped at {cpu_count} jobs " +
- "without regard to the -j value passed to it. " +
- "Generate your ninja file with --force-jobs to disable this behavior.")
+ "without regard to the -j value passed to it. " +
+ "Generate your ninja file with --force-jobs to disable this behavior.")
env['NINJA_MAX_JOBS'] = cpu_count
if get_option('ninja') != 'disabled':
if 'ICECREAM_VERSION' in env and not env.get('CCACHE', None):
if env['ICECREAM_VERSION'] < parse_version("1.2"):
- env.FatalError("Use of ccache is mandatory with --ninja and icecream older than 1.2. You are running {}.".format(env['ICECREAM_VERSION']))
+ env.FatalError(
+ "Use of ccache is mandatory with --ninja and icecream older than 1.2. You are running {}."
+ .format(env['ICECREAM_VERSION']))
ninja_builder = Tool("ninja")
env["NINJA_BUILDDIR"] = env.Dir("$NINJA_BUILDDIR")
ninja_builder.generate(env)
- ninjaConf = Configure(env, help=False, custom_tests = {
- 'CheckNinjaCompdbExpand': env.CheckNinjaCompdbExpand,
- })
+ ninjaConf = Configure(
+ env,
+ help=False,
+ custom_tests={
+ 'CheckNinjaCompdbExpand': env.CheckNinjaCompdbExpand,
+ },
+ )
env['NINJA_COMPDB_EXPAND'] = ninjaConf.CheckNinjaCompdbExpand()
ninjaConf.Finish()
@@ -4817,7 +5204,8 @@ if get_option('ninja') != 'disabled':
pool="local_pool",
use_depfile=False,
use_response_file=True,
- response_file_content="$rspc $in_newline")
+ response_file_content="$rspc $in_newline",
+ )
# Setup the response file content generation to use our workaround rule
# for LINK commands.
@@ -4847,7 +5235,9 @@ if get_option('ninja') != 'disabled':
rsp_content.append(opt)
ninja_build["variables"]["rspc"] = ' '.join(rsp_content)
- ninja_build["inputs"] += [infile for infile in inputs if infile not in ninja_build["inputs"]]
+ ninja_build["inputs"] += [
+ infile for infile in inputs if infile not in ninja_build["inputs"]
+ ]
# We apply the workaround to all Program nodes as they have potential
# response files that have lines that are too long.
@@ -4878,7 +5268,8 @@ if get_option('ninja') != 'disabled':
)
def get_idlc_command(env, node, action, targets, sources, executor=None):
- _, variables, _ = env.NinjaGetGenericShellCommand(node, action, targets, sources, executor=executor)
+ _, variables, _ = env.NinjaGetGenericShellCommand(node, action, targets, sources,
+ executor=executor)
variables["msvc_deps_prefix"] = "import file:"
return "IDLC", variables, env.subst(env['IDLC']).split()
@@ -4910,9 +5301,7 @@ if get_option('ninja') != 'disabled':
"outputs": [node.get_path()],
"rule": "TEST_LIST",
"implicit": test_files,
- "variables": {
- "files": files,
- }
+ "variables": {"files": files, },
}
if env["PLATFORM"] == "win32":
@@ -4929,7 +5318,6 @@ if get_option('ninja') != 'disabled':
env['NINJA_GENERATED_SOURCE_ALIAS_NAME'] = 'generated-sources'
-
if get_option('separate-debug') == "on" or env.TargetOSIs("windows"):
# The current ninja builder can't handle --separate-debug on non-Windows platforms
@@ -4940,7 +5328,9 @@ if get_option('separate-debug') == "on" or env.TargetOSIs("windows"):
separate_debug = Tool('separate_debug')
if not separate_debug.exists(env):
- env.FatalError('Cannot honor --separate-debug because the separate_debug.py Tool reported as nonexistent')
+ env.FatalError(
+ 'Cannot honor --separate-debug because the separate_debug.py Tool reported as nonexistent'
+ )
separate_debug(env)
env["AUTO_ARCHIVE_TARBALL_SUFFIX"] = "tgz"
@@ -4954,26 +5344,15 @@ env.Tool('auto_archive')
env.DeclareRoles(
roles=[
-
- env.Role(
- name="base",
- ),
-
- env.Role(
- name="debug",
- ),
-
+ env.Role(name="base", ),
+ env.Role(name="debug", ),
env.Role(
name="dev",
dependencies=[
- "runtime"
+ "runtime",
],
),
-
- env.Role(
- name="meta",
- ),
-
+ env.Role(name="meta", ),
env.Role(
name="runtime",
dependencies=[
@@ -4989,6 +5368,7 @@ env.DeclareRoles(
meta_role="meta",
)
+
def _aib_debugdir(source, target, env, for_signature):
for s in source:
origin = getattr(s.attributes, "debug_file_for", None)
@@ -4997,7 +5377,9 @@ def _aib_debugdir(source, target, env, for_signature):
map_entry = env["AIB_SUFFIX_MAP"].get(osuf)
if map_entry:
return map_entry[0]
- env.FatalError("Unable to find debuginfo file in _aib_debugdir: (source='{}')".format(str(source)))
+ env.FatalError("Unable to find debuginfo file in _aib_debugdir: (source='{}')".format(
+ str(source)))
+
env["PREFIX_DEBUGDIR"] = _aib_debugdir
@@ -5108,11 +5490,12 @@ env.AddPackageNameAlias(
name="mh-debugsymbols",
)
+
def rpath_generator(env, source, target, for_signature):
# If the PREFIX_LIBDIR has an absolute path, we will use that directly as
# RPATH because that indicates the final install destination of the libraries.
prefix_libdir = env.subst('$PREFIX_LIBDIR')
- if os.path.isabs(prefix_libdir):
+ if os.path.isabs(prefix_libdir):
return ['$PREFIX_LIBDIR']
# If the PREFIX_LIBDIR is not an absolute path, we will use a relative path
@@ -5123,7 +5506,10 @@ def rpath_generator(env, source, target, for_signature):
return [env.Literal(f"\\$$ORIGIN/{lib_rel}")]
if env['PLATFORM'] == 'darwin':
- return [f"@loader_path/{lib_rel}",]
+ return [
+ f"@loader_path/{lib_rel}",
+ ]
+
env['RPATH_GENERATOR'] = rpath_generator
@@ -5141,8 +5527,7 @@ if env['PLATFORM'] == 'posix':
SHLINKFLAGS=[
# -h works for both the sun linker and the gnu linker.
"-Wl,-h,${TARGET.file}",
- ]
- )
+ ])
elif env['PLATFORM'] == 'darwin':
# The darwin case uses an adhoc implementation of RPATH for SCons
# since SCons does not support RPATH directly for macOS:
@@ -5175,13 +5560,15 @@ incremental_link = Tool('incremental_link')
if incremental_link.exists(env):
incremental_link(env)
+
# Resource Files are Windows specific
def env_windows_resource_file(env, path):
if env.TargetOSIs('windows'):
- return [ env.RES(path) ]
+ return [env.RES(path)]
else:
return []
+
env.AddMethod(env_windows_resource_file, 'WindowsResourceFile')
# --- lint ----
@@ -5190,7 +5577,7 @@ if get_option('lint-scope') == 'changed':
patch_file = env.Command(
target="$BUILD_DIR/current.git.patch",
source=[env.WhereIs("git")],
- action="${SOURCES[0]} diff $GITDIFFFLAGS > $TARGET"
+ action="${SOURCES[0]} diff $GITDIFFFLAGS > $TARGET",
)
env.AlwaysBuild(patch_file)
@@ -5201,7 +5588,8 @@ if get_option('lint-scope') == 'changed':
"buildscripts/pylinters.py",
patch_file,
],
- action="REVISION=$REVISION ENTERPRISE_REV=$ENTERPRISE_REV $PYTHON ${SOURCES[0]} lint-git-diff"
+ action=
+ "REVISION=$REVISION ENTERPRISE_REV=$ENTERPRISE_REV $PYTHON ${SOURCES[0]} lint-git-diff",
)
clang_format = env.Command(
@@ -5210,7 +5598,8 @@ if get_option('lint-scope') == 'changed':
"buildscripts/clang_format.py",
patch_file,
],
- action="REVISION=$REVISION ENTERPRISE_REV=$ENTERPRISE_REV $PYTHON ${SOURCES[0]} lint-git-diff"
+ action=
+ "REVISION=$REVISION ENTERPRISE_REV=$ENTERPRISE_REV $PYTHON ${SOURCES[0]} lint-git-diff",
)
eslint = env.Command(
@@ -5219,7 +5608,8 @@ if get_option('lint-scope') == 'changed':
"buildscripts/eslint.py",
patch_file,
],
- action="REVISION=$REVISION ENTERPRISE_REV=$ENTERPRISE_REV $PYTHON ${SOURCES[0]} lint-git-diff"
+ action=
+ "REVISION=$REVISION ENTERPRISE_REV=$ENTERPRISE_REV $PYTHON ${SOURCES[0]} lint-git-diff",
)
else:
@@ -5228,7 +5618,7 @@ else:
source=[
"buildscripts/pylinters.py",
],
- action="$PYTHON ${SOURCES[0]} lint-all"
+ action="$PYTHON ${SOURCES[0]} lint-all",
)
clang_format = env.Command(
@@ -5236,7 +5626,7 @@ else:
source=[
"buildscripts/clang_format.py",
],
- action="$PYTHON ${SOURCES[0]} lint-all"
+ action="$PYTHON ${SOURCES[0]} lint-all",
)
eslint = env.Command(
@@ -5247,6 +5637,14 @@ else:
action="$PYTHON ${SOURCES[0]} --dirmode lint jstests/ src/mongo",
)
+sconslinters = env.Command(
+ target="#lint-sconslinters",
+ source=[
+ "buildscripts/pylinters.py",
+ ],
+ action="$PYTHON ${SOURCES[0]} lint-scons",
+)
+
lint_py = env.Command(
target="#lint-lint.py",
source=["buildscripts/quickcpplint.py"],
@@ -5259,13 +5657,14 @@ lint_errorcodes = env.Command(
action="$PYTHON ${SOURCES[0]} --quiet",
)
-env.Alias( "lint" , [ lint_py, eslint, clang_format, pylinters, lint_errorcodes ] )
-env.Alias( "lint-fast" , [ eslint, clang_format, pylinters, lint_errorcodes ] )
-env.AlwaysBuild( "lint" )
-env.AlwaysBuild( "lint-fast" )
+env.Alias("lint", [lint_py, eslint, clang_format, pylinters, sconslinters, lint_errorcodes])
+env.Alias("lint-fast", [eslint, clang_format, pylinters, sconslinters, lint_errorcodes])
+env.AlwaysBuild("lint")
+env.AlwaysBuild("lint-fast")
# ---- INSTALL -------
+
def getSystemInstallName():
arch_name = env.subst('$MONGO_DISTARCH')
@@ -5275,14 +5674,14 @@ def getSystemInstallName():
# to the translation dictionary below.
os_name_translations = {
'windows': 'win32',
- 'macOS': 'macos'
+ 'macOS': 'macos',
}
os_name = env.GetTargetOSName()
os_name = os_name_translations.get(os_name, os_name)
n = os_name + "-" + arch_name
if len(mongo_modules):
- n += "-" + "-".join(m.name for m in mongo_modules)
+ n += "-" + "-".join(m.name for m in mongo_modules)
dn = env.subst('$MONGO_DISTMOD')
if len(dn) > 0:
@@ -5290,6 +5689,7 @@ def getSystemInstallName():
return n
+
# This function will add the version.txt file to the source tarball
# so that versioning will work without having the git repo available.
def add_version_to_distsrc(env, archive):
@@ -5305,18 +5705,17 @@ def add_version_to_distsrc(env, archive):
version_data,
sort_keys=True,
indent=4,
- separators=(',', ': ')
- )
- )
+ separators=(',', ': '),
+ ))
+
env.AddDistSrcCallback(add_version_to_distsrc)
env['SERVER_DIST_BASENAME'] = env.subst('mongodb-%s-$MONGO_DISTNAME' % (getSystemInstallName()))
env['MH_DIST_BASENAME'] = 'mh'
if get_option('legacy-tarball') == 'true':
- if ('tar-dist' not in COMMAND_LINE_TARGETS and
- 'zip-dist' not in COMMAND_LINE_TARGETS and
- 'archive-dist' not in COMMAND_LINE_TARGETS):
+ if ('tar-dist' not in COMMAND_LINE_TARGETS and 'zip-dist' not in COMMAND_LINE_TARGETS
+ and 'archive-dist' not in COMMAND_LINE_TARGETS):
env.FatalError('option --legacy-tarball only valid with an archive-dist target')
env['PREFIX'] = '$SERVER_DIST_BASENAME'
@@ -5334,22 +5733,24 @@ module_sconscripts = moduleconfig.get_module_sconscripts(mongo_modules)
# 5.1.0-alpha1-123 => ['5', '1', '0', 'alpha1-123', 'alpha', '1'] => [5, 1, 0, -49]
# 5.1.1 => ['5', '1', '1', '', None, None] => [5, 1, 1, 0]
-version_parts = [ x for x in re.match(r'^(\d+)\.(\d+)\.(\d+)-?((?:(rc|alpha)(\d?))?.*)?',
- env['MONGO_VERSION']).groups() ]
+version_parts = [
+ x for x in re.match(r'^(\d+)\.(\d+)\.(\d+)-?((?:(rc|alpha)(\d?))?.*)?',
+ env['MONGO_VERSION']).groups()
+]
version_extra = version_parts[3] if version_parts[3] else ""
if version_parts[4] == 'rc':
version_parts[3] = int(version_parts[5]) + -25
elif version_parts[4] == 'alpha':
- if version_parts[5] == '':
- version_parts[3] = -50
- else:
- version_parts[3] = int(version_parts[5]) + -50
+ if version_parts[5] == '':
+ version_parts[3] = -50
+ else:
+ version_parts[3] = int(version_parts[5]) + -50
elif version_parts[3]:
version_parts[2] = int(version_parts[2]) + 1
version_parts[3] = -100
else:
version_parts[3] = 0
-version_parts = [ int(x) for x in version_parts[:4]]
+version_parts = [int(x) for x in version_parts[:4]]
# The following symbols are exported for use in subordinate SConscript files.
# Ideally, the SConscript files would be purely declarative. They would only
@@ -5381,15 +5782,21 @@ Export([
'wiredtiger',
])
+
def injectMongoIncludePaths(thisEnv):
thisEnv.AppendUnique(CPPPATH=['$BUILD_DIR'])
+
+
env.AddMethod(injectMongoIncludePaths, 'InjectMongoIncludePaths')
+
def injectModule(env, module, **kwargs):
injector = env['MODULE_INJECTORS'].get(module)
if injector:
return injector(env, **kwargs)
return env
+
+
env.AddMethod(injectModule, 'InjectModule')
if get_option('ninja') == 'disabled':
@@ -5399,7 +5806,6 @@ if get_option('ninja') == 'disabled':
env.Requires(compileCommands, env.Alias("generated-sources"))
compileDb = env.Alias("compiledb", compileCommands)
-
msvc_version = ""
if 'MSVC_VERSION' in env and env['MSVC_VERSION']:
msvc_version = "--version " + env['MSVC_VERSION'] + " "
@@ -5409,7 +5815,8 @@ if get_option("ninja") == "disabled":
vcxprojFile = env.Command(
"mongodb.vcxproj",
compileCommands,
- r"$PYTHON buildscripts\make_vcxproj.py " + msvc_version + "mongodb")
+ r"$PYTHON buildscripts\make_vcxproj.py " + msvc_version + "mongodb",
+ )
vcxproj = env.Alias("vcxproj", vcxprojFile)
# TODO: maybe make these work like the other archive- aliases
@@ -5421,7 +5828,8 @@ env.Alias("distsrc-tar", distSrc)
distSrcGzip = env.GZip(
target="distsrc.tgz",
source=[distSrc],
- NINJA_SKIP=True)
+ NINJA_SKIP=True,
+)
env.NoCache(distSrcGzip)
env.Alias("distsrc-tgz", distSrcGzip)
@@ -5477,12 +5885,13 @@ if has_option('jlink'):
# Keep this late in the game so that we can investigate attributes set by all the tools that have run.
if has_option("cache"):
if get_option("cache") == "nolinked":
+
def noCacheEmitter(target, source, env):
for t in target:
try:
if getattr(t.attributes, 'thin_archive', False):
continue
- except(AttributeError):
+ except (AttributeError):
pass
env.NoCache(t)
return target, source
@@ -5490,7 +5899,7 @@ if has_option("cache"):
def addNoCacheEmitter(builder):
origEmitter = builder.emitter
if SCons.Util.is_Dict(origEmitter):
- for k,v in origEmitter:
+ for k, v in origEmitter:
origEmitter[k] = SCons.Builder.ListEmitter([v, noCacheEmitter])
elif SCons.Util.is_List(origEmitter):
origEmitter.append(noCacheEmitter)
@@ -5531,7 +5940,8 @@ cachePrune = env.Command(
source=[
"#buildscripts/scons_cache_prune.py",
],
- action="$PYTHON ${SOURCES[0]} --cache-dir=${CACHE_DIR.abspath} --cache-size=${CACHE_SIZE} --prune-ratio=${CACHE_PRUNE_TARGET/100.00}",
+ action=
+ "$PYTHON ${SOURCES[0]} --cache-dir=${CACHE_DIR.abspath} --cache-size=${CACHE_SIZE} --prune-ratio=${CACHE_PRUNE_TARGET/100.00}",
CACHE_DIR=env.Dir(cacheDir),
)
@@ -5549,7 +5959,6 @@ env.Alias('configure', None)
# auto_install_binaries to finalize the installation setup.
env.FinalizeInstallDependencies()
-
# We don't want installing files to cause them to flow into the cache,
# since presumably we can re-install them from the origin if needed.
env.NoCache(env.FindInstalledFiles())
diff --git a/buildscripts/pylinters.py b/buildscripts/pylinters.py
index a7cf624a6e7..6b75a88e181 100755
--- a/buildscripts/pylinters.py
+++ b/buildscripts/pylinters.py
@@ -33,6 +33,11 @@ _LINTERS = [
mypy.MypyLinter(),
]
+# List of supported SCons linters
+_SCONS_LINTERS: List[base.LinterBase] = [
+ yapf.YapfLinter(),
+]
+
def get_py_linter(linter_filter):
# type: (str) -> List[base.LinterBase]
@@ -40,11 +45,15 @@ def get_py_linter(linter_filter):
Get a list of linters to use.
'all' or None - select all linters
+ 'scons' - get all scons linters
'a,b,c' - a comma delimited list is describes a list of linters to choose
"""
if linter_filter is None or linter_filter == "all":
return _LINTERS
+ if linter_filter == "scons":
+ return _SCONS_LINTERS
+
linter_list = linter_filter.split(",")
linter_candidates = [linter for linter in _LINTERS if linter.cmd_name in linter_list]
@@ -66,6 +75,18 @@ def is_interesting_file(file_name):
return file_name.endswith(".py") and file_name.startswith(tuple(directory_list))
+def is_scons_file(file_name):
+ # type: (str) -> bool
+ """Return true if this file is related to SCons."""
+ file_denylist = [] # type: List[str]
+ directory_denylist = ["site_scons/third_party"]
+ if file_name in file_denylist or file_name.startswith(tuple(directory_denylist)):
+ return False
+ return (file_name.endswith("SConscript") and file_name.startswith("src")) or \
+ (file_name.endswith(".py") and file_name.startswith("site_scons")) or \
+ file_name == "SConstruct"
+
+
def _lint_files(linters, config_dict, file_names):
# type: (str, Dict[str, str], List[str]) -> None
"""Lint a list of files with clang-format."""
@@ -128,6 +149,14 @@ def lint_all(linters, config_dict, file_names):
_lint_files(linters, config_dict, all_file_names)
+def lint_scons(linters, config_dict, file_names):
+ # type: (str, Dict[str, str], List[str]) -> None
+ """Lint SCons files command entry point."""
+ scons_file_names = git.get_files_to_check(file_names, is_scons_file)
+
+ _lint_files(linters, config_dict, scons_file_names)
+
+
def _fix_files(linters, config_dict, file_names):
# type: (str, Dict[str, str], List[str]) -> None
"""Fix a list of files with linters if possible."""
@@ -164,6 +193,14 @@ def fix_func(linters, config_dict, file_names):
_fix_files(linters, config_dict, all_file_names)
+def fix_scons_func(linters, config_dict, file_names):
+ # type: (str, Dict[str, str], List[str]) -> None
+ """Fix SCons files command entry point."""
+ scons_file_names = git.get_files_to_check(file_names, is_scons_file)
+
+ _fix_files(linters, config_dict, scons_file_names)
+
+
def main():
# type: () -> None
"""Execute Main entry point."""
@@ -207,6 +244,14 @@ def main():
parser_fix.add_argument("file_names", nargs="*", help="Globs of files to check")
parser_fix.set_defaults(func=fix_func)
+ parser_lint = sub.add_parser('lint-scons', help='Lint only SCons files')
+ parser_lint.add_argument("file_names", nargs="*", help="Globs of files to check")
+ parser_lint.set_defaults(func=lint_scons, linters="scons")
+
+ parser_fix = sub.add_parser('fix-scons', help='Fix SCons related files if possible')
+ parser_fix.add_argument("file_names", nargs="*", help="Globs of files to check")
+ parser_fix.set_defaults(func=fix_scons_func, linters="scons")
+
args = parser.parse_args()
# Create a dictionary of linter locations if the user needs to override the location of a
diff --git a/docs/linting.md b/docs/linting.md
index a6a13fa2d19..a499505442d 100644
--- a/docs/linting.md
+++ b/docs/linting.md
@@ -73,6 +73,11 @@ Ex: `buildscripts/pylinters.py lint`
| `pydocstyle` | `.pydocstyle` | `pydocstyle --help` | [https://readthedocs.org/projects/pydocstyle/](https://readthedocs.org/projects/pydocstyle/) |
| `yapf` | `.style.yapf` | `yapf --help` | [https://github.com/google/yapf](https://github.com/google/yapf) |
+### SCons Linters
+`buildscripts/pylinters.py` has the `lint-scons` and `fix-scons` commands to lint
+and fix SCons and build system related code. Currently `yapf` is the only
+linter supported for SCons code.
+
## Using SCons for linting
You can use SCons to run most of the linters listed above via their corresponding Python wrapper
script. SCons also provides the ability to run multiple linters in a single command. At this time,
@@ -89,3 +94,4 @@ Here are some examples:
| `lint-lint.py` | `quickcpplint.py` | `buildscripts/scons.py lint-lint.py` |
| `lint-eslint` | `eslint` | `buildscripts/scons.py lint-eslint` |
| `lint-pylinters` | `pylint` `mypy` `pydocstyle` `yapf` | `buildscripts/scons.py lint-pylinters` |
+| `lint-sconslinters` | `yapf` | `buildscripts/scons.py lint-sconslinters` |
diff --git a/etc/evergreen_yml_components/definitions.yml b/etc/evergreen_yml_components/definitions.yml
index 5552f136010..5b5cc15e50b 100644
--- a/etc/evergreen_yml_components/definitions.yml
+++ b/etc/evergreen_yml_components/definitions.yml
@@ -2934,6 +2934,25 @@ tasks:
vars:
targets: lint-pylinters
+- name: lint_sconslinters
+ tags: ["lint"]
+ commands:
+ - command: timeout.update
+ params:
+ # 40 minutes
+ exec_timeout_secs: 2400
+ - *f_expansions_write
+ - command: manifest.load
+ - func: "git get project and add git tag"
+ - *f_expansions_write
+ - *kill_processes
+ - *cleanup_environment
+ - func: "set up venv"
+ - func: "upload pip requirements"
+ - func: "scons lint"
+ vars:
+ targets: lint-sconslinters
+
- name: lint_clang_format
tags: ["lint"]
commands:
diff --git a/site_scons/libdeps.py b/site_scons/libdeps.py
index ee57adf15a3..767f2a7baf4 100644
--- a/site_scons/libdeps.py
+++ b/site_scons/libdeps.py
@@ -77,14 +77,13 @@ import SCons
from SCons.Script import COMMAND_LINE_TARGETS
-
class Constants:
Libdeps = "LIBDEPS"
LibdepsCached = "LIBDEPS_cached"
LibdepsDependents = "LIBDEPS_DEPENDENTS"
LibdepsGlobal = "LIBDEPS_GLOBAL"
LibdepsNoInherit = "LIBDEPS_NO_INHERIT"
- LibdepsInterface ="LIBDEPS_INTERFACE"
+ LibdepsInterface = "LIBDEPS_INTERFACE"
LibdepsPrivate = "LIBDEPS_PRIVATE"
LibdepsTags = "LIBDEPS_TAGS"
LibdepsTagExpansion = "LIBDEPS_TAG_EXPANSIONS"
@@ -210,7 +209,6 @@ class FlaggedLibdep:
result.append(next_contig_str)
-
class LibdepLinter:
"""
This class stores the rules for linting the libdeps. Using a decorator,
@@ -245,6 +243,7 @@ class LibdepLinter:
"""
funcs = {}
+
def linter_rule_func(func):
funcs[func.__name__] = func
return func
@@ -280,14 +279,17 @@ class LibdepLinter:
self.unique_libs = set()
self._libdeps_types_previous = dict()
-
# If we are in print mode, we will record some linting metrics,
# and print the results at the end of the build.
if self.__class__.print_linter_errors and not self.__class__.registered_linting_time:
import atexit
+
def print_linting_time():
print(f"Spent {self.__class__.linting_time} seconds linting libdeps.")
- print(f"Found {self.__class__.linting_infractions} issues out of {self.__class__.linting_rules_run} libdeps rules checked.")
+ print(
+ f"Found {self.__class__.linting_infractions} issues out of {self.__class__.linting_rules_run} libdeps rules checked."
+ )
+
atexit.register(print_linting_time)
self.__class__.registered_linting_time = True
@@ -303,16 +305,13 @@ class LibdepLinter:
return
start = self._start_timer()
- linter_rules = [
- getattr(self, linter_rule)
- for linter_rule in self.linter_rule.all
- ]
+ linter_rules = [getattr(self, linter_rule) for linter_rule in self.linter_rule.all]
for libdep in libdeps:
for linter_rule in linter_rules:
linter_rule(libdep)
- self._stop_timer(start, len(linter_rules)*len(libdeps))
+ self._stop_timer(start, len(linter_rules) * len(libdeps))
def final_checks(self):
# Build performance optimization if you
@@ -322,8 +321,7 @@ class LibdepLinter:
start = self._start_timer()
linter_rules = [
- getattr(self.__class__, rule)
- for rule in self.__class__.linter_final_check.all
+ getattr(self.__class__, rule) for rule in self.__class__.linter_final_check.all
]
for linter_rule in linter_rules:
@@ -372,11 +370,8 @@ class LibdepLinter:
return deps_dependents
def _get_deps_dependents_with_types(self, builder, type):
- return [
- (dependent[0], builder) if isinstance(dependent, tuple) else
- (dependent, builder)
- for dependent in self.env.get(type, [])
- ]
+ return [(dependent[0], builder) if isinstance(dependent, tuple) else (dependent, builder)
+ for dependent in self.env.get(type, [])]
@linter_rule
def linter_rule_leaf_node_no_deps(self, libdep):
@@ -403,8 +398,7 @@ class LibdepLinter:
self._raise_libdep_lint_exception(
textwrap.dedent(f"""\
{target_type} '{self.target[0]}' has dependency '{lib}' and is marked explicitly as a leaf node,
- and '{lib}' does not exempt itself as an exception to the rule."""
- ))
+ and '{lib}' does not exempt itself as an exception to the rule."""))
@linter_rule
def linter_rule_no_dangling_deps(self, libdep):
@@ -418,8 +412,10 @@ class LibdepLinter:
# Gather the DEPS_DEPENDENTS and store them for a final check to make sure they were
# eventually defined as being built by some builder
libdep_libbuilder = self.target[0].builder.get_name(self.env)
- deps_depends = self._get_deps_dependents_with_types(libdep_libbuilder, Constants.LibdepsDependents)
- deps_depends += self._get_deps_dependents_with_types("Program", Constants.ProgdepsDependents)
+ deps_depends = self._get_deps_dependents_with_types(libdep_libbuilder,
+ Constants.LibdepsDependents)
+ deps_depends += self._get_deps_dependents_with_types("Program",
+ Constants.ProgdepsDependents)
self.__class__.dangling_dep_dependents.update(deps_depends)
@linter_final_check
@@ -434,8 +430,7 @@ class LibdepLinter:
textwrap.dedent(f"""\
Found reverse dependency linked to node '{dep_node}'
which will never be built by any builder.
- Remove the reverse dependency or add a way to build it."""
- ))
+ Remove the reverse dependency or add a way to build it."""))
@linter_rule
def linter_rule_no_public_deps(self, libdep):
@@ -458,8 +453,7 @@ class LibdepLinter:
textwrap.dedent(f"""\
{target_type} '{self.target[0]}' has public dependency '{lib}'
while being marked as not allowed to have public dependencies
- and '{lib}' does not exempt itself."""
- ))
+ and '{lib}' does not exempt itself."""))
@linter_rule
def linter_rule_no_dups(self, libdep):
@@ -475,8 +469,7 @@ class LibdepLinter:
target_type = self.target[0].builder.get_name(self.env)
lib = os.path.basename(str(libdep))
self._raise_libdep_lint_exception(
- f"{target_type} '{self.target[0]}' links '{lib}' multiple times."
- )
+ f"{target_type} '{self.target[0]}' links '{lib}' multiple times.")
self.unique_libs.add(str(libdep))
@@ -513,15 +506,14 @@ class LibdepLinter:
return
if (self.target[0].builder.get_name(self.env) == "Program"
- and libdep.dependency_type not in (deptype.Global, deptype.Public)):
+ and libdep.dependency_type not in (deptype.Global, deptype.Public)):
lib = os.path.basename(str(libdep))
self._raise_libdep_lint_exception(
textwrap.dedent(f"""\
Program '{self.target[0]}' links non-public library '{lib}'
A 'Program' can only have {Constants.Libdeps} libs,
- not {Constants.LibdepsPrivate} or {Constants.LibdepsInterface}."""
- ))
+ not {Constants.LibdepsPrivate} or {Constants.LibdepsInterface}."""))
@linter_rule
def linter_rule_no_bidirectional_deps(self, libdep):
@@ -540,12 +532,13 @@ class LibdepLinter:
return
elif len(self._get_deps_dependents(libdep.target_node.env)) > 0:
- target_type = self.target[0].builder.get_name(self.env)
- lib = os.path.basename(str(libdep))
- self._raise_libdep_lint_exception(textwrap.dedent(f"""\
+ target_type = self.target[0].builder.get_name(self.env)
+ lib = os.path.basename(str(libdep))
+ self._raise_libdep_lint_exception(
+ textwrap.dedent(f"""\
{target_type} '{self.target[0]}' links directly to a reverse dependency node '{lib}'
No node can link directly to a node that has {Constants.LibdepsDependents} or {Constants.ProgdepsDependents}."""
- ))
+ ))
@linter_rule
def linter_rule_nonprivate_on_deps_dependents(self, libdep):
@@ -560,14 +553,15 @@ class LibdepLinter:
return
if (libdep.dependency_type != deptype.Private and libdep.dependency_type != deptype.Global
- and len(self._get_deps_dependents()) > 0):
+ and len(self._get_deps_dependents()) > 0):
target_type = self.target[0].builder.get_name(self.env)
lib = os.path.basename(str(libdep))
- self._raise_libdep_lint_exception(textwrap.dedent(f"""\
+ self._raise_libdep_lint_exception(
+ textwrap.dedent(f"""\
{target_type} '{self.target[0]}' links non-private libdep '{lib}' and has a reverse dependency.
A {target_type} can only have {Constants.LibdepsPrivate} depends if it has {Constants.LibdepsDependents} or {Constants.ProgdepsDependents}."""
- ))
+ ))
@linter_rule
def linter_rule_libdeps_must_be_list(self, libdep):
@@ -581,7 +575,8 @@ class LibdepLinter:
libdeps_vars = list(dep_type_to_env_var.values()) + [
Constants.LibdepsDependents,
- Constants.ProgdepsDependents]
+ Constants.ProgdepsDependents,
+ ]
for dep_type_val in libdeps_vars:
@@ -589,10 +584,11 @@ class LibdepLinter:
if not SCons.Util.is_List(libdeps_list):
target_type = self.target[0].builder.get_name(self.env)
- self._raise_libdep_lint_exception(textwrap.dedent(f"""\
+ self._raise_libdep_lint_exception(
+ textwrap.dedent(f"""\
Found non-list type '{libdeps_list}' while evaluating {dep_type_val[1]} for {target_type} '{self.target[0]}'
- {dep_type_val[1]} must be setup as a list."""
- ))
+ {dep_type_val[1]} must be setup as a list."""))
+
dependency_visibility_ignored = {
deptype.Global: deptype.Public,
@@ -615,6 +611,7 @@ dep_type_to_env_var = {
deptype.Private: Constants.LibdepsPrivate,
}
+
class DependencyCycleError(SCons.Errors.UserError):
"""Exception representing a cycle discovered in library dependencies."""
@@ -623,16 +620,17 @@ class DependencyCycleError(SCons.Errors.UserError):
self.cycle_nodes = [first_node]
def __str__(self):
- return "Library dependency cycle detected: " + " => ".join(
- str(n) for n in self.cycle_nodes
- )
+ return "Library dependency cycle detected: " + " => ".join(str(n) for n in self.cycle_nodes)
+
class LibdepLinterError(SCons.Errors.UserError):
"""Exception representing a discongruent usages of libdeps"""
+
class MissingSyslibdepError(SCons.Errors.UserError):
"""Exception representing a discongruent usages of libdeps"""
+
def _get_sorted_direct_libdeps(node):
direct_sorted = getattr(node.attributes, "libdeps_direct_sorted", None)
if direct_sorted is None:
@@ -808,9 +806,8 @@ def update_scanner(env, builder_name=None, debug=False):
print('\n')
return result
- builder.target_scanner = SCons.Scanner.Scanner(
- function=new_scanner, path_function=path_function
- )
+ builder.target_scanner = SCons.Scanner.Scanner(function=new_scanner,
+ path_function=path_function)
def get_libdeps(source, target, env, for_signature, debug=False):
@@ -853,7 +850,8 @@ def get_syslibdeps(source, target, env, for_signature, debug=False, shared=True)
if deps is None:
# Get the syslibdeps for the current node
- deps = target[0].get_env().Flatten(copy.copy(target[0].get_env().get(Constants.SysLibdepsPrivate)) or [])
+ deps = target[0].get_env().Flatten(
+ copy.copy(target[0].get_env().get(Constants.SysLibdepsPrivate)) or [])
deps += target[0].get_env().Flatten(target[0].get_env().get(Constants.SysLibdeps) or [])
for lib in _get_libdeps(target[0]):
@@ -875,12 +873,12 @@ def get_syslibdeps(source, target, env, for_signature, debug=False, shared=True)
continue
if isinstance(syslib, str) and syslib.startswith(Constants.MissingLibdep):
- raise MissingSyslibdepError(textwrap.dedent(f"""\
+ raise MissingSyslibdepError(
+ textwrap.dedent(f"""\
LibdepsError:
Target '{str(target[0])}' depends on the availability of a
system provided library for '{syslib[len(Constants.MissingLibdep):]}',
- but no suitable library was found during configuration."""
- ))
+ but no suitable library was found during configuration."""))
deps.append(syslib)
@@ -946,15 +944,19 @@ def _get_node_with_ixes(env, node, node_builder_type):
node_with_ixes = SCons.Util.adjustixes(node, prefix, suffix)
return node_factory(node_with_ixes)
+
_get_node_with_ixes.node_type_ixes = dict()
+
def add_node_from(env, node):
env.GetLibdepsGraph().add_nodes_from([(
str(node.abspath),
{
NodeProps.bin_type.name: node.builder.get_name(env),
- })])
+ },
+ )])
+
def add_edge_from(env, from_node, to_node, visibility, direct):
@@ -963,8 +965,10 @@ def add_edge_from(env, from_node, to_node, visibility, direct):
to_node,
{
EdgeProps.direct.name: direct,
- EdgeProps.visibility.name: int(visibility)
- })])
+ EdgeProps.visibility.name: int(visibility),
+ },
+ )])
+
def add_libdeps_node(env, target, libdeps):
@@ -979,7 +983,8 @@ def add_libdeps_node(env, target, libdeps):
str(node.abspath),
str(libdep.target_node.abspath),
visibility=libdep.dependency_type,
- direct=True)
+ direct=True,
+ )
def get_libdeps_nodes(env, target, builder, debug=False, visibility_map=None):
@@ -1027,7 +1032,8 @@ def get_libdeps_nodes(env, target, builder, debug=False, visibility_map=None):
return libdeps
-def libdeps_emitter(target, source, env, debug=False, builder=None, visibility_map=None, ignore_progdeps=False):
+def libdeps_emitter(target, source, env, debug=False, builder=None, visibility_map=None,
+ ignore_progdeps=False):
"""SCons emitter that takes values from the LIBDEPS environment variable and
converts them to File node objects, binding correct path information into
those File objects.
@@ -1094,12 +1100,9 @@ def libdeps_emitter(target, source, env, debug=False, builder=None, visibility_m
visibility = dependent[1]
dependent = dependent[0]
- dependentNode = _get_node_with_ixes(
- env, dependent, builder
- )
- _append_direct_libdeps(
- dependentNode, [dependency(target[0], visibility_map[visibility], dependent)]
- )
+ dependentNode = _get_node_with_ixes(env, dependent, builder)
+ _append_direct_libdeps(dependentNode,
+ [dependency(target[0], visibility_map[visibility], dependent)])
if not ignore_progdeps:
for dependent in env.get(Constants.ProgdepsDependents, []):
@@ -1112,12 +1115,9 @@ def libdeps_emitter(target, source, env, debug=False, builder=None, visibility_m
visibility = dependent[1]
dependent = dependent[0]
- dependentNode = _get_node_with_ixes(
- env, dependent, "Program"
- )
- _append_direct_libdeps(
- dependentNode, [dependency(target[0], visibility_map[visibility], dependent)]
- )
+ dependentNode = _get_node_with_ixes(env, dependent, "Program")
+ _append_direct_libdeps(dependentNode,
+ [dependency(target[0], visibility_map[visibility], dependent)])
return target, source
@@ -1157,7 +1157,7 @@ def expand_libdeps_for_link(source, target, env, for_signature):
# immediately turned back on
for switch_flag in getattr(flagged_libdep.libnode.attributes, 'libdeps_switch_flags', []):
if (prev_libdep and switch_flag['on'] in flagged_libdep.prefix_flags
- and switch_flag['off'] in prev_libdep.postfix_flags):
+ and switch_flag['off'] in prev_libdep.postfix_flags):
flagged_libdep.prefix_flags.remove(switch_flag['on'])
prev_libdep.postfix_flags.remove(switch_flag['off'])
@@ -1179,6 +1179,7 @@ def expand_libdeps_for_link(source, target, env, for_signature):
return libdeps_with_flags
+
def generate_libdeps_graph(env):
if env.get('SYMBOLDEPSSUFFIX', None):
@@ -1196,7 +1197,8 @@ def generate_libdeps_graph(env):
str(target_node.abspath),
str(direct_libdep.target_node.abspath),
visibility=int(direct_libdep.dependency_type),
- direct=True)
+ direct=True,
+ )
direct_libdeps.append(direct_libdep.target_node.abspath)
for libdep in _get_libdeps(target_node):
@@ -1207,45 +1209,58 @@ def generate_libdeps_graph(env):
str(target_node.abspath),
str(libdep.abspath),
visibility=int(deptype.Public),
- direct=False)
+ direct=False,
+ )
if env['PLATFORM'] == 'darwin':
sep = ' '
else:
sep = ':'
- ld_path = sep.join([os.path.dirname(str(libdep)) for libdep in _get_libdeps(target_node)])
- symbol_deps.append(env.Command(
- target=symbols_file,
- source=target_node,
- action=SCons.Action.Action(
- f'{find_symbols} $SOURCE "{ld_path}" $TARGET',
- "Generating $SOURCE symbol dependencies" if not env['VERBOSE'] else "")))
+ ld_path = sep.join(
+ [os.path.dirname(str(libdep)) for libdep in _get_libdeps(target_node)])
+ symbol_deps.append(
+ env.Command(
+ target=symbols_file,
+ source=target_node,
+ action=SCons.Action.Action(
+ f'{find_symbols} $SOURCE "{ld_path}" $TARGET',
+ "Generating $SOURCE symbol dependencies" if not env['VERBOSE'] else ""),
+ ))
def write_graph_hash(env, target, source):
with open(target[0].path, 'w') as f:
- json_str = json.dumps(networkx.readwrite.json_graph.node_link_data(env.GetLibdepsGraph()), sort_keys=True).encode('utf-8')
+ json_str = json.dumps(
+ networkx.readwrite.json_graph.node_link_data(env.GetLibdepsGraph()),
+ sort_keys=True).encode('utf-8')
f.write(hashlib.sha256(json_str).hexdigest())
- graph_hash = env.Command(target="$BUILD_DIR/libdeps/graph_hash.sha256",
- source=symbol_deps,
- action=SCons.Action.FunctionAction(
- write_graph_hash,
- {"cmdstr": None}))
- env.Depends(graph_hash, [
- env.File("#SConstruct")] +
- glob.glob("**/SConscript", recursive=True) +
- [os.path.abspath(__file__),
- env.File('$BUILD_DIR/mongo/util/version_constants.h')])
+ graph_hash = env.Command(
+ target="$BUILD_DIR/libdeps/graph_hash.sha256",
+ source=symbol_deps,
+ action=SCons.Action.FunctionAction(
+ write_graph_hash,
+ {"cmdstr": None},
+ ),
+ )
+ env.Depends(
+ graph_hash,
+ [env.File("#SConstruct")] + glob.glob("**/SConscript", recursive=True) +
+ [os.path.abspath(__file__),
+ env.File('$BUILD_DIR/mongo/util/version_constants.h')],
+ )
graph_node = env.Command(
target=env.get('LIBDEPS_GRAPH_FILE', None),
source=symbol_deps,
action=SCons.Action.FunctionAction(
generate_graph,
- {"cmdstr": "Generating libdeps graph"}))
+ {"cmdstr": "Generating libdeps graph"},
+ ),
+ )
env.Depends(graph_node, [graph_hash] + env.Glob("#buildscripts/libdeps/libdeps/*"))
+
def generate_graph(env, target, source):
libdeps_graph = env.GetLibdepsGraph()
@@ -1270,7 +1285,8 @@ def generate_graph(env, target, source):
libdeps_graph.add_edges_from([(
from_node,
to_node,
- {EdgeProps.symbols.name: " ".join(symbols[libdep]) })])
+ {EdgeProps.symbols.name: " ".join(symbols[libdep])},
+ )])
node = env.File(str(symbol_deps_file)[:-len(env['SYMBOLDEPSSUFFIX'])])
add_node_from(env, node)
@@ -1305,7 +1321,8 @@ def setup_environment(env, emitting_shared=False, debug='off', linting='on'):
# configured.
env['LIBDEPS_GRAPH_ALIAS'] = env.Alias(
'generate-libdeps-graph',
- "${BUILD_DIR}/libdeps/libdeps.graphml")[0]
+ "${BUILD_DIR}/libdeps/libdeps.graphml",
+ )[0]
if str(env['LIBDEPS_GRAPH_ALIAS']) in COMMAND_LINE_TARGETS:
@@ -1323,24 +1340,26 @@ def setup_environment(env, emitting_shared=False, debug='off', linting='on'):
if not env.WhereIs(bin):
env.FatalError(f"'{bin}' not found, Libdeps graph generation requires {bin}.")
-
# The find_symbols binary is a small fast C binary which will extract the missing
# symbols from the target library, and discover what linked libraries supply it. This
# setups the binary to be built.
find_symbols_env = env.Clone()
- find_symbols_env.VariantDir('${BUILD_DIR}/libdeps', 'buildscripts/libdeps', duplicate = 0)
+ find_symbols_env.VariantDir('${BUILD_DIR}/libdeps', 'buildscripts/libdeps', duplicate=0)
find_symbols_node = find_symbols_env.Program(
target='${BUILD_DIR}/libdeps/find_symbols',
source=['${BUILD_DIR}/libdeps/find_symbols.c'],
- CFLAGS=['-O3'])
+ CFLAGS=['-O3'],
+ )
# Here we are setting up some functions which will return single instance of the
# network graph and symbol deps list. We also setup some environment variables
# which are used along side the functions.
symbol_deps = []
+
def append_symbol_deps(env, symbol_deps_file):
env.Depends(env['LIBDEPS_GRAPH_FILE'], symbol_deps_file[0])
symbol_deps.append(symbol_deps_file)
+
env.AddMethod(append_symbol_deps, "AppendSymbolDeps")
env['LIBDEPS_SYMBOL_DEP_FILES'] = symbol_deps
@@ -1349,14 +1368,19 @@ def setup_environment(env, emitting_shared=False, debug='off', linting='on'):
env["SYMBOLDEPSSUFFIX"] = '.symbol_deps'
libdeps_graph = LibdepsGraph()
- libdeps_graph.graph['invocation'] = " ".join([env['ESCAPE'](str(sys.executable))] + [env['ESCAPE'](arg) for arg in sys.argv])
+ libdeps_graph.graph['invocation'] = " ".join([env['ESCAPE'](str(sys.executable))] +
+ [env['ESCAPE'](arg) for arg in sys.argv])
libdeps_graph.graph['git_hash'] = env['MONGO_GIT_HASH']
libdeps_graph.graph['graph_schema_version'] = env['LIBDEPS_GRAPH_SCHEMA_VERSION']
libdeps_graph.graph['build_dir'] = env.Dir('$BUILD_DIR').path
- libdeps_graph.graph['deptypes'] = json.dumps({key: value[0] for key, value in deptype.__members__.items() if isinstance(value, tuple)})
+ libdeps_graph.graph['deptypes'] = json.dumps({
+ key: value[0]
+ for key, value in deptype.__members__.items() if isinstance(value, tuple)
+ })
def get_libdeps_graph(env):
return libdeps_graph
+
env.AddMethod(get_libdeps_graph, "GetLibdepsGraph")
# Now we will setup an emitter, and an additional action for several
@@ -1365,7 +1389,7 @@ def setup_environment(env, emitting_shared=False, debug='off', linting='on'):
if "conftest" not in str(target[0]):
symbol_deps_file = env.File(str(target[0]) + env['SYMBOLDEPSSUFFIX'])
env.Depends(symbol_deps_file, '${BUILD_DIR}/libdeps/find_symbols')
- env.AppendSymbolDeps((symbol_deps_file,target[0]))
+ env.AppendSymbolDeps((symbol_deps_file, target[0]))
return target, source
@@ -1375,40 +1399,40 @@ def setup_environment(env, emitting_shared=False, debug='off', linting='on'):
new_emitter = SCons.Builder.ListEmitter([base_emitter, libdeps_graph_emitter])
builder.emitter = new_emitter
-
env.Append(
LIBDEPS_LIBEMITTER=partial(
libdeps_emitter,
debug=debug,
- builder="StaticLibrary"),
+ builder="StaticLibrary",
+ ),
LIBEMITTER=lambda target, source, env: env["LIBDEPS_LIBEMITTER"](target, source, env),
LIBDEPS_SHAREMITTER=partial(
libdeps_emitter,
debug=debug,
- builder="SharedArchive", ignore_progdeps=True),
+ builder="SharedArchive",
+ ignore_progdeps=True,
+ ),
SHAREMITTER=lambda target, source, env: env["LIBDEPS_SHAREMITTER"](target, source, env),
LIBDEPS_SHLIBEMITTER=partial(
libdeps_emitter,
debug=debug,
builder="SharedLibrary",
- visibility_map=dependency_visibility_honored
+ visibility_map=dependency_visibility_honored,
),
SHLIBEMITTER=lambda target, source, env: env["LIBDEPS_SHLIBEMITTER"](target, source, env),
LIBDEPS_PROGEMITTER=partial(
libdeps_emitter,
debug=debug,
- builder="SharedLibrary" if emitting_shared else "StaticLibrary"
+ builder="SharedLibrary" if emitting_shared else "StaticLibrary",
),
PROGEMITTER=lambda target, source, env: env["LIBDEPS_PROGEMITTER"](target, source, env),
)
env["_LIBDEPS_LIBS_FOR_LINK"] = expand_libdeps_for_link
- env["_LIBDEPS_LIBS"] = (
- "$LINK_LIBGROUP_START "
- "$_LIBDEPS_LIBS_FOR_LINK "
- "$LINK_LIBGROUP_END "
- )
+ env["_LIBDEPS_LIBS"] = ("$LINK_LIBGROUP_START "
+ "$_LIBDEPS_LIBS_FOR_LINK "
+ "$LINK_LIBGROUP_END ")
env.Prepend(_LIBFLAGS="$_LIBDEPS_TAGS $_LIBDEPS $_SYSLIBDEPS ")
for builder_name in ("Program", "SharedLibrary", "LoadableModule", "SharedArchive"):
diff --git a/site_scons/mongo/__init__.py b/site_scons/mongo/__init__.py
index c8714cf68dd..cc744517b74 100644
--- a/site_scons/mongo/__init__.py
+++ b/site_scons/mongo/__init__.py
@@ -4,11 +4,13 @@
import bisect
+
def print_build_failures():
from SCons.Script import GetBuildFailures
for bf in GetBuildFailures():
print("%s failed: %s" % (bf.node, bf.errstr))
+
def insort_wrapper(target_list, target_string):
"""
Removes instances of empty list inside the list before handing it to insort.
diff --git a/site_scons/mongo/generators.py b/site_scons/mongo/generators.py
index e2b401a5eae..da166ad875f 100644
--- a/site_scons/mongo/generators.py
+++ b/site_scons/mongo/generators.py
@@ -4,6 +4,7 @@ import hashlib
# Default and alternative generator definitions go here.
+
# This is the key/value mapping that will be returned by the buildInfo command and
# printed by the --version command-line option to mongod.
# Each mapped value is in turn a dict consisting of:
@@ -77,7 +78,7 @@ def default_buildinfo_environment_data():
),
)
return {
- k:{'key': k, 'value': v, 'inBuildInfo': ibi, 'inVersion': iv}
+ k: {'key': k, 'value': v, 'inBuildInfo': ibi, 'inVersion': iv}
for k, v, ibi, iv in data
}
@@ -109,11 +110,11 @@ def default_variant_dir_generator(target, source, env, for_signature):
# If our option hash yields a well known hash, replace it with its name.
known_variant_hashes = {
- '343e6678' : 'debug',
- '85fcf9b0' : 'opt',
- '981ce870' : 'debug',
- '9fface73' : 'optdebug',
- 'c52b1cc3' : 'opt',
+ '343e6678': 'debug',
+ '85fcf9b0': 'opt',
+ '981ce870': 'debug',
+ '9fface73': 'optdebug',
+ 'c52b1cc3': 'opt',
}
return known_variant_hashes.get(variant_dir, variant_dir)
@@ -122,4 +123,5 @@ def default_variant_dir_generator(target, source, env, for_signature):
def os_specific_variant_dir_generator(target, source, env, for_signature):
return '-'.join([
env['TARGET_OS'],
- default_variant_dir_generator(target, source, env, for_signature)])
+ default_variant_dir_generator(target, source, env, for_signature),
+ ])
diff --git a/site_scons/mongo/install_actions.py b/site_scons/mongo/install_actions.py
index c0eeac3a84e..8f3743299d3 100644
--- a/site_scons/mongo/install_actions.py
+++ b/site_scons/mongo/install_actions.py
@@ -5,30 +5,34 @@ import shutil
import stat
-
def _copy(src, dst):
shutil.copy2(src, dst)
st = os.stat(src)
os.chmod(dst, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
+
def _symlink(src, dst):
os.symlink(os.path.relpath(src, os.path.dirname(dst)), dst)
+
def _hardlink(src, dst):
try:
os.link(src, dst)
except:
_copy(src, dst)
+
available_actions = {
- "copy" : _copy,
- "hardlink" : _hardlink,
- "symlink" : _symlink,
+ "copy": _copy,
+ "hardlink": _hardlink,
+ "symlink": _symlink,
}
+
class _CopytreeError(EnvironmentError):
pass
+
def _generate_install_actions(base_action):
# This is a patched version of shutil.copytree from python 2.5. It
@@ -81,7 +85,6 @@ def _generate_install_actions(base_action):
if errors:
raise _CopytreeError(errors)
-
#
# Functions doing the actual work of the Install Builder.
#
@@ -92,7 +95,9 @@ def _generate_install_actions(base_action):
if os.path.isdir(source):
if os.path.exists(dest):
if not os.path.isdir(dest):
- raise SCons.Errors.UserError("cannot overwrite non-directory `%s' with a directory `%s'" % (str(dest), str(source)))
+ raise SCons.Errors.UserError(
+ "cannot overwrite non-directory `%s' with a directory `%s'" % (str(dest),
+ str(source)))
else:
parent = os.path.split(dest)[0]
if not os.path.exists(parent):
@@ -112,7 +117,8 @@ def _generate_install_actions(base_action):
required symlinks."""
if os.path.isdir(source):
- raise SCons.Errors.UserError("cannot install directory `%s' as a version library" % str(source) )
+ raise SCons.Errors.UserError(
+ "cannot install directory `%s' as a version library" % str(source))
else:
# remove the link if it is already there
try:
diff --git a/site_scons/mongo/pip_requirements.py b/site_scons/mongo/pip_requirements.py
index 5fd9b947b02..e7963b5a69a 100644
--- a/site_scons/mongo/pip_requirements.py
+++ b/site_scons/mongo/pip_requirements.py
@@ -28,11 +28,9 @@ def verify_requirements(requirements_file: str, silent: bool = False):
print(*args, **kwargs)
def raiseSuggestion(ex, pip_pkg):
- raise MissingRequirements(
- f"{ex}\n"
- f"Try running:\n"
- f" {sys.executable} -m pip install {pip_pkg}"
- ) from ex
+ raise MissingRequirements(f"{ex}\n"
+ f"Try running:\n"
+ f" {sys.executable} -m pip install {pip_pkg}") from ex
# Import the prequisites for this function, providing hints on failure.
try:
@@ -65,8 +63,8 @@ def verify_requirements(requirements_file: str, silent: bool = False):
except pkg_resources.ResolutionError as ex:
raiseSuggestion(
ex,
- f"-r {requirements_file}")
-
+ f"-r {requirements_file}",
+ )
verbose("Resolved to these distributions:")
for dist in sorted(set([f" {dist.key} {dist.version}" for dist in dists])):
diff --git a/site_scons/site_tools/abilink.py b/site_scons/site_tools/abilink.py
index 3670ec24166..f57f63a711d 100644
--- a/site_scons/site_tools/abilink.py
+++ b/site_scons/site_tools/abilink.py
@@ -71,15 +71,15 @@ def _add_scanner(builder):
return new_results
builder.target_scanner = SCons.Scanner.Scanner(
- function=new_scanner, path_function=path_function
+ function=new_scanner,
+ path_function=path_function,
)
def _add_action(builder):
actions = builder.action
builder.action = actions + SCons.Action.Action(
- "$ABIDW --no-show-locs $TARGET | md5sum > ${TARGET}.abidw"
- )
+ "$ABIDW --no-show-locs $TARGET | md5sum > ${TARGET}.abidw")
def exists(env):
diff --git a/site_scons/site_tools/auto_archive.py b/site_scons/site_tools/auto_archive.py
index b3c9ddd99a4..91cd0c282c7 100644
--- a/site_scons/site_tools/auto_archive.py
+++ b/site_scons/site_tools/auto_archive.py
@@ -76,9 +76,7 @@ def add_package_name_alias(env, component, role, name):
"""Add a package name mapping for the combination of component and role."""
# Verify we didn't get a None or empty string for any argument
if not name:
- raise Exception(
- "when setting a package name alias must provide a name parameter"
- )
+ raise Exception("when setting a package name alias must provide a name parameter")
if not component:
raise Exception("No component provided for package name alias")
if not role:
@@ -90,7 +88,8 @@ def get_package_name(env, component, role):
"""Return the package file name for the component and role combination."""
basename = env[PACKAGE_ALIAS_MAP].get(
# TODO: silent roles shouldn't be included here
- (component, role), "{component}-{role}".format(component=component, role=role)
+ (component, role),
+ "{component}-{role}".format(component=component, role=role),
)
return basename
@@ -234,11 +233,7 @@ def archive_builder(source, target, env, for_signature):
# Collect all the installed files for our entry. This is doing a pure DAG
# walk idea of what should be. So we filter out any that are not in the
# installed set.
- transitive_files = [
- f for f in
- collect_transitive_files(env, entry)
- if f in installed
- ]
+ transitive_files = [f for f in collect_transitive_files(env, entry) if f in installed]
if not transitive_files:
return []
@@ -258,7 +253,7 @@ def archive_builder(source, target, env, for_signature):
return "{prefix} {files}".format(
prefix=command_prefix,
- files=" ".join(relative_files)
+ files=" ".join(relative_files),
)
@@ -274,11 +269,11 @@ def generate(env):
action=SCons.Action.CommandGeneratorAction(
archive_builder,
{"cmdstr": "Building package ${TARGETS[0]} from ${SOURCES[1:]}"},
- )
- )
+ ))
env.Append(BUILDERS={"AutoArchive": bld})
env["AUTO_ARCHIVE_TARBALL_SUFFIX"] = env.get(
- "AUTO_ARCHIVE_TARBALL_SUFFIX", "tar.gz"
+ "AUTO_ARCHIVE_TARBALL_SUFFIX",
+ "tar.gz",
)
env["AUTO_ARCHIVE_ZIP_SUFFIX"] = env.get("AUTO_ARCHIVE_ZIP_SUFFIX", "zip")
env[PACKAGE_ALIAS_MAP] = {}
@@ -297,5 +292,4 @@ def generate(env):
"tar": (auto_archive_gen(env, make_archive_script, "tar"), False),
"zip": (auto_archive_gen(env, make_archive_script, "zip"), False),
"archive": (auto_archive_gen(env, make_archive_script, "auto"), False),
- }
- )
+ })
diff --git a/site_scons/site_tools/auto_install_binaries.py b/site_scons/site_tools/auto_install_binaries.py
index c6429ad396f..55488465d0e 100644
--- a/site_scons/site_tools/auto_install_binaries.py
+++ b/site_scons/site_tools/auto_install_binaries.py
@@ -42,8 +42,10 @@ ROLE_DECLARATIONS = "AIB_ROLE_DECLARATIONS"
SUFFIX_MAP = "AIB_SUFFIX_MAP"
TASKS = "AIB_TASKS"
-
-SuffixMap = namedtuple("SuffixMap", ["directory", "default_role"],)
+SuffixMap = namedtuple(
+ "SuffixMap",
+ ["directory", "default_role"],
+)
class RoleInfo:
@@ -98,24 +100,17 @@ def declare_roles(env, roles, base_role=None, meta_role=None):
for role in roles:
for d in role.dependencies:
if d not in role_names:
- raise Exception(
- "Role dependency '{}' does not name a declared role".format(d)
- )
+ raise Exception("Role dependency '{}' does not name a declared role".format(d))
if isinstance(base_role, str):
if base_role not in role_names:
raise Exception(
- "A base_role argument was provided but it does not name a declared role"
- )
+ "A base_role argument was provided but it does not name a declared role")
elif isinstance(base_role, DeclaredRole):
if base_role not in roles:
- raise Exception(
- "A base_role argument was provided but it is not a declared role"
- )
+ raise Exception("A base_role argument was provided but it is not a declared role")
elif base_role is not None:
- raise Exception(
- "The base_role argument must be a string name of a role or a role object"
- )
+ raise Exception("The base_role argument must be a string name of a role or a role object")
else:
# Set it to something falsey
base_role = str()
@@ -123,17 +118,12 @@ def declare_roles(env, roles, base_role=None, meta_role=None):
if isinstance(meta_role, str):
if meta_role not in role_names:
raise Exception(
- "A meta_role argument was provided but it does not name a declared role"
- )
+ "A meta_role argument was provided but it does not name a declared role")
elif isinstance(meta_role, DeclaredRole):
if meta_role not in roles:
- raise Exception(
- "A meta_role argument was provided but it is not a declared role"
- )
+ raise Exception("A meta_role argument was provided but it is not a declared role")
elif meta_role is not None:
- raise Exception(
- "The meta_role argument must be a string name of a role or a role object"
- )
+ raise Exception("The meta_role argument must be a string name of a role or a role object")
else:
# Set it to something falsy
meta_role = str()
@@ -199,12 +189,7 @@ def get_alias_map_entry(env, component, role):
r_entry.dependencies.add(base_c_entry)
meta_role = env.get(META_ROLE)
- if (
- meta_role
- and role != meta_role
- and meta_component
- and component != meta_component
- ):
+ if (meta_role and role != meta_role and meta_component and component != meta_component):
meta_r_entry = get_alias_map_entry(env, component, meta_role)
meta_c_r_entry = get_alias_map_entry(env, meta_component, meta_role)
meta_c_r_entry.dependencies.add(meta_r_entry)
@@ -259,23 +244,15 @@ def scan_for_transitive_install(node, env, _path):
if component_base_entry.files:
results.update(component_base_entry.files)
- if (
- base_role
- and base_component
- and component != base_component
- and role != base_role
- ):
+ if (base_role and base_component and component != base_component and role != base_role):
base_base_entry = alias_map[base_component][base_role]
if base_base_entry.files:
results.update(base_base_entry.files)
- installed_children = set(
- grandchild
- for child in node.children()
- for direct_children in child.children()
- for grandchild in direct_children.get_executor().get_all_targets()
- if direct_children.get_executor() and grandchild.has_builder()
- )
+ installed_children = set(grandchild for child in node.children()
+ for direct_children in child.children()
+ for grandchild in direct_children.get_executor().get_all_targets()
+ if direct_children.get_executor() and grandchild.has_builder())
for child in installed_children:
auto_installed_files = get_auto_installed_files(env, child)
@@ -324,11 +301,8 @@ def tag_components(env, target, **kwargs):
raise Exception("AIB_COMPONENT must be a string and contain no whitespace.")
if component is None:
- raise Exception(
- "AIB_COMPONENT must be provided; untagged targets: {}".format(
- [t.path for t in target]
- )
- )
+ raise Exception("AIB_COMPONENT must be provided; untagged targets: {}".format(
+ [t.path for t in target]))
if role is None:
raise Exception("AIB_ROLE was not provided.")
@@ -344,11 +318,8 @@ def tag_components(env, target, **kwargs):
# component or base component. These cause dependency cycles because
# get_alias_map_entry will do that wiring for us then we will try to
# map them back on themselves in our loop.
- if (
- component != env.get(BASE_COMPONENT)
- and role != env.get(META_ROLE)
- and component != env.get(META_COMPONENT)
- ):
+ if (component != env.get(BASE_COMPONENT) and role != env.get(META_ROLE)
+ and component != env.get(META_COMPONENT)):
for component in kwargs.get(REVERSE_COMPONENT_DEPENDENCIES, []):
component_dep = get_alias_map_entry(env, component, role)
component_dep.dependencies.add(entry)
@@ -386,9 +357,7 @@ def auto_install_pseudobuilder(env, target, source, **kwargs):
auto_install_mapping = env[SUFFIX_MAP].get(suffix)
if not auto_install_mapping:
- raise Exception(
- "No target provided and no auto install mapping found for:", str(s)
- )
+ raise Exception("No target provided and no auto install mapping found for:", str(s))
target_for_source = auto_install_mapping.directory
@@ -449,14 +418,10 @@ def finalize_install_dependencies(env):
alias_name = generate_alias_name(env, component, role, task)
alias = env.Alias(alias_name, func(env, component, role))
if generate_dependent_aliases:
- dependent_aliases = env.Flatten(
- [
- env.Alias(
- generate_alias_name(env, d.component, d.role, task)
- )
- for d in info.dependencies
- ]
- )
+ dependent_aliases = env.Flatten([
+ env.Alias(generate_alias_name(env, d.component, d.role, task))
+ for d in info.dependencies
+ ])
env.Alias(alias, dependent_aliases)
@@ -499,11 +464,8 @@ def add_suffix_mapping(env, suffix, role=None):
"""Map suffix to role"""
if isinstance(suffix, str):
if role not in env[ROLE_DECLARATIONS]:
- raise Exception(
- "target {} is not a known role available roles are {}".format(
- role, env[ROLE_DECLARATIONS].keys()
- )
- )
+ raise Exception("target {} is not a known role available roles are {}".format(
+ role, env[ROLE_DECLARATIONS].keys()))
env[SUFFIX_MAP][env.subst(suffix)] = role
if not isinstance(suffix, dict):
@@ -512,11 +474,8 @@ def add_suffix_mapping(env, suffix, role=None):
for _, mapping in suffix.items():
role = mapping.default_role
if role not in env[ROLE_DECLARATIONS]:
- raise Exception(
- "target {} is not a known role. Available roles are {}".format(
- target, env[ROLE_DECLARATIONS].keys()
- )
- )
+ raise Exception("target {} is not a known role. Available roles are {}".format(
+ target, env[ROLE_DECLARATIONS].keys()))
env[SUFFIX_MAP].update({env.subst(key): value for key, value in suffix.items()})
@@ -536,6 +495,7 @@ def list_components(env, **kwargs):
for key in env[ALIAS_MAP]:
print("\t", key)
+
def list_hierarchical_aib_recursive(mapping, counter=0):
if counter == 0:
print(" " * counter, mapping.id)
@@ -582,7 +542,9 @@ def list_targets():
# dedup and sort targets
targets = sorted(list(set(targets)))
- print("The following are AIB targets. Note that runtime role is implied if not specified. For example, install-mongod")
+ print(
+ "The following are AIB targets. Note that runtime role is implied if not specified. For example, install-mongod"
+ )
tasks_str = ','.join(tasks)
print(f"TASK={{{tasks_str}}}")
roles_str = ','.join(roles)
@@ -618,14 +580,13 @@ def generate(env): # pylint: disable=too-many-statements
env[SUFFIX_MAP] = {}
env[ALIAS_MAP] = defaultdict(dict)
- env.AppendUnique(
- AIB_TASKS={
- "install": auto_install_task,
- }
- )
+ env.AppendUnique(AIB_TASKS={
+ "install": auto_install_task,
+ })
env.AddMethod(
- scan_for_transitive_install_pseudobuilder, "GetTransitivelyInstalledFiles"
+ scan_for_transitive_install_pseudobuilder,
+ "GetTransitivelyInstalledFiles",
)
env.AddMethod(get_role_declaration, "GetRoleDeclaration")
env.AddMethod(get_auto_installed_files, "GetAutoInstalledFiles")
@@ -664,5 +625,6 @@ def generate(env): # pylint: disable=too-many-statements
assert base_install_builder.target_scanner is None
base_install_builder.target_scanner = SCons.Scanner.Scanner(
- function=scan_for_transitive_install, path_function=None
+ function=scan_for_transitive_install,
+ path_function=None,
)
diff --git a/site_scons/site_tools/ccache.py b/site_scons/site_tools/ccache.py
index 2a4b89015d5..dc7ca4cd1e3 100644
--- a/site_scons/site_tools/ccache.py
+++ b/site_scons/site_tools/ccache.py
@@ -75,7 +75,9 @@ def exists(env):
if validated:
env['CCACHE_VERSION'] = ccache_version
else:
- print(f"Error: failed to verify ccache version >= {_ccache_version_min}, found {ccache_version}")
+ print(
+ f"Error: failed to verify ccache version >= {_ccache_version_min}, found {ccache_version}"
+ )
return validated
@@ -147,10 +149,8 @@ def generate(env):
# compiler parameter and differences in the file need to be accounted for in the
# hash result to prevent erroneous cache hits.
if "CCACHE_EXTRAFILES" in env and env["CCACHE_EXTRAFILES"]:
- env["ENV"]["CCACHE_EXTRAFILES"] = ":".join([
- denyfile.path
- for denyfile in env["CCACHE_EXTRAFILES"]
- ])
+ env["ENV"]["CCACHE_EXTRAFILES"] = ":".join(
+ [denyfile.path for denyfile in env["CCACHE_EXTRAFILES"]])
# Make a generator to expand to CCACHE in the case where we are
# not a conftest. We don't want to use ccache for configure tests
@@ -165,6 +165,7 @@ def generate(env):
if "conftest" not in str(target[0]):
return '$CCACHE'
return ''
+
env['CCACHE_GENERATOR'] = ccache_generator
# Add ccache to the relevant command lines. Wrap the reference to
diff --git a/site_scons/site_tools/compilation_db.py b/site_scons/site_tools/compilation_db.py
index 833be4a7c22..7e26b91d258 100644
--- a/site_scons/site_tools/compilation_db.py
+++ b/site_scons/site_tools/compilation_db.py
@@ -142,7 +142,11 @@ def WriteCompilationDb(target, source, env):
with open(str(target[0]), "w") as target_file:
json.dump(
- entries, target_file, sort_keys=True, indent=4, separators=(",", ": ")
+ entries,
+ target_file,
+ sort_keys=True,
+ indent=4,
+ separators=(",", ": "),
)
@@ -155,7 +159,8 @@ def generate(env, **kwargs):
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
env["COMPILATIONDB_COMSTR"] = kwargs.get(
- "COMPILATIONDB_COMSTR", "Building compilation database $TARGET"
+ "COMPILATIONDB_COMSTR",
+ "Building compilation database $TARGET",
)
components_by_suffix = itertools.chain(
@@ -181,18 +186,19 @@ def generate(env, **kwargs):
# Assumes a dictionary emitter
emitter = builder.emitter[suffix]
- builder.emitter[suffix] = SCons.Builder.ListEmitter(
- [emitter, makeEmitCompilationDbEntry(command),]
- )
+ builder.emitter[suffix] = SCons.Builder.ListEmitter([
+ emitter,
+ makeEmitCompilationDbEntry(command),
+ ])
env["BUILDERS"]["__COMPILATIONDB_Entry"] = SCons.Builder.Builder(
- action=SCons.Action.Action(CompilationDbEntryAction, None),
- )
+ action=SCons.Action.Action(CompilationDbEntryAction, None), )
env["BUILDERS"]["__COMPILATIONDB_Database"] = SCons.Builder.Builder(
action=SCons.Action.Action(WriteCompilationDb, "$COMPILATIONDB_COMSTR"),
target_scanner=SCons.Scanner.Scanner(
- function=ScanCompilationDb, node_class=None
+ function=ScanCompilationDb,
+ node_class=None,
),
)
diff --git a/site_scons/site_tools/distsrc.py b/site_scons/site_tools/distsrc.py
index 95200775bfd..83f47f2ab3f 100644
--- a/site_scons/site_tools/distsrc.py
+++ b/site_scons/site_tools/distsrc.py
@@ -61,7 +61,10 @@ class DistSrcArchive:
)
elif filename.endswith("zip"):
return DistSrcZipArchive(
- "zip", zipfile.ZipFile(filename, "a"), filename, "a",
+ "zip",
+ zipfile.ZipFile(filename, "a"),
+ filename,
+ "a",
)
def close(self):
@@ -89,13 +92,13 @@ class DistSrcTarArchive(DistSrcArchive):
)
def append_file_contents(
- self,
- filename,
- file_contents,
- mtime=None,
- mode=0o644,
- uname="root",
- gname="root",
+ self,
+ filename,
+ file_contents,
+ mtime=None,
+ mode=0o644,
+ uname="root",
+ gname="root",
):
if mtime is None:
mtime = time.time()
@@ -109,7 +112,9 @@ class DistSrcTarArchive(DistSrcArchive):
if self.archive_mode == "r":
self.archive_file.close()
self.archive_file = tarfile.open(
- self.archive_name, "a", format=tarfile.PAX_FORMAT,
+ self.archive_name,
+ "a",
+ format=tarfile.PAX_FORMAT,
)
self.archive_mode = "a"
self.archive_file.addfile(file_metadata, fileobj=file_buf)
@@ -141,13 +146,13 @@ class DistSrcZipArchive(DistSrcArchive):
)
def append_file_contents(
- self,
- filename,
- file_contents,
- mtime=None,
- mode=0o644,
- uname="root",
- gname="root",
+ self,
+ filename,
+ file_contents,
+ mtime=None,
+ mode=0o644,
+ uname="root",
+ gname="root",
):
if mtime is None:
mtime = time.time()
@@ -187,15 +192,14 @@ def distsrc_action_generator(source, target, env, for_signature):
print("Invalid file format for distsrc. Must be tar or zip file")
env.Exit(1)
- git_cmd = (
- '"%s" archive --format %s --output %s --prefix ${MONGO_DIST_SRC_PREFIX} HEAD'
- % (git_path, target_ext, target[0])
- )
+ git_cmd = ('"%s" archive --format %s --output %s --prefix ${MONGO_DIST_SRC_PREFIX} HEAD' %
+ (git_path, target_ext, target[0]))
return [
SCons.Action.Action(git_cmd, "Running git archive for $TARGET"),
SCons.Action.Action(
- run_distsrc_callbacks, "Running distsrc callbacks for $TARGET"
+ run_distsrc_callbacks,
+ "Running distsrc callbacks for $TARGET",
),
]
@@ -206,9 +210,7 @@ def add_callback(env, fn):
def generate(env, **kwargs):
env.AddMethod(add_callback, "AddDistSrcCallback")
- env["BUILDERS"]["__DISTSRC"] = SCons.Builder.Builder(
- generator=distsrc_action_generator,
- )
+ env["BUILDERS"]["__DISTSRC"] = SCons.Builder.Builder(generator=distsrc_action_generator, )
def DistSrc(env, target, **kwargs):
result = env.__DISTSRC(target=target, source=[], **kwargs)
diff --git a/site_scons/site_tools/forceincludes.py b/site_scons/site_tools/forceincludes.py
index 6d535bf0ba0..7807ca19f7e 100644
--- a/site_scons/site_tools/forceincludes.py
+++ b/site_scons/site_tools/forceincludes.py
@@ -22,6 +22,7 @@
import SCons
+
def _add_scanner(builder):
# We are taking over the target scanner here. If we want to not do
# that we need to invent a ListScanner concept to inject. What if
@@ -35,7 +36,9 @@ def _add_scanner(builder):
# If all nodes could not be resolved, there are missing headers.
if not all(fis):
- missing_headers = [header for node, header in zip(fis, env.get('FORCEINCLUDES')) if not node]
+ missing_headers = [
+ header for node, header in zip(fis, env.get('FORCEINCLUDES')) if not node
+ ]
errstring = f"Could not find force include header(s): {missing_headers} in any path in CPPPATH:\n"
for cpppath in env.get('CPPPATH', []):
errstring += f"\t{env.Dir(cpppath).path}\n"
@@ -60,6 +63,7 @@ def _add_scanner(builder):
argument=builder.source_scanner,
)
+
def generate(env, **kwargs):
if not 'FORCEINCLUDEPREFIX' in env:
if 'msvc' in env.get('TOOLS', []):
@@ -82,11 +86,11 @@ def generate(env, **kwargs):
# would enable discovery.
CCFLAGS=[
'$_FORCEINCLUDES',
- ]
- )
+ ])
for object_builder in SCons.Tool.createObjBuilders(env):
_add_scanner(object_builder)
+
def exists(env):
return True
diff --git a/site_scons/site_tools/git_decider.py b/site_scons/site_tools/git_decider.py
index 0cb219edc5e..b092b743236 100644
--- a/site_scons/site_tools/git_decider.py
+++ b/site_scons/site_tools/git_decider.py
@@ -20,6 +20,7 @@
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
+
def generate(env, **kwargs):
# Grab the existing decider functions out of the environment
diff --git a/site_scons/site_tools/gziptool.py b/site_scons/site_tools/gziptool.py
index 6d6b0099f97..8f136642dd4 100644
--- a/site_scons/site_tools/gziptool.py
+++ b/site_scons/site_tools/gziptool.py
@@ -34,10 +34,13 @@ def GZipAction(target, source, env, **kw):
def generate(env, **kwargs):
env["BUILDERS"]["__GZIPTOOL"] = SCons.Builder.Builder(
- action=SCons.Action.Action(GZipAction, "$GZIPTOOL_COMSTR")
- )
+ action=SCons.Action.Action(
+ GZipAction,
+ "$GZIPTOOL_COMSTR",
+ ))
env["GZIPTOOL_COMSTR"] = kwargs.get(
- "GZIPTOOL_COMSTR", "Compressing $TARGET with gzip"
+ "GZIPTOOL_COMSTR",
+ "Compressing $TARGET with gzip",
)
def GZipTool(env, target, source, **kwargs):
diff --git a/site_scons/site_tools/icecream.py b/site_scons/site_tools/icecream.py
index c95a542f008..f7ce0ecd23c 100644
--- a/site_scons/site_tools/icecream.py
+++ b/site_scons/site_tools/icecream.py
@@ -87,9 +87,8 @@ def generate(env):
# icecc lower then 1.1 supports addfile remapping accidentally
# and above it adds an empty cpuinfo so handle cpuinfo issues for icecream
# below version 1.1
- if (env['ICECREAM_VERSION'] <= parse_version('1.1')
- and env.ToolchainIs("clang")
- and os.path.exists('/proc/cpuinfo')):
+ if (env['ICECREAM_VERSION'] <= parse_version('1.1') and env.ToolchainIs("clang")
+ and os.path.exists('/proc/cpuinfo')):
env.AppendUnique(ICECC_CREATE_ENV_ADDFILES=[('/proc/cpuinfo', '/dev/null')])
# Absoluteify, so we can derive ICERUN
@@ -115,9 +114,10 @@ def generate(env):
env["CXX"] = env.WhereIs("$CXX")
# Set up defaults for configuration options
- env['ICECREAM_TARGET_DIR'] = env.Dir(
- env.get('ICECREAM_TARGET_DIR', '#./.icecream')
- )
+ env['ICECREAM_TARGET_DIR'] = env.Dir(env.get(
+ 'ICECREAM_TARGET_DIR',
+ '#./.icecream',
+ ), )
verbose = env.get('ICECREAM_VERBOSE', False)
env['ICECC_DEBUG'] = env.get('ICECC_DEBUG', False)
@@ -125,9 +125,7 @@ def generate(env):
# environment doesn't need to see or know about. Make a custom env
# that we use consistently from here to where we end up setting
# ICECREAM_RUN_ICECC in the user env.
- setupEnv = env.Clone(
- NINJA_SKIP=True
- )
+ setupEnv = env.Clone(NINJA_SKIP=True)
if 'ICECC_VERSION' in setupEnv and bool(setupEnv['ICECC_VERSION']):
@@ -161,7 +159,8 @@ def generate(env):
source=[setupEnv.Value(quoted)],
action=SCons.Action.Action(
f"{cmdstr} -o $TARGET $ICECC_VERSION_URL",
- "Downloading compiler package from $ICECC_VERSION_URL" if not verbose else str(),
+ "Downloading compiler package from $ICECC_VERSION_URL"
+ if not verbose else str(),
),
)[0]
@@ -171,8 +170,8 @@ def generate(env):
if not icecc_version_file.exists():
raise Exception(
- 'The ICECC_VERSION variable set set to {}, but this file does not exist'.format(icecc_version_file)
- )
+ 'The ICECC_VERSION variable set set to {}, but this file does not exist'.format(
+ icecc_version_file, ))
# This is what we are going to call the file names as known to SCons on disk
setupEnv["ICECC_VERSION_ID"] = "user_provided." + icecc_version_file.name
@@ -180,27 +179,27 @@ def generate(env):
else:
setupEnv["ICECC_COMPILER_TYPE"] = setupEnv.get(
- "ICECC_COMPILER_TYPE", os.path.basename(setupEnv.WhereIs("${CC}"))
+ "ICECC_COMPILER_TYPE",
+ os.path.basename(setupEnv.WhereIs("${CC}")),
)
# This is what we are going to call the file names as known to SCons on disk. We do the
# subst early so that we can call `replace` on the result.
setupEnv["ICECC_VERSION_ID"] = setupEnv.subst(
- "icecc-create-env.${CC}${CXX}.tar.gz").replace("/", "_"
- )
+ "icecc-create-env.${CC}${CXX}.tar.gz").replace("/", "_")
setupEnv["ICECC_VERSION"] = icecc_version_file = setupEnv.Command(
target="$ICECREAM_TARGET_DIR/$ICECC_VERSION_ID",
source=[
"$ICECC_CREATE_ENV",
"$CC",
- "$CXX"
+ "$CXX",
],
action=SCons.Action.Action(
icecc_create_env,
"Generating icecream compiler package: $TARGET" if not verbose else str(),
generator=True,
- )
+ ),
)[0]
# At this point, all paths above have produced a file of some sort. We now move on
@@ -234,38 +233,37 @@ def generate(env):
# file as found on the users filesystem or from
# icecc-create-env. We put the absolute path to that filename into
# a file that we can read from.
- icecc_version_info = setupEnv.File(setupEnv.Command(
- target=[
- '${ICECREAM_TARGET_BASE}.sha256',
- '${ICECREAM_TARGET_BASE}.sha256.path',
- ],
- source=icecc_version_file,
- action=SCons.Action.ListAction(
- [
-
- # icecc-create-env run twice with the same input will
- # create files with identical contents, and identical
- # filenames, but with different hashes because it
- # includes timestamps. So we compute a new hash based
- # on the actual stream contents of the file by
- # untarring it into shasum.
- SCons.Action.Action(
- "tar xfO ${SOURCES[0]} | shasum -b -a 256 - | awk '{ print $1 }' > ${TARGETS[0]}",
- "Calculating sha256 sum of ${SOURCES[0]}" if not verbose else str(),
- ),
-
- SCons.Action.Action(
- "ln -f ${SOURCES[0]} ${TARGETS[0].dir}/icecream_py_sha256_$$(cat ${TARGETS[0]}).tar.gz",
- "Linking ${SOURCES[0]} to its sha256 sum name" if not verbose else str(),
- ),
-
- SCons.Action.Action(
- "echo ${TARGETS[0].dir.abspath}/icecream_py_sha256_$$(cat ${TARGETS[0]}).tar.gz > ${TARGETS[1]}",
- "Storing sha256 sum name for ${SOURCES[0]} to ${TARGETS[1]}" if not verbose else str(),
- )
+ icecc_version_info = setupEnv.File(
+ setupEnv.Command(
+ target=[
+ '${ICECREAM_TARGET_BASE}.sha256',
+ '${ICECREAM_TARGET_BASE}.sha256.path',
],
- )
- ))
+ source=icecc_version_file,
+ action=SCons.Action.ListAction(
+ [
+
+ # icecc-create-env run twice with the same input will
+ # create files with identical contents, and identical
+ # filenames, but with different hashes because it
+ # includes timestamps. So we compute a new hash based
+ # on the actual stream contents of the file by
+ # untarring it into shasum.
+ SCons.Action.Action(
+ "tar xfO ${SOURCES[0]} | shasum -b -a 256 - | awk '{ print $1 }' > ${TARGETS[0]}",
+ "Calculating sha256 sum of ${SOURCES[0]}" if not verbose else str(),
+ ),
+ SCons.Action.Action(
+ "ln -f ${SOURCES[0]} ${TARGETS[0].dir}/icecream_py_sha256_$$(cat ${TARGETS[0]}).tar.gz",
+ "Linking ${SOURCES[0]} to its sha256 sum name" if not verbose else str(),
+ ),
+ SCons.Action.Action(
+ "echo ${TARGETS[0].dir.abspath}/icecream_py_sha256_$$(cat ${TARGETS[0]}).tar.gz > ${TARGETS[1]}",
+ "Storing sha256 sum name for ${SOURCES[0]} to ${TARGETS[1]}"
+ if not verbose else str(),
+ ),
+ ], ),
+ ), )
# We can't allow these to interact with the cache because the
# second action produces a file unknown to SCons. If caching were
@@ -280,13 +278,11 @@ def generate(env):
# wrapper script.
icecc_version_string_value = setupEnv.Command(
target=setupEnv.Value(None),
- source=[
- icecc_version_info[1]
- ],
+ source=[icecc_version_info[1]],
action=SCons.Action.Action(
lambda env, target, source: target[0].write(source[0].get_text_contents()),
"Reading compiler package sha256 sum path from $SOURCE" if not verbose else str(),
- )
+ ),
)[0]
def icecc_version_string_generator(source, target, env, for_signature):
@@ -319,9 +315,9 @@ def generate(env):
'',
],
SUBST_DICT={
- '@icecc@' : '$ICECC',
- '@icecc_version@' : '$ICECC_VERSION',
- '@icecc_version_arch@' : icecc_version_arch_string,
+ '@icecc@': '$ICECC',
+ '@icecc_version@': '$ICECC_VERSION',
+ '@icecc_version_arch@': icecc_version_arch_string,
},
# Don't change around the suffixes
@@ -333,7 +329,7 @@ def generate(env):
# so that it knows to invoke SCons to produce it as part of
# TEMPLATE expansion. Since we have set NINJA_SKIP=True for
# setupEnv, we need to reverse that here.
- NINJA_SKIP=False
+ NINJA_SKIP=False,
)
setupEnv.AddPostAction(
@@ -405,8 +401,7 @@ def generate(env):
continue
base = emitterdict[suffix]
emitterdict[suffix] = SCons.Builder.ListEmitter(
- [base, icecc_toolchain_dependency_emitter]
- )
+ [base, icecc_toolchain_dependency_emitter], )
# Check whether ccache is requested and is a valid tool.
if "CCACHE" in env:
@@ -479,10 +474,10 @@ def generate(env):
shell_env = existing_gen(env, target, source)
else:
shell_env = env['ENV'].copy()
- shell_env['CCACHE_PREFIX'] = env.File(env.subst("$ICECC_GENERATOR", target=target, source=source)).abspath
+ shell_env['CCACHE_PREFIX'] = env.File(
+ env.subst("$ICECC_GENERATOR", target=target, source=source)).abspath
return shell_env
-
env['SHELL_ENV_GENERATOR'] = icecc_ccache_prefix_gen
else:
@@ -508,9 +503,10 @@ def generate(env):
# jobs, figure out what sort they are and extend this part of the
# setup.
def icerun_generator(target, source, env, for_signature):
- if "conftest" not in str(target[0]):
- return '$ICERUN'
- return ''
+ if "conftest" not in str(target[0]):
+ return '$ICERUN'
+ return ''
+
env['ICERUN_GENERATOR'] = icerun_generator
icerun_commands = [
@@ -575,7 +571,9 @@ def exists(env):
else:
icecc_create_env_bin = env.File("ICECC").File("icecc-create-env")
if not icecc_create_env_bin:
- print(f"Error: the icecc-create-env utility does not exist at {icecc_create_env_bin} as expected")
+ print(
+ f"Error: the icecc-create-env utility does not exist at {icecc_create_env_bin} as expected"
+ )
for line in pipe.stdout:
line = line.decode("utf-8")
@@ -594,6 +592,8 @@ def exists(env):
if validated:
env['ICECREAM_VERSION'] = icecc_version
else:
- print(f"Error: failed to verify icecream version >= {_icecream_version_min}, found {icecc_version}")
+ print(
+ f"Error: failed to verify icecream version >= {_icecream_version_min}, found {icecc_version}"
+ )
return validated
diff --git a/site_scons/site_tools/idl_tool.py b/site_scons/site_tools/idl_tool.py
index 04b0db0cd62..dd3b5b65ee3 100755
--- a/site_scons/site_tools/idl_tool.py
+++ b/site_scons/site_tools/idl_tool.py
@@ -19,7 +19,6 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-
"""IDL Compiler Scons Tool."""
import os.path
@@ -39,9 +38,7 @@ def idlc_emitter(target, source, env):
first_source = str(source[0])
if not first_source.endswith(".idl"):
- raise ValueError(
- "Bad idl file name '%s', it must end with '.idl' " % (first_source)
- )
+ raise ValueError("Bad idl file name '%s', it must end with '.idl' " % (first_source))
base_file_name, _ = SCons.Util.splitext(str(target[0]))
target_source = env.File(base_file_name + "_gen.cpp")
@@ -79,16 +76,12 @@ def idl_scanner(node, env, path):
try:
with open(str(node), encoding="utf-8") as file_stream:
- parsed_doc = idlc.parser.parse(
- file_stream, str(node), resolver
- )
+ parsed_doc = idlc.parser.parse(file_stream, str(node), resolver)
except OSError:
return nodes_deps_list
if not parsed_doc.errors and parsed_doc.spec.imports is not None:
- nodes_deps_list.extend(
- [env.File(d) for d in sorted(parsed_doc.spec.imports.dependencies)]
- )
+ nodes_deps_list.extend([env.File(d) for d in sorted(parsed_doc.spec.imports.dependencies)])
setattr(node.attributes, "IDL_NODE_DEPS", nodes_deps_list)
return nodes_deps_list
@@ -122,20 +115,20 @@ def generate(env):
env["IDLC"] = "$PYTHON buildscripts/idl/idlc.py"
base_dir = env.Dir("$BUILD_DIR").path
env["IDLCFLAGS"] = [
- "--include", "src",
- "--base_dir", base_dir,
- "--target_arch", "$TARGET_ARCH",
+ "--include",
+ "src",
+ "--base_dir",
+ base_dir,
+ "--target_arch",
+ "$TARGET_ARCH",
]
env["IDLCCOM"] = "$IDLC $IDLCFLAGS --header ${TARGETS[1]} --output ${TARGETS[0]} $SOURCES"
env["IDLCCOMSTR"] = ("Generating ${TARGETS[0]}"
- if not env.get("VERBOSE", "").lower() in ['true', '1']
- else None)
+ if not env.get("VERBOSE", "").lower() in ['true', '1'] else None)
env["IDLCSUFFIX"] = ".idl"
global IDL_GLOBAL_DEPS
- IDL_GLOBAL_DEPS = env.Glob("#buildscripts/idl/*.py") + env.Glob(
- "#buildscripts/idl/idl/*.py"
- )
+ IDL_GLOBAL_DEPS = env.Glob("#buildscripts/idl/*.py") + env.Glob("#buildscripts/idl/idl/*.py")
env["IDL_HAS_INLINE_DEPENDENCIES"] = True
diff --git a/site_scons/site_tools/incremental_link.py b/site_scons/site_tools/incremental_link.py
index ebcf3a87dcc..9a9cf3748c4 100644
--- a/site_scons/site_tools/incremental_link.py
+++ b/site_scons/site_tools/incremental_link.py
@@ -32,9 +32,10 @@ def generate(env):
builders = env["BUILDERS"]
for builder in ("Program", "SharedLibrary", "LoadableModule"):
emitter = builders[builder].emitter
- builders[builder].emitter = SCons.Builder.ListEmitter(
- [emitter, _tag_as_precious,]
- )
+ builders[builder].emitter = SCons.Builder.ListEmitter([
+ emitter,
+ _tag_as_precious,
+ ])
def exists(env):
@@ -46,12 +47,8 @@ def exists(env):
# On posix platforms, excluding darwin, we may have enabled
# incremental linking. Check for the relevant flags.
- if (
- env.TargetOSIs("posix")
- and not env.TargetOSIs("darwin")
- and "-fuse-ld=gold" in env["LINKFLAGS"]
- and "-Wl,--incremental" in env["LINKFLAGS"]
- ):
+ if (env.TargetOSIs("posix") and not env.TargetOSIs("darwin")
+ and "-fuse-ld=gold" in env["LINKFLAGS"] and "-Wl,--incremental" in env["LINKFLAGS"]):
return True
return False
diff --git a/site_scons/site_tools/jstoh.py b/site_scons/site_tools/jstoh.py
index 912c495891f..adcb69ed2c5 100755
--- a/site_scons/site_tools/jstoh.py
+++ b/site_scons/site_tools/jstoh.py
@@ -52,10 +52,8 @@ def jsToHeader(target, source):
h.append("0};")
# symbols aren't exported w/o this
h.append("extern const JSFile %s;" % objname)
- h.append(
- 'const JSFile %s = { "%s", StringData(%s, sizeof(%s) - 1) };'
- % (objname, filename.replace("\\", "/"), stringname, stringname)
- )
+ h.append('const JSFile %s = { "%s", StringData(%s, sizeof(%s) - 1) };' %
+ (objname, filename.replace("\\", "/"), stringname, stringname))
h.append("} // namespace JSFiles")
h.append("} // namespace mongo")
diff --git a/site_scons/site_tools/mongo_benchmark.py b/site_scons/site_tools/mongo_benchmark.py
index 5fe35b038d3..e52e03a6809 100644
--- a/site_scons/site_tools/mongo_benchmark.py
+++ b/site_scons/site_tools/mongo_benchmark.py
@@ -19,7 +19,6 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-
"""
Pseudo-builders for building and registering benchmarks.
"""
@@ -27,6 +26,7 @@ from SCons.Script import Action
from site_scons.mongo import insort_wrapper
+
def exists(env):
return True
@@ -54,9 +54,8 @@ def build_benchmark(env, target, source, **kwargs):
benchmark_test_components = {"tests"}
if "AIB_COMPONENTS_EXTRA" in kwargs:
- benchmark_test_components = set(kwargs["AIB_COMPONENTS_EXTRA"]).union(
- benchmark_test_components
- )
+ benchmark_test_components = set(
+ kwargs["AIB_COMPONENTS_EXTRA"]).union(benchmark_test_components)
kwargs["AIB_COMPONENTS_EXTRA"] = list(benchmark_test_components)
diff --git a/site_scons/site_tools/mongo_integrationtest.py b/site_scons/site_tools/mongo_integrationtest.py
index cbaadeb610d..af400ab805e 100644
--- a/site_scons/site_tools/mongo_integrationtest.py
+++ b/site_scons/site_tools/mongo_integrationtest.py
@@ -19,7 +19,6 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-
"""
Pseudo-builders for building and registering integration tests.
"""
@@ -27,6 +26,7 @@ from SCons.Script import Action
from site_scons.mongo import insort_wrapper
+
def exists(env):
return True
@@ -48,9 +48,8 @@ def build_cpp_integration_test(env, target, source, **kwargs):
integration_test_components = {"tests"}
if "AIB_COMPONENTS_EXTRA" in kwargs:
- kwargs["AIB_COMPONENTS_EXTRA"] = set(kwargs["AIB_COMPONENTS_EXTRA"]).union(
- integration_test_components
- )
+ kwargs["AIB_COMPONENTS_EXTRA"] = set(
+ kwargs["AIB_COMPONENTS_EXTRA"]).union(integration_test_components)
else:
kwargs["AIB_COMPONENTS_EXTRA"] = list(integration_test_components)
diff --git a/site_scons/site_tools/mongo_libfuzzer.py b/site_scons/site_tools/mongo_libfuzzer.py
index bcbc0412688..90a0db807c7 100644
--- a/site_scons/site_tools/mongo_libfuzzer.py
+++ b/site_scons/site_tools/mongo_libfuzzer.py
@@ -19,7 +19,6 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-
"""Pseudo-builders for building and registering libfuzzer tests.
"""
from SCons.Script import Action
@@ -48,17 +47,14 @@ def build_cpp_libfuzzer_test(env, target, source, **kwargs):
myenv.Prepend(LINKFLAGS=[sanitizer_option])
libfuzzer_test_components = {"tests", "fuzzertests"}
- if "AIB_COMPONENT" in kwargs and not kwargs["AIB_COMPONENTS"].endswith(
- "-fuzzertest"
- ):
+ if "AIB_COMPONENT" in kwargs and not kwargs["AIB_COMPONENTS"].endswith("-fuzzertest"):
kwargs["AIB_COMPONENT"] += "-fuzzertest"
if "AIB_COMPONENTS_EXTRA" in kwargs:
- libfuzzer_test_components = set(kwargs["AIB_COMPONENTS_EXTRA"]).union(
- libfuzzer_test_components
- )
+ libfuzzer_test_components = set(
+ kwargs["AIB_COMPONENTS_EXTRA"]).union(libfuzzer_test_components)
- kwargs["AIB_COMPONENTS_EXTRA"] = list(libfuzzer_test_components)
+ kwargs["AIB_COMPONENTS_EXTRA"] = list(libfuzzer_test_components)
# Fuzzer tests are inherenently undecidable (see
# mongo_test_execution.py for details on undecidability).
diff --git a/site_scons/site_tools/mongo_test_execution.py b/site_scons/site_tools/mongo_test_execution.py
index 2527ba63a12..5c233ef97d0 100644
--- a/site_scons/site_tools/mongo_test_execution.py
+++ b/site_scons/site_tools/mongo_test_execution.py
@@ -29,6 +29,7 @@ import auto_install_binaries
_proof_scanner_cache_key = "proof_scanner_cache"
_associated_proof = "associated_proof_key"
+
def proof_generator_command_scanner_func(node, env, path):
results = getattr(node.attributes, _proof_scanner_cache_key, None)
if results is not None:
@@ -37,20 +38,22 @@ def proof_generator_command_scanner_func(node, env, path):
setattr(node.attributes, _proof_scanner_cache_key, results)
return results
+
proof_generator_command_scanner = SCons.Scanner.Scanner(
function=proof_generator_command_scanner_func,
path_function=None,
- recursive=True
+ recursive=True,
)
+
def auto_prove_task(env, component, role):
entry = auto_install_binaries.get_alias_map_entry(env, component, role)
return [
- getattr(f.attributes, _associated_proof)
- for f in entry.files
+ getattr(f.attributes, _associated_proof) for f in entry.files
if hasattr(f.attributes, _associated_proof)
]
+
def generate_test_execution_aliases(env, test):
installed = [test]
if env.get("AUTO_INSTALL_ENABLED", False) and env.GetAutoInstalledFiles(test):
@@ -90,7 +93,8 @@ def generate_test_execution_aliases(env, test):
verbose_source_command = test_env.Command(
target=f"#+{target_name}-{source_name}",
source=installed[0],
- action="$( $ICERUN $) ${SOURCES[0]} -fileNameFilter $TEST_SOURCE_FILE_NAME $UNITTEST_FLAGS",
+ action=
+ "$( $ICERUN $) ${SOURCES[0]} -fileNameFilter $TEST_SOURCE_FILE_NAME $UNITTEST_FLAGS",
TEST_SOURCE_FILE_NAME=source_name,
NINJA_POOL="console",
)
@@ -102,7 +106,10 @@ def generate_test_execution_aliases(env, test):
alias = env.Alias(f'+{source_name}', verbose_source_command)
if len(alias[0].children()) > 1:
- raise SCons.Errors.BuildError(alias[0].children()[0], f"Multiple unit test programs contain a source file named '{source_name}' which would result in an ambiguous test execution alias. Unit test source filenames are required to be globally unique.")
+ raise SCons.Errors.BuildError(
+ alias[0].children()[0],
+ f"Multiple unit test programs contain a source file named '{source_name}' which would result in an ambiguous test execution alias. Unit test source filenames are required to be globally unique."
+ )
proof_generator_command = test_env.Command(
target=[
@@ -110,11 +117,8 @@ def generate_test_execution_aliases(env, test):
'${SOURCE}.status',
],
source=installed[0],
- action=SCons.Action.Action(
- "$PROOF_GENERATOR_COMMAND",
- "$PROOF_GENERATOR_COMSTR"
- ),
- source_scanner=proof_generator_command_scanner
+ action=SCons.Action.Action("$PROOF_GENERATOR_COMMAND", "$PROOF_GENERATOR_COMSTR"),
+ source_scanner=proof_generator_command_scanner,
)
# We assume tests are provable by default, but some tests may not
@@ -128,10 +132,7 @@ def generate_test_execution_aliases(env, test):
proof_analyzer_command = test_env.Command(
target='${SOURCES[1].base}.proof',
source=proof_generator_command,
- action=SCons.Action.Action(
- "$PROOF_ANALYZER_COMMAND",
- "$PROOF_ANALYZER_COMSTR"
- )
+ action=SCons.Action.Action("$PROOF_ANALYZER_COMMAND", "$PROOF_ANALYZER_COMSTR"),
)
proof_analyzer_alias = env.Alias(
@@ -143,6 +144,7 @@ def generate_test_execution_aliases(env, test):
# TODO: Should we enable proof at the file level?
+
def exists(env):
return True
@@ -153,14 +155,13 @@ def generate(env):
env.AddMethod(generate_test_execution_aliases, "GenerateTestExecutionAliases")
env["TEST_EXECUTION_SUFFIX_DENYLIST"] = env.get(
- "TEST_EXECUTION_SUFFIX_DENYLIST", [".in"]
+ "TEST_EXECUTION_SUFFIX_DENYLIST",
+ [".in"],
)
- env.AppendUnique(
- AIB_TASKS={
- "prove": (auto_prove_task, False),
- }
- )
+ env.AppendUnique(AIB_TASKS={
+ "prove": (auto_prove_task, False),
+ })
# TODO: Should we have some sort of prefix_xdir for the output location for these? Something like
# $PREFIX_VARCACHE and which in our build is pre-populated to $PREFIX/var/cache/mongo or similar?
diff --git a/site_scons/site_tools/mongo_test_list.py b/site_scons/site_tools/mongo_test_list.py
index 1b02c52eb8e..a000c85cca2 100644
--- a/site_scons/site_tools/mongo_test_list.py
+++ b/site_scons/site_tools/mongo_test_list.py
@@ -19,7 +19,6 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-
"""Pseudo-builders for building test lists for Resmoke"""
import SCons
@@ -63,9 +62,9 @@ def test_list_builder_action(env, target, source):
TEST_LIST_BUILDER = SCons.Builder.Builder(
action=SCons.Action.FunctionAction(
- test_list_builder_action, {"cmdstr": "Generating $TARGETS"},
- )
-)
+ test_list_builder_action,
+ {"cmdstr": "Generating $TARGETS"},
+ ))
def exists(env):
diff --git a/site_scons/site_tools/mongo_unittest.py b/site_scons/site_tools/mongo_unittest.py
index 33373282606..f06a64e191d 100644
--- a/site_scons/site_tools/mongo_unittest.py
+++ b/site_scons/site_tools/mongo_unittest.py
@@ -19,12 +19,12 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-
"""Pseudo-builders for building and registering unit tests."""
from SCons.Script import Action
from site_scons.mongo import insort_wrapper
+
def exists(env):
return True
@@ -53,9 +53,8 @@ def build_cpp_unit_test(env, target, source, **kwargs):
unit_test_components = {"tests"}
if "AIB_COMPONENTS_EXTRA" in kwargs:
- kwargs["AIB_COMPONENTS_EXTRA"] = set(kwargs["AIB_COMPONENTS_EXTRA"]).union(
- unit_test_components
- )
+ kwargs["AIB_COMPONENTS_EXTRA"] = set(
+ kwargs["AIB_COMPONENTS_EXTRA"]).union(unit_test_components)
else:
kwargs["AIB_COMPONENTS_EXTRA"] = list(unit_test_components)
diff --git a/site_scons/site_tools/ninja.py b/site_scons/site_tools/ninja.py
index 1c76bd92478..df42f03f884 100644
--- a/site_scons/site_tools/ninja.py
+++ b/site_scons/site_tools/ninja.py
@@ -19,7 +19,6 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-
"""Generate build.ninja files from SCons aliases."""
import sys
@@ -79,9 +78,10 @@ def _mkdir_action_function(env, node):
# to an invalid ninja file.
"variables": {
# On Windows mkdir "-p" is always on
- "cmd": "mkdir {args}".format(
- args = ' '.join(get_outputs(node)) + " & exit /b 0" if env["PLATFORM"] == "win32" else "-p " + ' '.join(get_outputs(node)),
- ),
+ "cmd":
+ "mkdir {args}".format(
+ args=' '.join(get_outputs(node)) + " & exit /b 0"
+ if env["PLATFORM"] == "win32" else "-p " + ' '.join(get_outputs(node)), ),
},
}
@@ -130,9 +130,7 @@ def alias_to_ninja_build(node):
return {
"outputs": get_outputs(node),
"rule": "phony",
- "implicit": [
- get_path(src_file(n)) for n in node.children() if is_valid_dependent_node(n)
- ],
+ "implicit": [get_path(src_file(n)) for n in node.children() if is_valid_dependent_node(n)],
}
@@ -140,18 +138,22 @@ def get_order_only(node):
"""Return a list of order only dependencies for node."""
if node.prerequisites is None:
return []
- return [get_path(src_file(prereq)) for prereq in node.prerequisites if is_valid_dependent_node(prereq)]
+ return [
+ get_path(src_file(prereq)) for prereq in node.prerequisites
+ if is_valid_dependent_node(prereq)
+ ]
def get_dependencies(node, skip_sources=False):
"""Return a list of dependencies for node."""
if skip_sources:
return [
- get_path(src_file(child))
- for child in node.children()
+ get_path(src_file(child)) for child in node.children()
if child not in node.sources and is_valid_dependent_node(child)
]
- return [get_path(src_file(child)) for child in node.children() if is_valid_dependent_node(child)]
+ return [
+ get_path(src_file(child)) for child in node.children() if is_valid_dependent_node(child)
+ ]
def get_inputs(node, skip_unknown_types=False):
@@ -180,10 +182,12 @@ def get_inputs(node, skip_unknown_types=False):
else:
if skip_unknown_types:
continue
- raise Exception("Can't process {} node '{}' as an input for '{}'".format(
- type(input_node),
- str(input_node),
- str(node)))
+ raise Exception(
+ "Can't process {} node '{}' as an input for '{}'".format(
+ type(input_node),
+ str(input_node),
+ str(node),
+ ), )
# convert node items into raw paths/aliases for ninja
return [get_path(src_file(o)) for o in ninja_nodes]
@@ -204,6 +208,7 @@ def get_outputs(node):
return outputs
+
def generate_depfile(env, node, dependencies):
"""
Ninja tool function for writing a depfile. The depfile should include
@@ -239,6 +244,7 @@ def generate_depfile(env, node, dependencies):
with open(depfile, 'w') as f:
f.write(depfile_contents)
+
class SConsToNinjaTranslator:
"""Translates SCons Actions into Ninja build objects."""
@@ -330,25 +336,19 @@ class SConsToNinjaTranslator:
if handler is not None:
return handler(node.env if node.env else self.env, node)
- raise Exception(
- "Found unhandled function action {}, "
- " generating scons command to build\n"
- "Note: this is less efficient than Ninja,"
- " you can write your own ninja build generator for"
- " this function using NinjaRegisterFunctionHandler".format(name)
- )
+ raise Exception("Found unhandled function action {}, "
+ " generating scons command to build\n"
+ "Note: this is less efficient than Ninja,"
+ " you can write your own ninja build generator for"
+ " this function using NinjaRegisterFunctionHandler".format(name))
# pylint: disable=too-many-branches
def handle_list_action(self, node, action):
"""TODO write this comment"""
results = [
- self.action_to_ninja_build(node, action=act)
- for act in action.list
- if act is not None
- ]
- results = [
- result for result in results if result is not None and result["outputs"]
+ self.action_to_ninja_build(node, action=act) for act in action.list if act is not None
]
+ results = [result for result in results if result is not None and result["outputs"]]
if not results:
return None
@@ -450,22 +450,28 @@ class NinjaState:
scons_escape = env.get("ESCAPE", lambda x: x)
self.variables = {
- "COPY": "cmd.exe /c 1>NUL copy" if sys.platform == "win32" else "cp",
- "NOOP": "cmd.exe /c 1>NUL echo 0" if sys.platform == "win32" else "echo 0 >/dev/null",
- "SCONS_INVOCATION": "{} {} __NINJA_NO=1 $out".format(
- sys.executable,
- " ".join(
- [ninja_syntax.escape(scons_escape(arg)) for arg in sys.argv if arg not in COMMAND_LINE_TARGETS]
+ "COPY":
+ "cmd.exe /c 1>NUL copy" if sys.platform == "win32" else "cp",
+ "NOOP":
+ "cmd.exe /c 1>NUL echo 0" if sys.platform == "win32" else "echo 0 >/dev/null",
+ "SCONS_INVOCATION":
+ "{} {} __NINJA_NO=1 $out".format(
+ sys.executable,
+ " ".join([
+ ninja_syntax.escape(scons_escape(arg)) for arg in sys.argv
+ if arg not in COMMAND_LINE_TARGETS
+ ]),
),
- ),
- "SCONS_INVOCATION_W_TARGETS": "{} {}".format(
- sys.executable, " ".join([ninja_syntax.escape(scons_escape(arg)) for arg in sys.argv])
- ),
+ "SCONS_INVOCATION_W_TARGETS":
+ "{} {}".format(
+ sys.executable,
+ " ".join([ninja_syntax.escape(scons_escape(arg)) for arg in sys.argv])),
# This must be set to a global default per:
# https://ninja-build.org/manual.html
#
# (The deps section)
- "msvc_deps_prefix": "Note: including file:",
+ "msvc_deps_prefix":
+ "Note: including file:",
}
self.rules = {
@@ -505,20 +511,21 @@ class NinjaState:
# to do the same. See related for more info:
# https://jira.mongodb.org/browse/SERVER-49457
"AR": {
- "command": "{}$env$AR @$out.rsp".format(
- '' if sys.platform == "win32" else "rm -f $out && "
- ),
- "description": "Archiving $out",
- "rspfile": "$out.rsp",
- "rspfile_content": "$rspc",
- "pool": "local_pool",
+ "command":
+ "{}$env$AR @$out.rsp".format('' if sys.platform == "win32" else "rm -f $out && "
+ ),
+ "description":
+ "Archiving $out",
+ "rspfile":
+ "$out.rsp",
+ "rspfile_content":
+ "$rspc",
+ "pool":
+ "local_pool",
},
"SYMLINK": {
"command": (
- "cmd /c mklink $out $in"
- if sys.platform == "win32"
- else "ln -s $in $out"
- ),
+ "cmd /c mklink $out $in" if sys.platform == "win32" else "ln -s $in $out"),
"description": "Symlink $in -> $out",
},
"NOOP": {
@@ -678,20 +685,17 @@ class NinjaState:
if generated_sources_alias and generated_sources_build:
generated_source_files = sorted(
- [] if not generated_sources_build else generated_sources_build['implicit']
- )
+ [] if not generated_sources_build else generated_sources_build['implicit'])
+
def check_generated_source_deps(build):
- return (
- build != generated_sources_build
- and set(build["outputs"]).isdisjoint(generated_source_files)
- )
+ return (build != generated_sources_build
+ and set(build["outputs"]).isdisjoint(generated_source_files))
else:
generated_sources_build = None
generated_source_files = sorted({
output
# First find builds which have header files in their outputs.
- for build in self.builds.values()
- if self.has_generated_sources(build["outputs"])
+ for build in self.builds.values() if self.has_generated_sources(build["outputs"])
for output in build["outputs"]
# Collect only the header files from the builds with them
# in their output. We do this because is_generated_source
@@ -706,14 +710,13 @@ class NinjaState:
ninja.build(
outputs=generated_sources_alias,
rule="phony",
- implicit=generated_source_files
+ implicit=generated_source_files,
)
+
def check_generated_source_deps(build):
- return (
- not build["rule"] == "INSTALL"
- and set(build["outputs"]).isdisjoint(generated_source_files)
- and set(build.get("implicit", [])).isdisjoint(generated_source_files)
- )
+ return (not build["rule"] == "INSTALL"
+ and set(build["outputs"]).isdisjoint(generated_source_files)
+ and set(build.get("implicit", [])).isdisjoint(generated_source_files))
template_builders = []
@@ -730,10 +733,7 @@ class NinjaState:
# sources and none of the direct implicit dependencies are
# generated sources or else we will create a dependency
# cycle.
- if (
- generated_source_files
- and check_generated_source_deps(build)
- ):
+ if (generated_source_files and check_generated_source_deps(build)):
# Make all non-generated source targets depend on
# _generated_sources. We use order_only for generated
@@ -787,7 +787,9 @@ class NinjaState:
if remaining_outputs:
ninja.build(
- outputs=sorted(remaining_outputs), rule="phony", implicit=first_output,
+ outputs=sorted(remaining_outputs),
+ rule="phony",
+ implicit=first_output,
)
build["outputs"] = first_output
@@ -799,7 +801,8 @@ class NinjaState:
# be repurposed for anything, as long as you have a way to regenerate the depfile.
# More specific info can be found here: https://ninja-build.org/manual.html#_depfile
if rule is not None and rule.get('depfile') and build.get('deps_files'):
- path = build['outputs'] if SCons.Util.is_List(build['outputs']) else [build['outputs']]
+ path = build['outputs'] if SCons.Util.is_List(
+ build['outputs']) else [build['outputs']]
generate_depfile(self.env, path[0], build.pop('deps_files', []))
if "inputs" in build:
@@ -842,7 +845,8 @@ class NinjaState:
# list of build generation about. However, because the generate rule
# is hardcoded here, we need to do this generate_depfile call manually.
ninja_file_path = self.env.File(ninja_file).path
- ninja_in_file_path = os.path.join(get_path(self.env['NINJA_BUILDDIR']), os.path.basename(ninja_file)) + ".in"
+ ninja_in_file_path = os.path.join(
+ get_path(self.env['NINJA_BUILDDIR']), os.path.basename(ninja_file)) + ".in"
generate_depfile(
self.env,
ninja_in_file_path,
@@ -876,23 +880,23 @@ class NinjaState:
pool="console",
implicit=[ninja_file],
variables={
- "cmd": "ninja -f {} -t compdb {}CC CXX > compile_commands.json".format(
- ninja_file, '-x ' if self.env.get('NINJA_COMPDB_EXPAND') else ''
- )
+ "cmd":
+ "ninja -f {} -t compdb {}CC CXX > compile_commands.json".format(
+ ninja_file, '-x ' if self.env.get('NINJA_COMPDB_EXPAND') else '')
},
order_only=[generated_sources_alias],
)
ninja.build(
- "compiledb", rule="phony", implicit=["compile_commands.json"],
+ "compiledb",
+ rule="phony",
+ implicit=["compile_commands.json"],
)
# Look in SCons's list of DEFAULT_TARGETS, find the ones that
# we generated a ninja build rule for.
scons_default_targets = [
- get_path(tgt)
- for tgt in SCons.Script.DEFAULT_TARGETS
- if get_path(tgt) in self.built
+ get_path(tgt) for tgt in SCons.Script.DEFAULT_TARGETS if get_path(tgt) in self.built
]
# If we found an overlap between SCons's list of default
@@ -972,8 +976,7 @@ def get_command_env(env, target, source):
ENV = env.get('SHELL_ENV_GENERATOR', get_default_ENV)(env, target, source)
scons_specified_env = {
key: value
- for key, value in ENV.items()
- if key not in os.environ or os.environ.get(key, None) != value
+ for key, value in ENV.items() if key not in os.environ or os.environ.get(key, None) != value
}
windows = env["PLATFORM"] == "win32"
@@ -1002,7 +1005,8 @@ def get_command_env(env, target, source):
# doesn't make builds on paths with spaces (Ninja and SCons issues)
# nor expanding response file paths with spaces (Ninja issue) work.
value = value.replace(r' ', r'$ ')
- command_env += "export {}='{}';".format(key, env.subst(value, target=target, source=source))
+ command_env += "export {}='{}';".format(key,
+ env.subst(value, target=target, source=source))
env["NINJA_ENV_VAR_CACHE"] = command_env
return command_env
@@ -1030,15 +1034,11 @@ def gen_get_response_file_command(env, rule, tool, tool_is_dynamic=False, custom
cmd_list, _, _ = action.process(targets, sources, env, executor=executor)
cmd_list = [str(c).replace("$", "$$") for c in cmd_list[0]]
else:
- command = generate_command(
- env, node, action, targets, sources, executor=executor
- )
+ command = generate_command(env, node, action, targets, sources, executor=executor)
cmd_list = shlex.split(command)
if tool_is_dynamic:
- tool_command = env.subst(
- tool, target=targets, source=sources, executor=executor
- )
+ tool_command = env.subst(tool, target=targets, source=sources, executor=executor)
else:
tool_command = tool
@@ -1046,11 +1046,8 @@ def gen_get_response_file_command(env, rule, tool, tool_is_dynamic=False, custom
# Add 1 so we always keep the actual tool inside of cmd
tool_idx = cmd_list.index(tool_command) + 1
except ValueError:
- raise Exception(
- "Could not find tool {} in {} generated from {}".format(
- tool, cmd_list, get_comstr(env, action, targets, sources)
- )
- )
+ raise Exception("Could not find tool {} in {} generated from {}".format(
+ tool, cmd_list, get_comstr(env, action, targets, sources)))
cmd, rsp_content = cmd_list[:tool_idx], cmd_list[tool_idx:]
rsp_content = " ".join(rsp_content)
@@ -1062,7 +1059,10 @@ def gen_get_response_file_command(env, rule, tool, tool_is_dynamic=False, custom
for key, value in custom_env.items():
variables["env"] += env.subst(
- f"export {key}={value};", target=targets, source=sources, executor=executor
+ f"export {key}={value};",
+ target=targets,
+ source=sources,
+ executor=executor,
) + " "
return rule, variables, [tool_command]
@@ -1114,7 +1114,7 @@ def get_generic_shell_command(env, node, action, targets, sources, executor=None
# generally this function will not be used soley and is more like a template to generate
# the basics for a custom provider which may have more specific options for a provier
# function for a custom NinjaRuleMapping.
- []
+ [],
)
@@ -1151,7 +1151,14 @@ def get_command(env, node, action): # pylint: disable=too-many-branches
return None
provider = __NINJA_RULE_MAPPING.get(comstr, get_generic_shell_command)
- rule, variables, provider_deps = provider(sub_env, node, action, tlist, slist, executor=executor)
+ rule, variables, provider_deps = provider(
+ sub_env,
+ node,
+ action,
+ tlist,
+ slist,
+ executor=executor,
+ )
# Get the dependencies for all targets
implicit = list({dep for tgt in tlist for dep in get_dependencies(tgt)})
@@ -1174,7 +1181,8 @@ def get_command(env, node, action): # pylint: disable=too-many-branches
# in some case the tool could be in the local directory and be suppled without the ext
# such as in windows, so append the executable suffix and check.
prog_suffix = sub_env.get('PROGSUFFIX', '')
- provider_dep_ext = provider_dep if provider_dep.endswith(prog_suffix) else provider_dep + prog_suffix
+ provider_dep_ext = provider_dep if provider_dep.endswith(
+ prog_suffix) else provider_dep + prog_suffix
if os.path.exists(provider_dep_ext):
implicit.append(provider_dep_ext)
continue
@@ -1182,7 +1190,8 @@ def get_command(env, node, action): # pylint: disable=too-many-branches
# Many commands will assume the binary is in the path, so
# we accept this as a possible input from a given command.
- provider_dep_abspath = sub_env.WhereIs(provider_dep) or sub_env.WhereIs(provider_dep, path=os.environ["PATH"])
+ provider_dep_abspath = sub_env.WhereIs(provider_dep) or sub_env.WhereIs(
+ provider_dep, path=os.environ["PATH"])
if provider_dep_abspath:
implicit.append(provider_dep_abspath)
continue
@@ -1262,7 +1271,8 @@ def register_custom_rule_mapping(env, pre_subst_string, rule):
__NINJA_RULE_MAPPING[pre_subst_string] = rule
-def register_custom_rule(env, rule, command, description="", deps=None, pool=None, use_depfile=False, use_response_file=False, response_file_content="$rspc"):
+def register_custom_rule(env, rule, command, description="", deps=None, pool=None,
+ use_depfile=False, use_response_file=False, response_file_content="$rspc"):
"""Allows specification of Ninja rules from inside SCons files."""
rule_obj = {
"command": command,
@@ -1289,10 +1299,12 @@ def register_custom_pool(env, pool, size):
"""Allows the creation of custom Ninja pools"""
env[NINJA_POOLS][pool] = size
+
def set_build_node_callback(env, node, callback):
if 'conftest' not in str(node):
setattr(node.attributes, "ninja_build_callback", callback)
+
def ninja_csig(original):
"""Return a dummy csig"""
@@ -1316,6 +1328,7 @@ def ninja_contents(original):
return wrapper
+
def CheckNinjaCompdbExpand(env, context):
""" Configure check testing if ninja's compdb can expand response files"""
@@ -1333,11 +1346,13 @@ def CheckNinjaCompdbExpand(env, context):
cmd = echo
pool = console
rspc = "test"
- """))
+ """),
+ )
result = '@fake_output.txt.rsp' not in output
context.Result(result)
return result
+
def ninja_stat(_self, path):
"""
Eternally memoized stat call.
@@ -1464,9 +1479,13 @@ def generate(env):
# exists upstream: https://github.com/SCons/scons/issues/3625
def ninja_generate_deps(env):
return sorted([env.File("#SConstruct").path] + glob("**/SConscript", recursive=True))
+
env['_NINJA_REGENERATE_DEPS_FUNC'] = ninja_generate_deps
- env['NINJA_REGENERATE_DEPS'] = env.get('NINJA_REGENERATE_DEPS', '${_NINJA_REGENERATE_DEPS_FUNC(__env__)}')
+ env['NINJA_REGENERATE_DEPS'] = env.get(
+ 'NINJA_REGENERATE_DEPS',
+ '${_NINJA_REGENERATE_DEPS_FUNC(__env__)}',
+ )
# This adds the required flags such that the generated compile
# commands will create depfiles as appropriate in the Ninja file.
@@ -1515,12 +1534,8 @@ def generate(env):
from SCons.Tool.mslink import compositeLinkAction
if env["LINKCOM"] == compositeLinkAction:
- env[
- "LINKCOM"
- ] = '${TEMPFILE("$LINK $LINKFLAGS /OUT:$TARGET.windows $_LIBDIRFLAGS $_LIBFLAGS $_PDB $SOURCES.windows", "$LINKCOMSTR")}'
- env[
- "SHLINKCOM"
- ] = '${TEMPFILE("$SHLINK $SHLINKFLAGS $_SHLINK_TARGETS $_LIBDIRFLAGS $_LIBFLAGS $_PDB $_SHLINK_SOURCES", "$SHLINKCOMSTR")}'
+ env["LINKCOM"] = '${TEMPFILE("$LINK $LINKFLAGS /OUT:$TARGET.windows $_LIBDIRFLAGS $_LIBFLAGS $_PDB $SOURCES.windows", "$LINKCOMSTR")}'
+ env["SHLINKCOM"] = '${TEMPFILE("$SHLINK $SHLINKFLAGS $_SHLINK_TARGETS $_LIBDIRFLAGS $_LIBFLAGS $_PDB $_SHLINK_SOURCES", "$SHLINKCOMSTR")}'
# Normally in SCons actions for the Program and *Library builders
# will return "${*COM}" as their pre-subst'd command line. However
@@ -1612,12 +1627,8 @@ def generate(env):
# slows down the build significantly and we don't need contents or
# content signatures calculated when generating a ninja file since
# we're not doing any SCons caching or building.
- SCons.Executor.Executor.get_contents = ninja_contents(
- SCons.Executor.Executor.get_contents
- )
- SCons.Node.Alias.Alias.get_contents = ninja_contents(
- SCons.Node.Alias.Alias.get_contents
- )
+ SCons.Executor.Executor.get_contents = ninja_contents(SCons.Executor.Executor.get_contents)
+ SCons.Node.Alias.Alias.get_contents = ninja_contents(SCons.Node.Alias.Alias.get_contents)
SCons.Node.FS.File.get_contents = ninja_contents(SCons.Node.FS.File.get_contents)
SCons.Node.FS.File.get_csig = ninja_csig(SCons.Node.FS.File.get_csig)
SCons.Node.FS.Dir.get_csig = ninja_csig(SCons.Node.FS.Dir.get_csig)
@@ -1689,9 +1700,10 @@ def generate(env):
try:
emitter = builder.emitter
if emitter is not None:
- builder.emitter = SCons.Builder.ListEmitter(
- [emitter, ninja_file_depends_on_all]
- )
+ builder.emitter = SCons.Builder.ListEmitter([
+ emitter,
+ ninja_file_depends_on_all,
+ ], )
else:
builder.emitter = ninja_file_depends_on_all
# Users can inject whatever they want into the BUILDERS
diff --git a/site_scons/site_tools/separate_debug.py b/site_scons/site_tools/separate_debug.py
index 677ef75723e..08c78f4ef32 100644
--- a/site_scons/site_tools/separate_debug.py
+++ b/site_scons/site_tools/separate_debug.py
@@ -34,15 +34,15 @@ def _update_builder(env, builder):
if origin is not None:
origin_results = old_scanner(origin, env, path)
for origin_result in origin_results:
- origin_result_debug_files = getattr(
- origin_result.attributes, "separate_debug_files", None
- )
+ origin_result_debug_files = getattr(origin_result.attributes,
+ "separate_debug_files", None)
if origin_result_debug_files is not None:
results.extend(origin_result_debug_files)
return results
builder.target_scanner = SCons.Scanner.Scanner(
- function=new_scanner, path_function=old_path_function,
+ function=new_scanner,
+ path_function=old_path_function,
)
base_action = builder.action
@@ -57,31 +57,27 @@ def _update_builder(env, builder):
# setup from the etc/scons/xcode_*.vars files, which would be a
# win as well.
if env.TargetOSIs("darwin"):
- base_action.list.extend(
- [
- SCons.Action.Action(
- "$DSYMUTIL -num-threads 1 $TARGET -o ${TARGET}.dSYM",
- "$DSYMUTILCOMSTR"
- ),
- SCons.Action.Action(
- "$STRIP -S ${TARGET}",
- "$DEBUGSTRIPCOMSTR"
- ),
- ]
- )
+ base_action.list.extend([
+ SCons.Action.Action(
+ "$DSYMUTIL -num-threads 1 $TARGET -o ${TARGET}.dSYM",
+ "$DSYMUTILCOMSTR",
+ ),
+ SCons.Action.Action(
+ "$STRIP -S ${TARGET}",
+ "$DEBUGSTRIPCOMSTR",
+ ),
+ ])
elif env.TargetOSIs("posix"):
- base_action.list.extend(
- [
- SCons.Action.Action(
- "$OBJCOPY --only-keep-debug $TARGET ${TARGET}.debug",
- "$OBJCOPY_ONLY_KEEP_DEBUG_COMSTR"
- ),
- SCons.Action.Action(
- "$OBJCOPY --strip-debug --add-gnu-debuglink ${TARGET}.debug ${TARGET}",
- "$DEBUGSTRIPCOMSTR"
- ),
- ]
- )
+ base_action.list.extend([
+ SCons.Action.Action(
+ "$OBJCOPY --only-keep-debug $TARGET ${TARGET}.debug",
+ "$OBJCOPY_ONLY_KEEP_DEBUG_COMSTR",
+ ),
+ SCons.Action.Action(
+ "$OBJCOPY --strip-debug --add-gnu-debuglink ${TARGET}.debug ${TARGET}",
+ "$DEBUGSTRIPCOMSTR",
+ ),
+ ])
else:
pass
@@ -109,13 +105,15 @@ def _update_builder(env, builder):
plist_file = env.File("Contents/Info.plist", directory=dsym_dir)
setattr(plist_file.attributes, "aib_effective_suffix", ".dSYM")
- setattr(plist_file.attributes, "aib_additional_directory", "{}/Contents".format(dsym_dir_name))
+ setattr(plist_file.attributes, "aib_additional_directory",
+ "{}/Contents".format(dsym_dir_name))
dwarf_dir = env.Dir("Contents/Resources/DWARF", directory=dsym_dir)
dwarf_file = env.File(target0.name, directory=dwarf_dir)
setattr(dwarf_file.attributes, "aib_effective_suffix", ".dSYM")
- setattr(dwarf_file.attributes, "aib_additional_directory", "{}/Contents/Resources/DWARF".format(dsym_dir_name))
+ setattr(dwarf_file.attributes, "aib_additional_directory",
+ "{}/Contents/Resources/DWARF".format(dsym_dir_name))
debug_files.extend([plist_file, dwarf_file])
@@ -174,8 +172,10 @@ def generate(env):
if not env.Verbose():
env.Append(
- OBJCOPY_ONLY_KEEP_DEBUG_COMSTR="Generating debug info for $TARGET into ${TARGET}.dSYM",
- DEBUGSTRIPCOMSTR="Stripping debug info from ${TARGET} and adding .gnu.debuglink to ${TARGET}.debug",
+ OBJCOPY_ONLY_KEEP_DEBUG_COMSTR=
+ "Generating debug info for $TARGET into ${TARGET}.dSYM",
+ DEBUGSTRIPCOMSTR=
+ "Stripping debug info from ${TARGET} and adding .gnu.debuglink to ${TARGET}.debug",
)
for builder in ["Program", "SharedLibrary", "LoadableModule"]:
diff --git a/site_scons/site_tools/split_dwarf.py b/site_scons/site_tools/split_dwarf.py
index 710d828945a..72316dfb968 100644
--- a/site_scons/site_tools/split_dwarf.py
+++ b/site_scons/site_tools/split_dwarf.py
@@ -66,7 +66,10 @@ def generate(env):
if not suffix in suffixes:
continue
base = emitterdict[suffix]
- emitterdict[suffix] = SCons.Builder.ListEmitter([base, _dwo_emitter,])
+ emitterdict[suffix] = SCons.Builder.ListEmitter([
+ base,
+ _dwo_emitter,
+ ])
def exists(env):
diff --git a/site_scons/site_tools/tapilink.py b/site_scons/site_tools/tapilink.py
index 0521767fc06..d2fc4b8c340 100644
--- a/site_scons/site_tools/tapilink.py
+++ b/site_scons/site_tools/tapilink.py
@@ -26,6 +26,7 @@ import subprocess
# TODO: DRY this with abilink.py by moving duplicated code out to a common
# support module.
+
def _detect(env):
try:
tapi = env["TAPI"]
@@ -70,9 +71,11 @@ def _add_scanner(builder):
return (getattr(env.Entry(o).attributes, "tbd", o) for o in old_scanner(node, env, path))
builder.target_scanner = SCons.Scanner.Scanner(
- function=new_scanner, path_function=path_function
+ function=new_scanner,
+ path_function=path_function,
)
+
def _add_action(builder):
actions = builder.action
@@ -83,12 +86,11 @@ def _add_action(builder):
# invoking TAPI proves to be expensive, we could address this by
# instead post-processing the "real" .tbd file to strip out the
# UUID, and then potentially even feed it into a hash algorithm.
- builder.action = actions + SCons.Action.Action(
- [
- "$TAPI stubify -o ${TARGET.base}.tbd ${TARGET}",
- "$TAPI stubify --no-uuids -o ${TARGET.base}.tbd.no_uuid ${TARGET}"
- ]
- )
+ builder.action = actions + SCons.Action.Action([
+ "$TAPI stubify -o ${TARGET.base}.tbd ${TARGET}",
+ "$TAPI stubify --no-uuids -o ${TARGET.base}.tbd.no_uuid ${TARGET}",
+ ])
+
def exists(env):
result = _detect(env) != None
diff --git a/site_scons/site_tools/thin_archive.py b/site_scons/site_tools/thin_archive.py
index 5700996a054..7d34a6bfd37 100644
--- a/site_scons/site_tools/thin_archive.py
+++ b/site_scons/site_tools/thin_archive.py
@@ -92,7 +92,8 @@ def _add_scanner(builder):
return new_results
builder.target_scanner = SCons.Scanner.Scanner(
- function=new_scanner, path_function=path_function
+ function=new_scanner,
+ path_function=path_function,
)
@@ -101,8 +102,7 @@ def generate(env):
return
env["ARFLAGS"] = SCons.Util.CLVar(
- [arflag if arflag != "rc" else "rcsTD" for arflag in env["ARFLAGS"]]
- )
+ [arflag if arflag != "rc" else "rcsTD" for arflag in env["ARFLAGS"]])
# Disable running ranlib, since we added 's' above
env["RANLIBCOM"] = ""
diff --git a/site_scons/site_tools/validate_cache_dir.py b/site_scons/site_tools/validate_cache_dir.py
index b5faee9b3e0..3bd07462ade 100644
--- a/site_scons/site_tools/validate_cache_dir.py
+++ b/site_scons/site_tools/validate_cache_dir.py
@@ -29,19 +29,21 @@ import shutil
import tempfile
import traceback
-
import SCons
cache_debug_suffix = " (target: %s, cachefile: %s) "
+
class InvalidChecksum(SCons.Errors.BuildError):
def __init__(self, src, dst, reason, cache_csig='', computed_csig=''):
self.message = f"ERROR: md5 checksum {reason} for {src} ({dst})"
self.cache_csig = cache_csig
self.computed_csig = computed_csig
+
def __str__(self):
return self.message
+
class CacheTransferFailed(SCons.Errors.BuildError):
def __init__(self, src, dst, reason):
self.message = f"ERROR: cachedir transfer {reason} while transfering {src} to {dst}"
@@ -49,6 +51,7 @@ class CacheTransferFailed(SCons.Errors.BuildError):
def __str__(self):
return self.message
+
class UnsupportedError(SCons.Errors.BuildError):
def __init__(self, class_name, feature):
self.message = f"{class_name} does not support {feature}"
@@ -56,8 +59,8 @@ class UnsupportedError(SCons.Errors.BuildError):
def __str__(self):
return self.message
-class CacheDirValidate(SCons.CacheDir.CacheDir):
+class CacheDirValidate(SCons.CacheDir.CacheDir):
def __init__(self, path):
self.json_log = None
super().__init__(path)
@@ -70,7 +73,8 @@ class CacheDirValidate(SCons.CacheDir.CacheDir):
@staticmethod
def get_file_contents_path(default_cachefile_path):
- return pathlib.Path(default_cachefile_path) / pathlib.Path(default_cachefile_path).name.split('.')[0]
+ return pathlib.Path(default_cachefile_path) / pathlib.Path(
+ default_cachefile_path).name.split('.')[0]
@staticmethod
def get_bad_cachefile_path(cksum_cachefile_dir):
@@ -96,17 +100,20 @@ class CacheDirValidate(SCons.CacheDir.CacheDir):
src_file = cls.get_file_contents_path(src)
# using os.path.exists here because: https://bugs.python.org/issue35306
if os.path.exists(str(cls.get_bad_cachefile_path(src))):
- raise InvalidChecksum(cls.get_hash_path(src_file), dst, f"cachefile marked as bad checksum")
+ raise InvalidChecksum(
+ cls.get_hash_path(src_file), dst, f"cachefile marked as bad checksum")
csig = None
try:
with open(cls.get_hash_path(src_file), 'rb') as f_out:
csig = f_out.read().decode().strip()
except OSError as ex:
- raise InvalidChecksum(cls.get_hash_path(src_file), dst, f"failed to read hash file: {ex}") from ex
+ raise InvalidChecksum(
+ cls.get_hash_path(src_file), dst, f"failed to read hash file: {ex}") from ex
else:
if not csig:
- raise InvalidChecksum(cls.get_hash_path(src_file), dst, f"no content_hash data found")
+ raise InvalidChecksum(
+ cls.get_hash_path(src_file), dst, f"no content_hash data found")
with tempfile.TemporaryDirectory() as tmpdirname:
dst_tmp = pathlib.Path(tmpdirname) / os.path.basename(dst)
@@ -118,11 +125,12 @@ class CacheDirValidate(SCons.CacheDir.CacheDir):
shutil.move(dst_tmp, dst)
new_csig = SCons.Util.MD5filesignature(dst,
- chunksize=SCons.Node.FS.File.md5_chunksize*1024)
+ chunksize=SCons.Node.FS.File.md5_chunksize * 1024)
if csig != new_csig:
raise InvalidChecksum(
- cls.get_hash_path(src_file), dst, f"checksums don't match {csig} != {new_csig}", cache_csig=csig, computed_csig=new_csig)
+ cls.get_hash_path(src_file), dst, f"checksums don't match {csig} != {new_csig}",
+ cache_csig=csig, computed_csig=new_csig)
@classmethod
def copy_to_cache(cls, env, src, dst):
@@ -145,9 +153,8 @@ class CacheDirValidate(SCons.CacheDir.CacheDir):
raise CacheTransferFailed(src, dst_file, f"failed to create hash file: {ex}") from ex
def log_json_cachedebug(self, node, pushing=False):
- if (pushing
- and (node.nocache or SCons.CacheDir.cache_readonly or 'conftest' in str(node))):
- return
+ if (pushing and (node.nocache or SCons.CacheDir.cache_readonly or 'conftest' in str(node))):
+ return
cachefile = self.get_file_contents_path(self.cachepath(node)[1])
if node.fs.exists(cachefile):
@@ -213,8 +220,8 @@ class CacheDirValidate(SCons.CacheDir.CacheDir):
self.debugFP.write(self._format_exception_msg())
def _format_exception_msg(self):
- return ('An exception was detected while using the cache:\n' +
- ' ' + "\n ".join("".join(traceback.format_exc()).split("\n"))) + '\n'
+ return ('An exception was detected while using the cache:\n' + ' ' + "\n ".join(
+ "".join(traceback.format_exc()).split("\n"))) + '\n'
def _log(self, log_msg, json_info, realnode, cachefile):
self.CacheDebug(log_msg + cache_debug_suffix, realnode, cachefile)
@@ -241,12 +248,16 @@ class CacheDirValidate(SCons.CacheDir.CacheDir):
return
msg = f"Removed bad cachefile {cksum_dir} found in cache."
- self._log(msg, {
- 'type': 'invalid_checksum',
- 'cache_csig': cache_csig,
- 'computed_csig': computed_csig
- }, node, cksum_dir)
-
+ self._log(
+ msg,
+ {
+ 'type': 'invalid_checksum',
+ 'cache_csig': cache_csig,
+ 'computed_csig': computed_csig,
+ },
+ node,
+ cksum_dir,
+ )
def get_cachedir_csig(self, node):
cachedir, cachefile = self.cachepath(node)
@@ -263,9 +274,11 @@ class CacheDirValidate(SCons.CacheDir.CacheDir):
return dir, path
return dir, str(self.get_cachedir_path(path))
+
def exists(env):
return True
+
def generate(env):
if not env.get('CACHEDIR_CLASS'):
env['CACHEDIR_CLASS'] = CacheDirValidate
diff --git a/site_scons/site_tools/vcredist.py b/site_scons/site_tools/vcredist.py
index 5c8effaadc4..0f86629b281 100644
--- a/site_scons/site_tools/vcredist.py
+++ b/site_scons/site_tools/vcredist.py
@@ -137,8 +137,7 @@ def generate(env):
vs_version = int(msvc_major) + int(msvc_minor)
vs_version_next = vs_version + 1
vs_version_range = "[{vs_version}.0, {vs_version_next}.0)".format(
- vs_version=vs_version, vs_version_next=vs_version_next
- )
+ vs_version=vs_version, vs_version_next=vs_version_next)
if not programfilesx86:
programfilesx86 = _get_programfiles()
@@ -146,25 +145,19 @@ def generate(env):
return
# Use vswhere (it has a fixed stable path) to query where Visual Studio is installed.
- env["MSVS"]["VSINSTALLDIR"] = (
- subprocess.check_output(
- [
- os.path.join(
- programfilesx86,
- "Microsoft Visual Studio",
- "Installer",
- "vswhere.exe",
- ),
- "-version",
- vs_version_range,
- "-property",
- "installationPath",
- "-nologo",
- ]
- )
- .decode("utf-8")
- .strip()
- )
+ env["MSVS"]["VSINSTALLDIR"] = (subprocess.check_output([
+ os.path.join(
+ programfilesx86,
+ "Microsoft Visual Studio",
+ "Installer",
+ "vswhere.exe",
+ ),
+ "-version",
+ vs_version_range,
+ "-property",
+ "installationPath",
+ "-nologo",
+ ]).decode("utf-8").strip())
vsinstall_dir = env["MSVS"]["VSINSTALLDIR"]
@@ -179,19 +172,15 @@ def generate(env):
# TOOO: This x64 needs to be abstracted away. Is it the host
# arch, or the target arch? My guess is host.
vsruntime_key_name = "SOFTWARE\\Microsoft\\VisualStudio\\{msvc_major}.0\\VC\\Runtimes\\x64".format(
- msvc_major=msvc_major
- )
+ msvc_major=msvc_major)
vsruntime_key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, vsruntime_key_name)
- vslib_version, vslib_version_type = winreg.QueryValueEx(
- vsruntime_key, "Version"
- )
+ vslib_version, vslib_version_type = winreg.QueryValueEx(vsruntime_key, "Version")
except WindowsError:
return
# Fallback to directory search if we don't find the expected version
- redist_path = os.path.join(
- redist_root, re.match("v(\d+\.\d+\.\d+)\.\d+", vslib_version).group(1)
- )
+ redist_path = os.path.join(redist_root,
+ re.match("v(\d+\.\d+\.\d+)\.\d+", vslib_version).group(1))
if not os.path.isdir(redist_path):
redist_path = None
dirs = os.listdir(redist_root)
@@ -228,9 +217,7 @@ def generate(env):
if not expansion:
return
- vcredist_candidates = [
- c.format(expansion) for c in vcredist_search_template_sequence
- ]
+ vcredist_candidates = [c.format(expansion) for c in vcredist_search_template_sequence]
for candidate in vcredist_candidates:
candidate = os.path.join(redist_path, candidate)
if os.path.isfile(candidate):
diff --git a/site_scons/site_tools/xcode.py b/site_scons/site_tools/xcode.py
index d40528d3a54..3db0aca4719 100644
--- a/site_scons/site_tools/xcode.py
+++ b/site_scons/site_tools/xcode.py
@@ -34,5 +34,4 @@ def generate(env):
if "DEVELOPER_DIR" in os.environ:
env["ENV"]["DEVELOPER_DIR"] = os.environ["DEVELOPER_DIR"]
print(
- "NOTE: Xcode detected; propagating DEVELOPER_DIR from shell environment to subcommands"
- )
+ "NOTE: Xcode detected; propagating DEVELOPER_DIR from shell environment to subcommands")
diff --git a/src/SConscript b/src/SConscript
index 25f9eb93146..f51cfbd7d19 100644
--- a/src/SConscript
+++ b/src/SConscript
@@ -13,11 +13,9 @@ env = env.Clone()
# Add any "global" dependencies here. This is where we make every build node
# depend on a list of other build nodes, such as an allocator or libunwind
# or libstdx or similar.
-env.AppendUnique(
- LIBDEPS_GLOBAL=[
- '$BUILD_DIR/third_party/shim_allocator',
- ],
-)
+env.AppendUnique(LIBDEPS_GLOBAL=[
+ '$BUILD_DIR/third_party/shim_allocator',
+], )
# NOTE: We must do third_party first as it adds methods to the environment
# that we need in the mongo sconscript
@@ -41,7 +39,6 @@ env.InjectThirdParty(libraries=[
'variant',
])
-
# It would be somewhat better if this could be applied down in
# `src/mongo/SConscript`, since the goal of doing it here rather than
# up in SConstruct is to only enforce this rule for code that we wrote
diff --git a/src/mongo/SConscript b/src/mongo/SConscript
index db6a4182ed7..6cb7d85b326 100644
--- a/src/mongo/SConscript
+++ b/src/mongo/SConscript
@@ -15,11 +15,9 @@ env = env.Clone()
env.InjectMongoIncludePaths()
-env.AppendUnique(
- FORCEINCLUDES=[
- 'mongo/platform/basic.h',
- ],
-)
+env.AppendUnique(FORCEINCLUDES=[
+ 'mongo/platform/basic.h',
+], )
env.SConscript(
dirs=[
@@ -53,7 +51,8 @@ env.SConscript(
)
config_header_substs = (
- ('@mongo_config_altivec_vec_vbpermq_output_index@', 'MONGO_CONFIG_ALTIVEC_VEC_VBPERMQ_OUTPUT_INDEX'),
+ ('@mongo_config_altivec_vec_vbpermq_output_index@',
+ 'MONGO_CONFIG_ALTIVEC_VEC_VBPERMQ_OUTPUT_INDEX'),
('@mongo_config_debug_build@', 'MONGO_CONFIG_DEBUG_BUILD'),
('@mongo_config_have_execinfo_backtrace@', 'MONGO_CONFIG_HAVE_EXECINFO_BACKTRACE'),
('@mongo_config_have_explicit_bzero@', 'MONGO_CONFIG_HAVE_EXPLICIT_BZERO'),
@@ -78,20 +77,21 @@ config_header_substs = (
('@mongo_config_wiredtiger_enabled@', 'MONGO_CONFIG_WIREDTIGER_ENABLED'),
)
+
def makeConfigHeaderDefine(self, key):
val = "// #undef {0}".format(key)
if key in self['CONFIG_HEADER_DEFINES']:
val = "#define {0} {1}".format(key, self['CONFIG_HEADER_DEFINES'][key])
return val
+
+
env.AddMethod(makeConfigHeaderDefine)
generateConfigHeaderFile = env.Substfile(
'config.h.in',
- SUBST_DICT=[(k, env.makeConfigHeaderDefine(v)) for (k, v) in config_header_substs]
-)
+ SUBST_DICT=[(k, env.makeConfigHeaderDefine(v)) for (k, v) in config_header_substs])
env.Alias('generated-sources', generateConfigHeaderFile)
-
# NOTE: The 'base' library does not really belong here. Its presence
# here is temporary. Do not add to this library, do not remove from
# it, and do not declare other libraries in this file.
@@ -102,11 +102,9 @@ if use_libunwind == True:
quick_exit_env = baseEnv.Clone()
if has_option('gcov'):
- quick_exit_env.Append(
- CPPDEFINES=[
- 'MONGO_GCOV',
- ],
- )
+ quick_exit_env.Append(CPPDEFINES=[
+ 'MONGO_GCOV',
+ ], )
quick_exit_obj = quick_exit_env.LibraryObject(
target='quick_exit',
source=[
@@ -261,9 +259,11 @@ env.AutoInstall(
# If no module has introduced a file named LICENSE-Enterprise.txt then this
# is a Community build, so inject the AGPL and the Community license
-enterprise_license = [banner for banner in env["MODULE_BANNERS"] if banner.name == "LICENSE-Enterprise.txt"]
+enterprise_license = [
+ banner for banner in env["MODULE_BANNERS"] if banner.name == "LICENSE-Enterprise.txt"
+]
if not enterprise_license:
- env.Append(MODULE_BANNERS = [distsrc.File('LICENSE-Community.txt')])
+ env.Append(MODULE_BANNERS=[distsrc.File('LICENSE-Community.txt')])
# All module banners get staged to the top level of the tarfile, so we
# need to fail if we are going to have a name collision.
diff --git a/src/mongo/base/SConscript b/src/mongo/base/SConscript
index 685e32ce7e1..f71721623ba 100644
--- a/src/mongo/base/SConscript
+++ b/src/mongo/base/SConscript
@@ -4,7 +4,6 @@ Import("env")
env = env.Clone()
-
# This needs to use its own env to tell scons to suppress scanning the .tpl.h and .tpl.cpp inputs
# for #includes since they aren't directly preprocessed. Scons will still scan the generated files
# to produce the correct implicit dependencies when they are compiled.
@@ -13,7 +12,7 @@ env_for_error_codes['SCANNERS'] = []
generateErrorCodes = env_for_error_codes.Command(
target=[
'error_codes.h',
- 'error_codes.cpp'
+ 'error_codes.cpp',
],
source=[
'generate_error_codes.py',
@@ -31,7 +30,7 @@ env.Alias('generated-sources', generateErrorCodes)
env.Library(
target=[
- 'system_error'
+ 'system_error',
],
source=[
'system_error.cpp',
@@ -57,7 +56,7 @@ env.Library(
env.Library(
target=[
- 'secure_allocator'
+ 'secure_allocator',
],
source=[
'secure_allocator.cpp',
@@ -111,4 +110,3 @@ env.Benchmark(
'$BUILD_DIR/mongo/util/processinfo',
],
)
-
diff --git a/src/mongo/bson/SConscript b/src/mongo/bson/SConscript
index d4156225e85..2f5c806d6f6 100644
--- a/src/mongo/bson/SConscript
+++ b/src/mongo/bson/SConscript
@@ -58,7 +58,7 @@ asioEnv.InjectThirdParty('asio')
asioEnv.CppIntegrationTest(
target='bson_integration_test',
source=[
- 'ugly_bson_integration_test.cpp'
+ 'ugly_bson_integration_test.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/executor/network_interface',
diff --git a/src/mongo/bson/util/SConscript b/src/mongo/bson/util/SConscript
index e759ec9873f..57ea076ecfe 100644
--- a/src/mongo/bson/util/SConscript
+++ b/src/mongo/bson/util/SConscript
@@ -34,7 +34,7 @@ env.Benchmark(
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
- 'bson_column'
+ 'bson_column',
],
)
@@ -45,12 +45,12 @@ env.CppUnitTest(
'bson_extract_test.cpp',
'bsoncolumn_test.cpp',
'builder_test.cpp',
- 'simple8b_test.cpp',
+ 'simple8b_test.cpp',
'simple8b_type_util_test.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
- 'bson_column',
+ 'bson_column',
'bson_extract',
],
)
@@ -62,6 +62,6 @@ env.Benchmark(
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
- 'bson_column'
+ 'bson_column',
],
)
diff --git a/src/mongo/client/SConscript b/src/mongo/client/SConscript
index 5010be181f0..1e613f942a0 100644
--- a/src/mongo/client/SConscript
+++ b/src/mongo/client/SConscript
@@ -8,7 +8,7 @@ env = env.Clone()
env.SConscript(
dirs=['sdam'],
- exports=['env']
+ exports=['env'],
)
# Contains only the core ConnectionString functionality, *not* the ability to call connect() and
@@ -38,11 +38,10 @@ env.Library(
],
LIBDEPS=[
'$BUILD_DIR/mongo/bson/util/bson_extract',
- '$BUILD_DIR/mongo/db/service_context'
+ '$BUILD_DIR/mongo/db/service_context',
],
)
-
if get_option('ssl') == 'on':
env.Library(
target=[
@@ -53,7 +52,7 @@ if get_option('ssl') == 'on':
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/base',
- '$BUILD_DIR/mongo/db/server_options_core', # For object_check.h
+ '$BUILD_DIR/mongo/db/server_options_core', # For object_check.h
'$BUILD_DIR/mongo/idl/idl_parser',
],
)
@@ -72,7 +71,7 @@ if get_option('ssl') == 'on':
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/base',
- '$BUILD_DIR/mongo/db/server_options_core', # For object_check.h
+ '$BUILD_DIR/mongo/db/server_options_core', # For object_check.h
'$BUILD_DIR/mongo/idl/idl_parser',
'$BUILD_DIR/third_party/shim_kms_message',
'sasl_aws_common',
@@ -143,7 +142,7 @@ env.Library(
'$BUILD_DIR/mongo/bson/util/bson_extract',
'$BUILD_DIR/mongo/executor/remote_command',
'internal_auth',
- 'sasl_client'
+ 'sasl_client',
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/auth/auth',
@@ -247,7 +246,7 @@ clientDriverEnv.Library(
'$BUILD_DIR/mongo/db/commands/test_commands_enabled',
'$BUILD_DIR/mongo/transport/message_compressor',
'$BUILD_DIR/mongo/util/net/ssl_manager',
- ]
+ ],
)
env.Library(
@@ -258,7 +257,7 @@ env.Library(
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/idl/server_parameter',
- ]
+ ],
)
env.Library(
@@ -370,7 +369,7 @@ if wiredtiger:
'server_discovery_monitor_test.cpp',
'server_ping_monitor_test.cpp',
'streamable_replica_set_monitor_error_handler_test.cpp',
- 'streamable_replica_set_monitor_discovery_time_processor_test.cpp'
+ 'streamable_replica_set_monitor_discovery_time_processor_test.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/client/sdam/sdam',
@@ -467,10 +466,6 @@ env.CppIntegrationTest(
env.Library(
target='dbclient_mockcursor',
- source=[
- 'dbclient_mockcursor.cpp'
- ],
- LIBDEPS_PRIVATE=[
- 'clientdriver_minimal'
- ],
+ source=['dbclient_mockcursor.cpp'],
+ LIBDEPS_PRIVATE=['clientdriver_minimal'],
)
diff --git a/src/mongo/client/sdam/SConscript b/src/mongo/client/sdam/SConscript
index 0b065b6b52a..4563eba4f2a 100644
--- a/src/mongo/client/sdam/SConscript
+++ b/src/mongo/client/sdam/SConscript
@@ -12,20 +12,20 @@ env.Library(
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/server_options_core',
'$BUILD_DIR/mongo/idl/server_parameter',
- ]
+ ],
)
env.Library(
target='sdam',
source=[
- 'sdam_configuration.cpp',
+ 'sdam_configuration.cpp',
'sdam_datatypes.cpp',
'server_description.cpp',
'topology_description.cpp',
- 'topology_listener.cpp',
+ 'topology_listener.cpp',
'topology_state_machine.cpp',
'topology_manager.cpp',
- 'server_selector.cpp',
+ 'server_selector.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
@@ -39,11 +39,11 @@ env.Library(
],
)
-
sdam_json_test = env.Program(
target='sdam_json_test',
source=[
- 'sdam_json_test_runner.cpp'],
+ 'sdam_json_test_runner.cpp',
+ ],
AIB_COMPONENT="sdam-json-test",
LIBDEPS=[
'$BUILD_DIR/mongo/client/connection_string',
@@ -53,13 +53,13 @@ sdam_json_test = env.Program(
'sdam_configuration_parameters',
'sdam_test',
],
-
)[0]
server_selection_json_test = env.Program(
target='server_selection_json_test',
source=[
- 'server_selection_json_test_runner.cpp'],
+ 'server_selection_json_test_runner.cpp',
+ ],
AIB_COMPONENT="server-selection-json-test",
LIBDEPS=[
'$BUILD_DIR/mongo/client/connection_string',
@@ -129,12 +129,12 @@ env.CppUnitTest(
)
env.CppUnitTest(
- target='server_selector_test',
- source=['server_selector_test.cpp'],
- LIBDEPS=[
- 'sdam',
- 'sdam_test',
- ],
+ target='server_selector_test',
+ source=['server_selector_test.cpp'],
+ LIBDEPS=[
+ 'sdam',
+ 'sdam_test',
+ ],
)
env.CppUnitTest(
diff --git a/src/mongo/crypto/SConscript b/src/mongo/crypto/SConscript
index d6e291f712f..b6a3453a16d 100644
--- a/src/mongo/crypto/SConscript
+++ b/src/mongo/crypto/SConscript
@@ -8,39 +8,35 @@ Import([
env = env.Clone()
-env.Library('sha1_block',
- source=[
- 'sha1_block.cpp'
- ],
+env.Library(
+ 'sha1_block',
+ source=['sha1_block.cpp'],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/util/secure_compare_memory',
- ])
-
-env.Library('sha256_block',
- source=[
- 'sha256_block.cpp'
],
+)
+
+env.Library(
+ 'sha256_block',
+ source=['sha256_block.cpp'],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/util/secure_compare_memory',
- ])
+ ],
+)
cryptoEnv = env.Clone()
if "tom" in env["MONGO_CRYPTO"]:
cryptoEnv.InjectThirdParty(libraries=['tomcrypt'])
- cryptoEnv.Append(
- CPPDEFINES=[
- 'LTC_NO_PROTOTYPES',
- ]
- )
+ cryptoEnv.Append(CPPDEFINES=[
+ 'LTC_NO_PROTOTYPES',
+ ])
cryptoEnv.Library(
target='sha_block_${MONGO_CRYPTO}',
- source=[
- 'sha_block_${MONGO_CRYPTO}.cpp'
- ],
+ source=['sha_block_${MONGO_CRYPTO}.cpp'],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
'sha1_block',
@@ -48,10 +44,11 @@ cryptoEnv.Library(
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/third_party/shim_tomcrypt' if "tom" in env["MONGO_CRYPTO"] else [],
- ]
+ ],
)
-cryptoEnv.Library(target='symmetric_crypto',
+cryptoEnv.Library(
+ target='symmetric_crypto',
source=[
'symmetric_crypto.cpp',
'symmetric_crypto_${MONGO_CRYPTO}.cpp',
@@ -63,7 +60,7 @@ cryptoEnv.Library(target='symmetric_crypto',
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/third_party/shim_tomcrypt' if "tom" in env["MONGO_CRYPTO"] else [],
- ]
+ ],
)
env.Library(
@@ -84,9 +81,9 @@ env.Library(
"encryption_fields.idl",
],
LIBDEPS=[
- '$BUILD_DIR/mongo/idl/feature_flag',
- '$BUILD_DIR/mongo/idl/idl_parser',
- ]
+ '$BUILD_DIR/mongo/idl/feature_flag',
+ '$BUILD_DIR/mongo/idl/idl_parser',
+ ],
)
env.Library(
@@ -120,7 +117,7 @@ env.Library(
],
LIBDEPS=[
'$BUILD_DIR/mongo/idl/idl_parser',
- ]
+ ],
)
env.CppUnitTest(
diff --git a/src/mongo/db/SConscript b/src/mongo/db/SConscript
index 1cdea6aa03b..1943337fcdc 100644
--- a/src/mongo/db/SConscript
+++ b/src/mongo/db/SConscript
@@ -77,7 +77,7 @@ env.Library(
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/catalog/collection_uuid_mismatch_info',
- ]
+ ],
)
env.Library(
@@ -94,7 +94,7 @@ env.Library(
'$BUILD_DIR/mongo/db/auth/security_token',
'$BUILD_DIR/mongo/idl/feature_flag',
'server_feature_flags',
- ]
+ ],
)
env.Library(
@@ -117,7 +117,6 @@ env.Library(
],
)
-
env.Library(
target="dbmessage",
source=[
@@ -127,7 +126,7 @@ env.Library(
'$BUILD_DIR/mongo/db/server_options_core',
'$BUILD_DIR/mongo/transport/transport_layer_common',
'$BUILD_DIR/mongo/util/net/network',
- ]
+ ],
)
env.Library(
@@ -139,7 +138,7 @@ env.Library(
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/client/authentication',
'$BUILD_DIR/mongo/util/net/ssl_manager',
- ]
+ ],
)
env.Library(
@@ -152,7 +151,7 @@ env.Library(
'$BUILD_DIR/mongo/db/repl/repl_settings',
'$BUILD_DIR/mongo/util/processinfo',
'startup_warnings_common',
- ]
+ ],
)
env.Library(
@@ -169,7 +168,6 @@ env.Library(
target='curop',
source=[
'curop.cpp',
-
],
LIBDEPS=[
'$BUILD_DIR/mongo/bson/mutable/mutable_bson',
@@ -180,8 +178,8 @@ env.Library(
'$BUILD_DIR/mongo/db/stats/timer_stats',
'$BUILD_DIR/mongo/rpc/client_metadata',
'$BUILD_DIR/mongo/transport/service_executor',
- '$BUILD_DIR/mongo/util/diagnostic_info' if get_option(
- 'use-diagnostic-latches') == 'on' else [],
+ '$BUILD_DIR/mongo/util/diagnostic_info'
+ if get_option('use-diagnostic-latches') == 'on' else [],
'$BUILD_DIR/mongo/util/fail_point',
'$BUILD_DIR/mongo/util/net/network',
'$BUILD_DIR/mongo/util/progress_meter',
@@ -216,7 +214,7 @@ env.Library(
LIBDEPS=[
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/db/commands/test_commands_enabled',
- ]
+ ],
)
env.Library(
@@ -227,7 +225,7 @@ env.Library(
LIBDEPS=[
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/util/md5',
- ]
+ ],
)
# Range arithmetic library, used by both mongod and mongos
@@ -270,7 +268,7 @@ env.Library(
env.Library(
target='update_index_data',
source=[
- 'update_index_data.cpp'
+ 'update_index_data.cpp',
],
LIBDEPS=[
'common',
@@ -289,7 +287,7 @@ env.Library(
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/index_names',
- ]
+ ],
)
env.Library(
@@ -445,7 +443,7 @@ env.Library(
'global_settings',
'server_options_base',
'server_options_servers',
- ]
+ ],
)
env.Library(
@@ -475,8 +473,8 @@ env.Library(
'read_write_concern_provenance',
],
LIBDEPS_PRIVATE=[
- '$BUILD_DIR/mongo/db/server_options_core', # For serverGlobalParams
- ]
+ '$BUILD_DIR/mongo/db/server_options_core', # For serverGlobalParams
+ ],
)
env.Library(
@@ -524,14 +522,14 @@ env.Library(
env.Library(
target='change_stream_change_collection_manager',
source=[
- 'change_stream_change_collection_manager.cpp'
+ 'change_stream_change_collection_manager.cpp',
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/catalog/catalog_helpers',
"$BUILD_DIR/mongo/db/catalog/clustered_collection_options",
'$BUILD_DIR/mongo/db/dbhelpers',
'$BUILD_DIR/mongo/db/service_context',
- ]
+ ],
)
env.Library(
@@ -776,7 +774,7 @@ env.Library(
'$BUILD_DIR/mongo/idl/feature_flag',
'$BUILD_DIR/mongo/idl/server_parameter',
'server_options_core',
- ]
+ ],
)
env.Library(
@@ -786,7 +784,7 @@ env.Library(
],
LIBDEPS=[
'$BUILD_DIR/mongo/idl/feature_flag',
- ]
+ ],
)
env.Library(
@@ -802,7 +800,7 @@ env.Library(
LIBDEPS_PRIVATE=[
'internal_transactions_feature_flag',
'service_context',
- ]
+ ],
)
env.Library(
@@ -822,7 +820,7 @@ env.Library(
'$BUILD_DIR/mongo/db/catalog/local_oplog_info',
'$BUILD_DIR/mongo/db/s/sharding_api_d',
'multitenancy',
- ]
+ ],
)
env.Library(
@@ -871,7 +869,7 @@ env.Library(
'stats/top',
'stats/transaction_stats',
'update/update_driver',
- ]
+ ],
)
env.Library(
@@ -902,7 +900,7 @@ env.Library(
target='fle_crud',
source=[
'fle_crud.cpp',
- 'query/fle/server_rewrite.cpp'
+ 'query/fle/server_rewrite.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/crypto/encrypted_field_config',
@@ -1325,7 +1323,7 @@ env.Library(
LIBDEPS_PRIVATE=[
"commands/server_status_core",
"curop",
- ]
+ ],
)
env.Library(
@@ -1445,7 +1443,7 @@ env.Library(
'commands/server_status_core',
'service_context',
'write_ops',
- ]
+ ],
)
env.Library(
@@ -1460,7 +1458,8 @@ env.Library(
'$BUILD_DIR/mongo/db/catalog/clustered_collection_options',
'$BUILD_DIR/mongo/db/query/collation/collator_interface',
'$BUILD_DIR/mongo/db/storage/key_string',
- ],)
+ ],
+)
env.Library(
target='query_exec',
@@ -1839,7 +1838,7 @@ env.Library(
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/s/sharding_api_d',
'internal_transactions_feature_flag',
- ]
+ ],
)
env.Library(
@@ -1946,7 +1945,7 @@ env.Library(
],
LIBDEPS_PRIVATE=[
'session_catalog',
- 'transaction'
+ 'transaction',
],
)
@@ -2027,7 +2026,7 @@ env.Library(
],
LIBDEPS=[
'logical_time',
- 'time_proof_service'
+ 'time_proof_service',
],
)
@@ -2104,7 +2103,7 @@ env.Library(
target='vector_clock_mongod',
source=[
's/topology_time_ticker.cpp',
- 'vector_clock_mongod.cpp'
+ 'vector_clock_mongod.cpp',
],
LIBDEPS=[
'vector_clock_mutable',
@@ -2353,7 +2352,7 @@ env.Library(
env.Program(
target="mongotrafficreader",
source=[
- "traffic_reader_main.cpp"
+ "traffic_reader_main.cpp",
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
@@ -2372,11 +2371,10 @@ env.Library(
'$BUILD_DIR/mongo/base',
],
LIBDEPS_PRIVATE=[
- '$BUILD_DIR/mongo/util/net/ssl_options_server' if get_option(
- 'ssl') == 'on' else '',
+ '$BUILD_DIR/mongo/util/net/ssl_options_server' if get_option('ssl') == 'on' else '',
'$BUILD_DIR/mongo/util/options_parser/options_parser',
'mongod_options',
- ]
+ ],
)
env.Library(
@@ -2399,8 +2397,8 @@ env.Library(
'$BUILD_DIR/mongo/util/clock_sources',
'$BUILD_DIR/mongo/util/elapsed_tracker',
'$BUILD_DIR/mongo/util/fail_point',
- '$BUILD_DIR/mongo/util/latch_analyzer' if get_option(
- 'use-diagnostic-latches') == 'on' else [],
+ '$BUILD_DIR/mongo/util/latch_analyzer'
+ if get_option('use-diagnostic-latches') == 'on' else [],
'$BUILD_DIR/mongo/util/net/network',
'$BUILD_DIR/mongo/util/ntservice',
'$BUILD_DIR/mongo/util/options_parser/options_parser_init',
@@ -2526,8 +2524,8 @@ env.Library(
'$BUILD_DIR/mongo/transport/transport_layer_manager',
'$BUILD_DIR/mongo/util/cmdline_utils/cmdline_utils',
'$BUILD_DIR/mongo/util/concurrency/thread_pool',
- '$BUILD_DIR/mongo/util/latch_analyzer' if get_option(
- 'use-diagnostic-latches') == 'on' else [],
+ '$BUILD_DIR/mongo/util/latch_analyzer'
+ if get_option('use-diagnostic-latches') == 'on' else [],
'$BUILD_DIR/mongo/util/net/http_client_impl',
'$BUILD_DIR/mongo/util/net/ssl_manager',
'$BUILD_DIR/mongo/util/signal_handlers',
@@ -2605,16 +2603,15 @@ if env.TargetOSIs('windows'):
('@mongo_version_patch@', version_parts[2]),
('@mongo_version_extra@', version_parts[3]),
('@mongo_version_extra_str@', version_extra),
- ])
+ ],
+ )
env.Alias('generated-sources', generatedDbManifest)
env.Depends("mongod.res", generatedDbManifest)
env.Program(
target="mongod",
- source=[
- 'mongod.cpp'
- ] + env.WindowsResourceFile("mongod.rc"),
+ source=['mongod.cpp'] + env.WindowsResourceFile("mongod.rc"),
LIBDEPS=[
# NOTE: Do not add new libdeps (public or private) here unless
# required by the linker to satisfy symbol dependencies from
diff --git a/src/mongo/db/auth/SConscript b/src/mongo/db/auth/SConscript
index a2bcc5b3295..3df9d6922f2 100644
--- a/src/mongo/db/auth/SConscript
+++ b/src/mongo/db/auth/SConscript
@@ -87,7 +87,7 @@ env.Library(
'$BUILD_DIR/mongo/db/op_observer_util',
'$BUILD_DIR/mongo/db/repl/oplog_entry',
'auth',
- ]
+ ],
)
env.Library(
@@ -120,7 +120,7 @@ env.Library(
'$BUILD_DIR/mongo/db/db_raii',
'$BUILD_DIR/mongo/db/multitenancy',
'auth_types',
- ]
+ ],
)
env.Library(
@@ -208,7 +208,7 @@ env.Library(
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/api_parameters',
- '$BUILD_DIR/mongo/db/audit', # audit:logLogout in AuthZSession.
+ '$BUILD_DIR/mongo/db/audit', # audit:logLogout in AuthZSession.
'$BUILD_DIR/mongo/db/stats/counters',
'$BUILD_DIR/mongo/idl/server_parameter',
'$BUILD_DIR/mongo/util/caching',
@@ -304,7 +304,7 @@ env.Library(
"$BUILD_DIR/mongo/util/net/ssl_manager",
"$BUILD_DIR/mongo/util/net/ssl_parameters_auth",
'cluster_auth_mode',
- 'security_key'
+ 'security_key',
],
LIBDEPS=[
'$BUILD_DIR/mongo/db/server_options_core',
@@ -407,7 +407,7 @@ env.Library(
'sasl_options.idl',
],
LIBDEPS=[
- 'sasl_options'
+ 'sasl_options',
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/util/net/network',
@@ -466,7 +466,7 @@ env.Library(
'authz_session_external_state_s.cpp',
'user_cache_invalidator_job.cpp',
'user_cache_invalidator_job_parameters.idl',
- ],
+ ],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
],
@@ -513,7 +513,7 @@ env.Library(
source=[
'ldap_operation_stats.cpp',
'ldap_cumulative_operation_stats.cpp',
- 'user_cache_acquisition_stats.cpp'
+ 'user_cache_acquisition_stats.cpp',
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/base',
diff --git a/src/mongo/db/catalog/SConscript b/src/mongo/db/catalog/SConscript
index 3fbdbcd6dcb..debf1c0f2cf 100644
--- a/src/mongo/db/catalog/SConscript
+++ b/src/mongo/db/catalog/SConscript
@@ -25,7 +25,7 @@ env.Library(
'$BUILD_DIR/mongo/db/namespace_string',
'$BUILD_DIR/mongo/idl/basic_types',
'$BUILD_DIR/mongo/idl/idl_parser',
- '$BUILD_DIR/mongo/idl/server_parameter'
+ '$BUILD_DIR/mongo/idl/server_parameter',
],
)
@@ -126,7 +126,7 @@ env.Library(
'$BUILD_DIR/mongo/db/index/index_access_method',
'$BUILD_DIR/mongo/db/storage/key_string',
'validate_state',
- ]
+ ],
)
env.Library(
@@ -144,7 +144,7 @@ env.Library(
'$BUILD_DIR/mongo/db/storage/storage_repair_observer',
'index_repair',
'multi_index_block',
- ]
+ ],
)
env.Library(
@@ -218,7 +218,7 @@ env.Library(
'collection',
'collection_catalog',
'collection_query_info',
- ]
+ ],
)
env.Library(
@@ -271,7 +271,7 @@ env.Library(
'collection_query_info',
'index_build_block',
'index_catalog',
- ]
+ ],
)
env.Library(
@@ -294,7 +294,7 @@ env.Library(
'$BUILD_DIR/mongo/db/views/views',
'$BUILD_DIR/mongo/idl/server_parameter',
'collection',
- ]
+ ],
)
env.Benchmark(
@@ -327,9 +327,7 @@ env.Library(
env.Library(
target="validate_state",
- source=[
- "validate_state.cpp"
- ],
+ source=["validate_state.cpp"],
LIBDEPS_PRIVATE=[
"$BUILD_DIR/mongo/base",
"$BUILD_DIR/mongo/db/catalog_raii",
@@ -341,7 +339,7 @@ env.Library(
"database_holder",
"index_catalog",
"throttle_cursor",
- ]
+ ],
)
env.Library(
@@ -418,7 +416,7 @@ env.Library(
'$BUILD_DIR/mongo/db/multitenancy',
'$BUILD_DIR/mongo/db/views/views',
'database_holder',
- ]
+ ],
)
env.Library(
@@ -448,7 +446,7 @@ env.Library(
'index_key_validate',
'throttle_cursor',
'validate_state',
- ]
+ ],
)
env.Library(
diff --git a/src/mongo/db/catalog/util/SConscript b/src/mongo/db/catalog/util/SConscript
index e6f8b00c3c9..7431dcb165e 100644
--- a/src/mongo/db/catalog/util/SConscript
+++ b/src/mongo/db/catalog/util/SConscript
@@ -6,9 +6,6 @@ env = env.Clone()
env.CppUnitTest(
target='db_catalog_util_test',
- source=[
- 'partitioned_test.cpp'
- ],
- LIBDEPS=[
- ]
+ source=['partitioned_test.cpp'],
+ LIBDEPS=[],
)
diff --git a/src/mongo/db/commands/SConscript b/src/mongo/db/commands/SConscript
index b1aaa872559..adddad9dfd7 100644
--- a/src/mongo/db/commands/SConscript
+++ b/src/mongo/db/commands/SConscript
@@ -15,7 +15,7 @@ env.Library(
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/idl/server_parameter',
"server_status_core",
- ]
+ ],
)
env.Library(
@@ -27,7 +27,7 @@ env.Library(
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
- ]
+ ],
)
env.Library(
@@ -190,7 +190,7 @@ env.Library(
'feature_compatibility_parsers',
'server_status',
'test_commands_enabled',
- ]
+ ],
)
env.Library(
@@ -200,7 +200,7 @@ env.Library(
],
LIBDEPS=[
'$BUILD_DIR/mongo/db/read_write_concern_defaults',
- ]
+ ],
)
env.Library(
@@ -219,7 +219,7 @@ env.Library(
'$BUILD_DIR/mongo/rpc/client_metadata',
'$BUILD_DIR/mongo/util/net/ssl_manager',
'test_commands_enabled',
- ]
+ ],
)
env.Library(
@@ -235,7 +235,7 @@ env.Library(
'$BUILD_DIR/mongo/db/curop',
'$BUILD_DIR/mongo/db/storage/backup_cursor_hooks',
'fsync_locked',
- ]
+ ],
)
env.Library(
@@ -262,8 +262,7 @@ env.Library(
source=[
'fsync_locked.cpp',
],
- LIBDEPS=[
- ],
+ LIBDEPS=[],
)
env.Library(
@@ -618,7 +617,7 @@ env.Library(
LIBDEPS=[
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/s/write_ops/cluster_write_ops',
- 'cluster_server_parameter_cmds_idl'
+ 'cluster_server_parameter_cmds_idl',
],
)
@@ -641,7 +640,7 @@ env.Library(
env.Library(
target='current_op_common',
source=[
- 'current_op_common.cpp'
+ 'current_op_common.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/db/commands',
@@ -651,7 +650,7 @@ env.Library(
'$BUILD_DIR/mongo/db/pipeline/aggregation_request_helper',
'$BUILD_DIR/mongo/db/query/command_request_response',
'$BUILD_DIR/mongo/db/service_context',
- 'test_commands_enabled'
+ 'test_commands_enabled',
],
)
@@ -701,7 +700,7 @@ env.Library(
'$BUILD_DIR/mongo/db/serverless/serverless_types_idl',
'$BUILD_DIR/mongo/idl/feature_flag',
'$BUILD_DIR/mongo/idl/idl_parser',
- ]
+ ],
)
env.Library(
target='txn_cmd_request',
@@ -713,7 +712,7 @@ env.Library(
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/db/auth/authprivilege',
'$BUILD_DIR/mongo/idl/idl_parser',
- ]
+ ],
)
env.Library(
@@ -725,7 +724,7 @@ env.Library(
LIBDEPS=[
'$BUILD_DIR/mongo/db/write_concern_options',
'$BUILD_DIR/mongo/idl/idl_parser',
- ]
+ ],
)
env.Library(
@@ -734,16 +733,13 @@ env.Library(
'map_reduce_agg.cpp',
],
LIBDEPS=[
- '$BUILD_DIR/mongo/db/commands/servers',
- '$BUILD_DIR/mongo/db/db_raii',
+ '$BUILD_DIR/mongo/db/commands/servers', '$BUILD_DIR/mongo/db/db_raii',
'$BUILD_DIR/mongo/db/index/index_access_methods',
'$BUILD_DIR/mongo/db/pipeline/process_interface/mongo_process_interface',
'$BUILD_DIR/mongo/db/pipeline/process_interface/mongod_process_interface_factory',
- '$BUILD_DIR/mongo/db/query/map_reduce_output_format',
- '$BUILD_DIR/mongo/db/query_exec',
- '$BUILD_DIR/mongo/idl/idl_parser',
- 'map_reduce_parser'
- ]
+ '$BUILD_DIR/mongo/db/query/map_reduce_output_format', '$BUILD_DIR/mongo/db/query_exec',
+ '$BUILD_DIR/mongo/idl/idl_parser', 'map_reduce_parser'
+ ],
)
env.CppUnitTest(
diff --git a/src/mongo/db/concurrency/SConscript b/src/mongo/db/concurrency/SConscript
index 916d353bf05..26403ec4a90 100644
--- a/src/mongo/db/concurrency/SConscript
+++ b/src/mongo/db/concurrency/SConscript
@@ -92,7 +92,8 @@ env.Benchmark(
],
LIBDEPS=[
'lock_manager',
- ])
+ ],
+)
env.CppUnitTest(
target='db_concurrency_test',
@@ -112,5 +113,5 @@ env.CppUnitTest(
'$BUILD_DIR/mongo/util/progress_meter',
'exception_util',
'lock_manager',
- ]
+ ],
)
diff --git a/src/mongo/db/cst/SConscript b/src/mongo/db/cst/SConscript
index 16f5a25932f..09c46385bf1 100644
--- a/src/mongo/db/cst/SConscript
+++ b/src/mongo/db/cst/SConscript
@@ -22,7 +22,7 @@ env.Library(
'$BUILD_DIR/mongo/db/pipeline/pipeline',
'$BUILD_DIR/mongo/db/pipeline/variable_validation',
'$BUILD_DIR/mongo/db/query/datetime/date_time_support',
- ]
+ ],
)
env.CppUnitTest(
@@ -43,7 +43,7 @@ env.CppUnitTest(
'$BUILD_DIR/mongo/db/matcher/expressions_mongod_only',
'$BUILD_DIR/mongo/db/query/query_test_service_context',
'cst',
- ]
+ ],
)
env.CppUnitTest(
@@ -56,7 +56,7 @@ env.CppUnitTest(
LIBDEPS=[
'$BUILD_DIR/mongo/db/query/query_test_service_context',
'cst',
- ]
+ ],
)
# Disabled under SERVER-64949.
# env.Benchmark(
diff --git a/src/mongo/db/exec/SConscript b/src/mongo/db/exec/SConscript
index c2caf69f0f2..b2368e49830 100644
--- a/src/mongo/db/exec/SConscript
+++ b/src/mongo/db/exec/SConscript
@@ -16,11 +16,11 @@ env.SConscript(
# WorkingSet target and associated test
env.Library(
- target = "working_set",
- source = [
+ target="working_set",
+ source=[
"working_set.cpp",
],
- LIBDEPS = [
+ LIBDEPS=[
"$BUILD_DIR/mongo/base",
"$BUILD_DIR/mongo/db/bson/dotted_path_support",
"$BUILD_DIR/mongo/db/service_context",
@@ -29,21 +29,21 @@ env.Library(
)
env.Library(
- target = "scoped_timer",
- source = [
+ target="scoped_timer",
+ source=[
"scoped_timer.cpp",
],
- LIBDEPS = [
+ LIBDEPS=[
'$BUILD_DIR/mongo/util/net/network',
],
)
env.Library(
- target = "js_function",
- source = [
+ target="js_function",
+ source=[
"js_function.cpp",
],
- LIBDEPS = [
+ LIBDEPS=[
'$BUILD_DIR/mongo/db/auth/auth',
'$BUILD_DIR/mongo/db/query/query_knobs',
'$BUILD_DIR/mongo/db/service_context',
@@ -52,15 +52,15 @@ env.Library(
)
env.Library(
- target = "bucket_unpacker",
- source = [
+ target="bucket_unpacker",
+ source=[
"bucket_unpacker.cpp",
],
- LIBDEPS = [
+ LIBDEPS=[
"$BUILD_DIR/mongo/db/matcher/expressions",
"document_value/document_value",
],
- LIBDEPS_PRIVATE = [
+ LIBDEPS_PRIVATE=[
"$BUILD_DIR/mongo/bson/util/bson_column",
"$BUILD_DIR/mongo/db/timeseries/timeseries_options",
],
@@ -95,17 +95,17 @@ env.Library(
'inclusion_projection_executor.cpp',
'projection_executor_builder.cpp',
'projection_executor_utils.cpp',
- 'projection_node.cpp'
+ 'projection_node.cpp',
],
LIBDEPS=[
- '$BUILD_DIR/mongo/db/matcher/expressions'
+ '$BUILD_DIR/mongo/db/matcher/expressions',
],
)
env.Library(
target='stagedebug_cmd',
source=[
- 'stagedebug_cmd.cpp'
+ 'stagedebug_cmd.cpp',
],
LIBDEPS=[
"$BUILD_DIR/mongo/db/index/index_access_methods",
@@ -139,7 +139,7 @@ env.CppUnitTest(
],
LIBDEPS=[
"$BUILD_DIR/mongo/base",
- "$BUILD_DIR/mongo/bson/util/bson_column",
+ "$BUILD_DIR/mongo/bson/util/bson_column",
"$BUILD_DIR/mongo/db/auth/authmocks",
"$BUILD_DIR/mongo/db/query/collation/collator_factory_mock",
"$BUILD_DIR/mongo/db/query/collation/collator_interface_mock",
diff --git a/src/mongo/db/exec/document_value/SConscript b/src/mongo/db/exec/document_value/SConscript
index 41e729f9a4c..cce805bac71 100644
--- a/src/mongo/db/exec/document_value/SConscript
+++ b/src/mongo/db/exec/document_value/SConscript
@@ -10,14 +10,14 @@ env.Library(
'document_metadata_fields.cpp',
'value.cpp',
'value_comparator.cpp',
- ],
+ ],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/db/pipeline/field_path',
'$BUILD_DIR/mongo/db/query/datetime/date_time_support',
'$BUILD_DIR/mongo/util/intrusive_counter',
- ]
- )
+ ],
+)
env.Library(
target='document_value_test_util',
diff --git a/src/mongo/db/exec/sbe/SConscript b/src/mongo/db/exec/sbe/SConscript
index c2731874467..6a2503d7f26 100644
--- a/src/mongo/db/exec/sbe/SConscript
+++ b/src/mongo/db/exec/sbe/SConscript
@@ -6,11 +6,11 @@ env.Library(
target='query_sbe_plan_stats',
source=[
'stages/plan_stats.cpp',
- ],
+ ],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
- ]
- )
+ ],
+)
env.Library(
target='query_sbe_values',
@@ -29,7 +29,7 @@ env.Library(
'$BUILD_DIR/mongo/db/query/query_index_bounds',
'$BUILD_DIR/mongo/db/storage/key_string',
'$BUILD_DIR/mongo/util/regex_util',
- ]
+ ],
)
sbeEnv = env.Clone()
@@ -50,19 +50,19 @@ sbeEnv.Library(
'vm/arith.cpp',
'vm/datetime.cpp',
'vm/vm.cpp',
- ],
+ ],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/db/mongohasher',
'$BUILD_DIR/mongo/db/storage/record_store_base',
'$BUILD_DIR/third_party/shim_snappy',
'query_sbe_values',
- ],
+ ],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/bson/dotted_path_support',
'$BUILD_DIR/mongo/db/sorter/sorter_idl',
- ]
- )
+ ],
+)
sbeEnv.Library(
target='query_sbe_stages',
@@ -87,7 +87,7 @@ sbeEnv.Library(
'stages/union.cpp',
'stages/unique.cpp',
'stages/unwind.cpp',
- ],
+ ],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/db/concurrency/lock_manager',
@@ -103,14 +103,14 @@ sbeEnv.Library(
'$BUILD_DIR/third_party/shim_snappy',
'query_sbe_plan_stats',
'query_sbe_values',
- ],
+ ],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/bson/dotted_path_support',
'$BUILD_DIR/mongo/db/sorter/sorter_idl',
'query_sbe',
'query_sbe_storage',
- ]
- )
+ ],
+)
env.Library(
target='query_sbe_storage',
@@ -119,27 +119,25 @@ env.Library(
'stages/column_scan.cpp',
'stages/ix_scan.cpp',
'stages/scan.cpp',
- ],
+ ],
LIBDEPS=[
- '$BUILD_DIR/mongo/db/db_raii',
- '$BUILD_DIR/mongo/db/index/index_access_method',
- '$BUILD_DIR/mongo/db/storage/execution_context',
- 'query_sbe'
- ]
- )
+ '$BUILD_DIR/mongo/db/db_raii', '$BUILD_DIR/mongo/db/index/index_access_method',
+ '$BUILD_DIR/mongo/db/storage/execution_context', 'query_sbe'
+ ],
+)
env.Library(
target='query_sbe_abt',
source=[
'abt/abt_lower.cpp',
- ],
+ ],
LIBDEPS=[
'$BUILD_DIR/mongo/db/query/optimizer/optimizer',
'query_sbe',
'query_sbe_stages',
'query_sbe_storage',
- ]
- )
+ ],
+)
env.Library(
target='sbe_plan_stage_test',
@@ -216,7 +214,7 @@ env.CppUnitTest(
'values/slot_printer_test.cpp',
'values/value_serialization_test.cpp',
"values/value_test.cpp",
- 'values/write_value_to_stream_test.cpp'
+ 'values/write_value_to_stream_test.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/db/auth/authmocks',
@@ -233,15 +231,15 @@ env.Library(
target='sbe_abt_test_util',
source=[
'abt/sbe_abt_test_util.cpp',
- ],
+ ],
LIBDEPS=[
- "$BUILD_DIR/mongo/db/auth/authmocks",
- '$BUILD_DIR/mongo/db/query/query_test_service_context',
- '$BUILD_DIR/mongo/db/query_exec',
- '$BUILD_DIR/mongo/db/service_context_test_fixture',
- 'query_sbe_abt',
- ]
- )
+ "$BUILD_DIR/mongo/db/auth/authmocks",
+ '$BUILD_DIR/mongo/db/query/query_test_service_context',
+ '$BUILD_DIR/mongo/db/query_exec',
+ '$BUILD_DIR/mongo/db/service_context_test_fixture',
+ 'query_sbe_abt',
+ ],
+)
env.CppUnitTest(
target='sbe_abt_test',
@@ -251,6 +249,6 @@ env.CppUnitTest(
],
LIBDEPS=[
'$BUILD_DIR/mongo/unittest/unittest',
- 'sbe_abt_test_util'
+ 'sbe_abt_test_util',
],
)
diff --git a/src/mongo/db/free_mon/SConscript b/src/mongo/db/free_mon/SConscript
index c1fe6d83627..949e1cc31ae 100644
--- a/src/mongo/db/free_mon/SConscript
+++ b/src/mongo/db/free_mon/SConscript
@@ -68,7 +68,6 @@ else:
],
)
-
fmEnv.CppUnitTest(
target='db_free_mon_test',
source=[
diff --git a/src/mongo/db/ftdc/SConscript b/src/mongo/db/ftdc/SConscript
index 72a96523195..ccbb8d4d8e3 100644
--- a/src/mongo/db/ftdc/SConscript
+++ b/src/mongo/db/ftdc/SConscript
@@ -21,14 +21,14 @@ ftdcEnv.Library(
'file_reader.cpp',
'file_writer.cpp',
'util.cpp',
- 'varint.cpp'
+ 'varint.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/bson/util/bson_extract',
'$BUILD_DIR/mongo/db/server_options_core',
'$BUILD_DIR/mongo/db/service_context',
- '$BUILD_DIR/third_party/s2/s2', # For VarInt
+ '$BUILD_DIR/third_party/s2/s2', # For VarInt
'$BUILD_DIR/third_party/shim_zlib',
],
)
@@ -51,7 +51,7 @@ env.Library(
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/db/commands',
'$BUILD_DIR/mongo/util/processinfo',
- 'ftdc'
+ 'ftdc',
] + platform_libs,
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/idl/server_parameter',
@@ -77,7 +77,7 @@ env.Library(
'$BUILD_DIR/mongo/db/auth/authprivilege',
'$BUILD_DIR/mongo/db/storage/storage_options',
'$BUILD_DIR/mongo/idl/server_parameter',
- 'ftdc_server'
+ 'ftdc_server',
],
)
diff --git a/src/mongo/db/fts/SConscript b/src/mongo/db/fts/SConscript
index c6ceb5abb4d..a7a6f125c42 100644
--- a/src/mongo/db/fts/SConscript
+++ b/src/mongo/db/fts/SConscript
@@ -31,14 +31,18 @@ stop_word_languages = [
'turkish',
]
-generateStopWordsList = env.Command( [ "stop_words_list.h", "stop_words_list.cpp"],
- [ "generate_stop_words.py"] + [ 'stop_words_%s.txt' % x for x in stop_word_languages ],
- "$PYTHON $SOURCES $TARGETS" )
+generateStopWordsList = env.Command(
+ ["stop_words_list.h", "stop_words_list.cpp"],
+ ["generate_stop_words.py"] + ['stop_words_%s.txt' % x for x in stop_word_languages],
+ "$PYTHON $SOURCES $TARGETS",
+)
env.Alias('generated-sources', generateStopWordsList)
-baseEnv=env.Clone()
+baseEnv = env.Clone()
baseEnv.InjectThirdParty(libraries=['stemmer'])
-baseEnv.Library('base_fts', [
+baseEnv.Library(
+ 'base_fts',
+ [
'fts_index_format.cpp',
'fts_matcher.cpp',
'fts_query_impl.cpp',
@@ -56,14 +60,17 @@ baseEnv.Library('base_fts', [
'stop_words.cpp',
'stop_words_list.cpp',
'tokenizer.cpp',
- ], LIBDEPS=["$BUILD_DIR/mongo/base",
- "$BUILD_DIR/mongo/db/bson/dotted_path_support",
- "$BUILD_DIR/mongo/db/common",
- "$BUILD_DIR/mongo/db/fts/unicode/unicode",
- "$BUILD_DIR/mongo/db/matcher/expressions",
- "$BUILD_DIR/mongo/util/md5",
- "$BUILD_DIR/third_party/shim_stemmer",
- ])
+ ],
+ LIBDEPS=[
+ "$BUILD_DIR/mongo/base",
+ "$BUILD_DIR/mongo/db/bson/dotted_path_support",
+ "$BUILD_DIR/mongo/db/common",
+ "$BUILD_DIR/mongo/db/fts/unicode/unicode",
+ "$BUILD_DIR/mongo/db/matcher/expressions",
+ "$BUILD_DIR/mongo/util/md5",
+ "$BUILD_DIR/third_party/shim_stemmer",
+ ],
+)
env.Library(
target='fts_query_noop',
diff --git a/src/mongo/db/fts/unicode/SConscript b/src/mongo/db/fts/unicode/SConscript
index 2b783364d64..1571f1403c3 100644
--- a/src/mongo/db/fts/unicode/SConscript
+++ b/src/mongo/db/fts/unicode/SConscript
@@ -4,36 +4,39 @@ Import("env")
env = env.Clone()
-env.Command(
+env.Command(
target="codepoints_casefold.cpp",
source=[
"gen_casefold_map.py",
"#/src/third_party/unicode-8.0.0/CaseFolding.txt",
"gen_helper.py",
],
- action="$PYTHON ${SOURCES[0]} ${SOURCES[1]} $TARGETS")
+ action="$PYTHON ${SOURCES[0]} ${SOURCES[1]} $TARGETS",
+)
env.Alias('generated-sources', "codepoints_casefold.cpp")
-env.Command(
+env.Command(
target="codepoints_delimiter_list.cpp",
source=[
"gen_delimiter_list.py",
"#/src/third_party/unicode-8.0.0/PropList.txt",
"gen_helper.py",
],
- action="$PYTHON ${SOURCES[0]} ${SOURCES[1]} $TARGETS")
+ action="$PYTHON ${SOURCES[0]} ${SOURCES[1]} $TARGETS",
+)
env.Alias('generated-sources', "codepoints_delimiter_list.cpp")
-env.Command(
+env.Command(
target="codepoints_diacritic_list.cpp",
source=[
"gen_diacritic_list.py",
"#/src/third_party/unicode-8.0.0/PropList.txt",
"gen_helper.py",
],
- action="$PYTHON ${SOURCES[0]} ${SOURCES[1]} $TARGETS")
+ action="$PYTHON ${SOURCES[0]} ${SOURCES[1]} $TARGETS",
+)
env.Alias('generated-sources', "codepoints_diacritic_list.cpp")
@@ -49,7 +52,7 @@ env.Library(
LIBDEPS=[
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/shell/linenoise_utf8',
- ]
+ ],
)
env.CppUnitTest(
diff --git a/src/mongo/db/geo/SConscript b/src/mongo/db/geo/SConscript
index 1a32eefac8a..e43299f0d2a 100644
--- a/src/mongo/db/geo/SConscript
+++ b/src/mongo/db/geo/SConscript
@@ -7,33 +7,23 @@ env = env.Clone()
# Core geometry shape libraries
env.Library(
target="geometry",
- source=[
- "hash.cpp",
- "shapes.cpp",
- "big_polygon.cpp",
- "r2_region_coverer.cpp"
- ],
+ source=["hash.cpp", "shapes.cpp", "big_polygon.cpp", "r2_region_coverer.cpp"],
LIBDEPS=[
- "$BUILD_DIR/mongo/base",
- "$BUILD_DIR/mongo/db/common",
- "$BUILD_DIR/mongo/db/storage/key_string",
- "$BUILD_DIR/third_party/s2/s2"
- ]
+ "$BUILD_DIR/mongo/base", "$BUILD_DIR/mongo/db/common",
+ "$BUILD_DIR/mongo/db/storage/key_string", "$BUILD_DIR/third_party/s2/s2"
+ ],
)
# Geometry / BSON parsing and wrapping
env.Library(
target="geoparser",
- source=[
- "geoparser.cpp",
- "geometry_container.cpp"
- ],
+ source=["geoparser.cpp", "geometry_container.cpp"],
LIBDEPS=[
"$BUILD_DIR/mongo/base",
"$BUILD_DIR/mongo/db/bson/dotted_path_support",
"$BUILD_DIR/third_party/s2/s2",
"geometry",
- ]
+ ],
)
env.CppUnitTest(
@@ -48,5 +38,5 @@ env.CppUnitTest(
"$BUILD_DIR/mongo/db/common",
"geometry",
"geoparser",
- ]
+ ],
)
diff --git a/src/mongo/db/index/SConscript b/src/mongo/db/index/SConscript
index b8e2be6c825..9ccff04ff89 100644
--- a/src/mongo/db/index/SConscript
+++ b/src/mongo/db/index/SConscript
@@ -60,10 +60,7 @@ env.Benchmark(
env.Library(
target='expression_params',
- source=[
- 'expression_params.cpp',
- 's2_common.cpp'
- ],
+ source=['expression_params.cpp', 's2_common.cpp'],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/bson/util/bson_extract',
@@ -73,7 +70,7 @@ env.Library(
'$BUILD_DIR/mongo/db/query/collation/collator_interface',
'$BUILD_DIR/mongo/db/storage/key_string',
'$BUILD_DIR/third_party/s2/s2',
- ]
+ ],
)
env.Library(
@@ -145,7 +142,7 @@ env.Library(
'columnar_index',
'expression_params',
'key_generator',
- ]
+ ],
)
env.Library(
@@ -155,7 +152,7 @@ env.Library(
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/base',
- ]
+ ],
)
env.CppUnitTest(
diff --git a/src/mongo/db/matcher/SConscript b/src/mongo/db/matcher/SConscript
index 4d484b0fadb..a53845a04f4 100644
--- a/src/mongo/db/matcher/SConscript
+++ b/src/mongo/db/matcher/SConscript
@@ -8,7 +8,7 @@ env.Library(
target='path',
source=[
'path.cpp',
- 'path_internal.cpp'
+ 'path_internal.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
diff --git a/src/mongo/db/ops/SConscript b/src/mongo/db/ops/SConscript
index 999ff2b85fe..0b736897acc 100644
--- a/src/mongo/db/ops/SConscript
+++ b/src/mongo/db/ops/SConscript
@@ -79,7 +79,7 @@ env.Library(
source='parsed_update.cpp',
LIBDEPS=[
'$BUILD_DIR/mongo/db/update/update_driver',
- 'parsed_update_array_filters'
+ 'parsed_update_array_filters',
],
)
diff --git a/src/mongo/db/pipeline/SConscript b/src/mongo/db/pipeline/SConscript
index ddde981fbb9..c1cf7801e29 100644
--- a/src/mongo/db/pipeline/SConscript
+++ b/src/mongo/db/pipeline/SConscript
@@ -17,9 +17,9 @@ env.SConscript(
env.Library(
target='change_stream_pre_and_post_images_options',
source=[
- 'change_stream_pre_and_post_images_options.idl'
+ 'change_stream_pre_and_post_images_options.idl',
],
- LIBDEPS=[
+ LIBDEPS=[
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/idl/idl_parser',
],
@@ -46,21 +46,21 @@ env.Library(
'aggregation_request_helper',
'expression_context',
'pipeline',
- ]
+ ],
)
env.Library(
target='field_path',
source=[
'field_path.cpp',
- ],
+ ],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/db/query/query_knobs',
'$BUILD_DIR/mongo/db/server_options',
'$BUILD_DIR/mongo/idl/feature_flag',
- ]
- )
+ ],
+)
env.Library(
target='aggregation_request_helper',
@@ -80,7 +80,7 @@ env.Library(
'$BUILD_DIR/mongo/db/write_concern_options',
'$BUILD_DIR/mongo/idl/idl_parser',
'document_sources_idl',
- ]
+ ],
)
env.Library(
@@ -90,7 +90,7 @@ env.Library(
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
- ]
+ ],
)
env.Library(
target='expression_context',
@@ -135,11 +135,11 @@ env.Library(
target='dependencies',
source=[
'dependencies.cpp',
- ],
+ ],
LIBDEPS=[
'$BUILD_DIR/mongo/db/exec/document_value/document_value',
'field_path',
- ]
+ ],
)
env.Library(
@@ -169,7 +169,7 @@ env.Library(
'window_function/window_function_integral.cpp',
'window_function/window_function_shift.cpp',
'window_function/window_function_sum.cpp',
- ],
+ ],
LIBDEPS=[
'$BUILD_DIR/mongo/db/exec/document_value/document_value',
'$BUILD_DIR/mongo/db/query/query_knobs',
@@ -179,9 +179,8 @@ env.Library(
'field_path',
],
LIBDEPS_PRIVATE=[
- '$BUILD_DIR/mongo/db/exec/sort_executor',
- '$BUILD_DIR/mongo/db/index/key_generator'
- ]
+ '$BUILD_DIR/mongo/db/exec/sort_executor', '$BUILD_DIR/mongo/db/index/key_generator'
+ ],
)
env.Library(
@@ -190,12 +189,12 @@ env.Library(
'granularity_rounder.cpp',
'granularity_rounder_powers_of_two.cpp',
'granularity_rounder_preferred_numbers.cpp',
- ],
+ ],
LIBDEPS=[
'$BUILD_DIR/mongo/db/exec/document_value/document_value',
'expression_context',
'field_path',
- ]
+ ],
)
env.Library(
@@ -203,11 +202,11 @@ env.Library(
source=[
'document_source_mock.cpp',
'process_interface/stub_lookup_single_document_process_interface.cpp',
- ],
+ ],
LIBDEPS=[
'$BUILD_DIR/mongo/db/query/query_test_service_context',
'pipeline',
- ]
+ ],
)
env.Library(
@@ -227,12 +226,12 @@ env.Library(
source=[
'lite_parsed_document_source.cpp',
'lite_parsed_pipeline.cpp',
- ],
+ ],
LIBDEPS=[
'$BUILD_DIR/mongo/db/query/common_query_enums_and_helpers',
'$BUILD_DIR/mongo/db/stats/counters',
'aggregation_request_helper',
- ]
+ ],
)
env.Library(
@@ -391,7 +390,7 @@ pipelineEnv.Library(
'$BUILD_DIR/mongo/db/timeseries/timeseries_conversion_util',
'$BUILD_DIR/mongo/db/timeseries/timeseries_options',
'$BUILD_DIR/mongo/rpc/command_status',
- ]
+ ],
)
env.Library(
@@ -441,7 +440,6 @@ env.Library(
],
)
-
env.Library(
target='runtime_constants_idl',
source=[
@@ -487,7 +485,7 @@ env.Library(
env.Library(
target='change_stream_expired_pre_image_remover',
source=[
- 'change_stream_expired_pre_image_remover.cpp'
+ 'change_stream_expired_pre_image_remover.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/db/change_stream_options_manager',
@@ -499,13 +497,13 @@ env.Library(
'$BUILD_DIR/mongo/db/repl/storage_interface',
'$BUILD_DIR/mongo/util/periodic_runner',
'change_stream_preimage',
- ]
+ ],
)
env.Library(
target='change_stream_pre_image_helpers',
source=[
- 'change_stream_pre_image_helpers.cpp'
+ 'change_stream_pre_image_helpers.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/db/service_context',
@@ -516,7 +514,7 @@ env.Library(
'$BUILD_DIR/mongo/db/concurrency/lock_manager_defs',
'$BUILD_DIR/mongo/db/dbhelpers',
'$BUILD_DIR/mongo/db/namespace_string',
- ]
+ ],
)
env.Library(
@@ -527,8 +525,7 @@ env.Library(
LIBDEPS=[
"change_stream_pipeline",
],
- LIBDEPS_PRIVATE=[
- ],
+ LIBDEPS_PRIVATE=[],
)
env.CppUnitTest(
@@ -686,7 +683,7 @@ env.CppUnitTest(
'process_interface/mongos_process_interface',
'process_interface/shardsvr_process_interface',
'sharded_agg_helpers',
- ]
+ ],
)
env.Benchmark(
diff --git a/src/mongo/db/pipeline/process_interface/SConscript b/src/mongo/db/pipeline/process_interface/SConscript
index c18bde43241..1230f427a39 100644
--- a/src/mongo/db/pipeline/process_interface/SConscript
+++ b/src/mongo/db/pipeline/process_interface/SConscript
@@ -129,5 +129,5 @@ env.CppUnitTest(
'$BUILD_DIR/mongo/s/sharding_router_test_fixture',
'mongos_process_interface',
'shardsvr_process_interface',
- ]
+ ],
)
diff --git a/src/mongo/db/query/SConscript b/src/mongo/db/query/SConscript
index 1f42baff54c..38ad7e51499 100644
--- a/src/mongo/db/query/SConscript
+++ b/src/mongo/db/query/SConscript
@@ -12,7 +12,7 @@ env.SConscript(
'optimizer',
],
exports=[
- 'env'
+ 'env',
],
)
@@ -86,7 +86,7 @@ env.Library(
LIBDEPS=[
"$BUILD_DIR/mongo/base",
"$BUILD_DIR/mongo/db/matcher/expressions",
- ]
+ ],
)
env.Library(
@@ -101,7 +101,7 @@ env.Library(
"$BUILD_DIR/mongo/base",
"$BUILD_DIR/mongo/db/exec/sbe/query_sbe",
"canonical_query",
- ]
+ ],
)
env.Library(
@@ -268,7 +268,7 @@ env.Library(
'$BUILD_DIR/mongo/idl/feature_flag',
'$BUILD_DIR/mongo/idl/server_parameter',
'$BUILD_DIR/third_party/shim_pcrecpp',
- ]
+ ],
)
env.Library(
@@ -281,8 +281,7 @@ env.Library(
"$BUILD_DIR/mongo/db/service_context",
"collation/collator_factory_mock",
],
- LIBDEPS_PRIVATE=[
- ],
+ LIBDEPS_PRIVATE=[],
)
env.Library(
@@ -329,7 +328,7 @@ env.Library(
'$BUILD_DIR/mongo/db/exec/document_value/document_value',
'$BUILD_DIR/mongo/db/pipeline/expression_context',
],
- )
+)
env.Library(
target="plan_yield_policy",
@@ -345,7 +344,7 @@ env.Library(
'$BUILD_DIR/mongo/db/concurrency/exception_util',
'$BUILD_DIR/mongo/db/storage/recovery_unit_base',
],
- )
+)
env.CppUnitTest(
target="db_query_test",
diff --git a/src/mongo/db/query/ce/SConscript b/src/mongo/db/query/ce/SConscript
index 8ab2ca62f51..c570a88b23f 100644
--- a/src/mongo/db/query/ce/SConscript
+++ b/src/mongo/db/query/ce/SConscript
@@ -12,5 +12,5 @@ env.Library(
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/exec/sbe/query_sbe_abt',
'$BUILD_DIR/mongo/db/query/optimizer/optimizer',
- ]
-) \ No newline at end of file
+ ],
+)
diff --git a/src/mongo/db/query/collation/SConscript b/src/mongo/db/query/collation/SConscript
index 653878dc5be..79578fe98b8 100644
--- a/src/mongo/db/query/collation/SConscript
+++ b/src/mongo/db/query/collation/SConscript
@@ -78,8 +78,7 @@ if not use_system_version_of_library("icu"):
("U_DISABLE_RENAMING", 1),
("U_STATIC_IMPLEMENTATION", 1),
("U_USING_ICU_NAMESPACE", 0),
- ],
- )
+ ], )
icuEnv.Library(
target="collator_icu",
diff --git a/src/mongo/db/query/datetime/SConscript b/src/mongo/db/query/datetime/SConscript
index e8a45118d4f..6bfd4e3ed63 100644
--- a/src/mongo/db/query/datetime/SConscript
+++ b/src/mongo/db/query/datetime/SConscript
@@ -8,25 +8,21 @@ timeZoneEnv = env.Clone()
timeZoneEnv.InjectThirdParty(libraries=['timelib'])
timeZoneEnv.Library(
target='date_time_support',
- source=[
- 'date_time_support.cpp'
- ],
+ source=['date_time_support.cpp'],
LIBDEPS=[
'$BUILD_DIR/mongo/db/service_context',
'$BUILD_DIR/third_party/shim_timelib',
- ]
+ ],
)
timeZoneEnv.Library(
target='init_timezone_data',
- source=[
- 'init_timezone_data.cpp'
- ],
+ source=['init_timezone_data.cpp'],
LIBDEPS=[
'$BUILD_DIR/mongo/db/server_options_core',
'$BUILD_DIR/third_party/shim_timelib',
'date_time_support',
- ]
+ ],
)
timeZoneEnv.CppUnitTest(
@@ -36,7 +32,7 @@ timeZoneEnv.CppUnitTest(
],
LIBDEPS=[
'date_time_support',
- ]
+ ],
)
timeZoneEnv.CppLibfuzzerTest(
@@ -46,5 +42,5 @@ timeZoneEnv.CppLibfuzzerTest(
],
LIBDEPS=[
'date_time_support',
- ]
+ ],
)
diff --git a/src/mongo/db/query/optimizer/SConscript b/src/mongo/db/query/optimizer/SConscript
index 6d49e5d92bd..c91ceec6cae 100644
--- a/src/mongo/db/query/optimizer/SConscript
+++ b/src/mongo/db/query/optimizer/SConscript
@@ -9,7 +9,7 @@ env.SConscript(
"algebra",
],
exports=[
- 'env'
+ 'env',
],
)
@@ -41,7 +41,7 @@ env.Library(
"utils/abt_hash.cpp",
"utils/interval_utils.cpp",
"utils/memo_utils.cpp",
- "utils/utils.cpp"
+ "utils/utils.cpp",
],
LIBDEPS=[
"$BUILD_DIR/mongo/db/exec/sbe/query_sbe_values",
@@ -73,5 +73,5 @@ env.CppUnitTest(
LIBDEPS=[
"optimizer",
"unit_test_utils",
- ]
+ ],
)
diff --git a/src/mongo/db/query/optimizer/algebra/SConscript b/src/mongo/db/query/optimizer/algebra/SConscript
index 0d2a48c24d3..b6a73de1e35 100644
--- a/src/mongo/db/query/optimizer/algebra/SConscript
+++ b/src/mongo/db/query/optimizer/algebra/SConscript
@@ -9,7 +9,5 @@ env.CppUnitTest(
source=[
'algebra_test.cpp',
],
- LIBDEPS=[
-
- ]
+ LIBDEPS=[],
)
diff --git a/src/mongo/db/repl/SConscript b/src/mongo/db/repl/SConscript
index f12237cf395..7f496a55383 100644
--- a/src/mongo/db/repl/SConscript
+++ b/src/mongo/db/repl/SConscript
@@ -14,7 +14,7 @@ env.Library(
'$BUILD_DIR/mongo/client/read_preference',
'$BUILD_DIR/mongo/idl/feature_flag',
'$BUILD_DIR/mongo/idl/server_parameter',
- ]
+ ],
)
env.Library(
@@ -24,7 +24,7 @@ env.Library(
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/base',
- ]
+ ],
)
env.Library(
@@ -36,7 +36,7 @@ env.Library(
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/base',
'oplog_entry',
- ]
+ ],
)
env.Library(
@@ -127,7 +127,7 @@ env.Library(
],
LIBDEPS_PRIVATE=[
'replication_auth',
- ]
+ ],
)
env.Library(
@@ -173,7 +173,7 @@ env.Library(
],
LIBDEPS_PRIVATE=[
'repl_server_parameters',
- ]
+ ],
)
env.Library(
@@ -296,8 +296,7 @@ env.Library(
source=[
'replication_recovery.cpp',
],
- LIBDEPS=[
- ],
+ LIBDEPS=[],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/db/index_builds_coordinator_interface',
@@ -389,7 +388,6 @@ env.Library(
],
)
-
env.Library(
target='oplog_interface_local',
source=[
@@ -550,7 +548,7 @@ env.Library(
env.Library(
target='optime_base',
source=[
- 'optime_base.idl'
+ 'optime_base.idl',
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
@@ -626,7 +624,7 @@ env.Library(
'$BUILD_DIR/mongo/db/namespace_string',
'oplog_entry',
'optime',
- ]
+ ],
)
env.Library(
@@ -639,7 +637,7 @@ env.Library(
LIBDEPS=[
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/db/common',
- '$BUILD_DIR/mongo/db/exec/document_value/document_value'
+ '$BUILD_DIR/mongo/db/exec/document_value/document_value',
],
)
@@ -662,7 +660,7 @@ env.Library(
],
LIBDEPS=[
'replica_set_messages',
- ]
+ ],
)
env.Library(
@@ -686,7 +684,7 @@ env.Library(
'$BUILD_DIR/mongo/db/catalog/commit_quorum_options',
'$BUILD_DIR/mongo/idl/server_parameter',
'repl_server_parameters',
- ]
+ ],
)
env.Library(
@@ -816,26 +814,30 @@ env.Library(
],
)
-env.Library('read_concern_args',
- [
- 'read_concern_args.cpp'
- ],
- LIBDEPS=[
- '$BUILD_DIR/mongo/base',
- '$BUILD_DIR/mongo/bson/util/bson_extract',
- '$BUILD_DIR/mongo/db/logical_time',
- '$BUILD_DIR/mongo/db/read_write_concern_provenance',
- 'optime',
- ])
+env.Library(
+ 'read_concern_args',
+ [
+ 'read_concern_args.cpp',
+ ],
+ LIBDEPS=[
+ '$BUILD_DIR/mongo/base',
+ '$BUILD_DIR/mongo/bson/util/bson_extract',
+ '$BUILD_DIR/mongo/db/logical_time',
+ '$BUILD_DIR/mongo/db/read_write_concern_provenance',
+ 'optime',
+ ],
+)
-env.Library('speculative_majority_read_info',
- [
- 'speculative_majority_read_info.cpp'
- ],
- LIBDEPS=[
- '$BUILD_DIR/mongo/base',
- 'optime',
- ])
+env.Library(
+ 'speculative_majority_read_info',
+ [
+ 'speculative_majority_read_info.cpp',
+ ],
+ LIBDEPS=[
+ '$BUILD_DIR/mongo/base',
+ 'optime',
+ ],
+)
env.Library(
target='replica_set_messages',
@@ -873,7 +875,7 @@ env.Library(
'$BUILD_DIR/mongo/idl/server_parameter',
'repl_server_parameters',
'split_horizon',
- ]
+ ],
)
env.Library(
@@ -1027,7 +1029,7 @@ env.Library(
'$BUILD_DIR/mongo/util/progress_meter',
'repl_server_parameters',
'replication_auth',
- ]
+ ],
)
env.Library(
@@ -1056,7 +1058,7 @@ env.Library(
'$BUILD_DIR/mongo/util/progress_meter',
'oplog',
'repl_server_parameters',
- ]
+ ],
)
env.Library(
@@ -1149,7 +1151,7 @@ env.Library(
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
- ]
+ ],
)
env.Library(
@@ -1166,7 +1168,7 @@ env.Library(
LIBDEPS_PRIVATE=[
'repl_server_parameters',
'replication_consistency_markers_impl',
- ]
+ ],
)
env.Library(
@@ -1184,7 +1186,7 @@ env.Library(
'$BUILD_DIR/mongo/db/namespace_string',
'$BUILD_DIR/mongo/idl/idl_parser',
'read_concern_args',
- ]
+ ],
)
env.Library(
@@ -1210,14 +1212,14 @@ env.Library(
'repl_sync_shared_data',
'rollback_checker',
'storage_interface',
- 'tenant_migration_access_blocker'
+ 'tenant_migration_access_blocker',
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/commands/feature_compatibility_parsers',
'$BUILD_DIR/mongo/db/index_builds_coordinator_interface',
'$BUILD_DIR/mongo/executor/scoped_task_executor',
'repl_server_parameters',
- ]
+ ],
)
env.Library(
@@ -1401,7 +1403,7 @@ env.Library(
'tenant_migration_cloners',
'tenant_migration_state_machine_idl',
'tenant_oplog_processing',
- ]
+ ],
)
env.Library(
@@ -1430,7 +1432,7 @@ env.Library(
'repl_coordinator_interface',
'tenant_migration_decoration',
'tenant_migration_errors',
- 'tenant_migration_state_machine_idl'
+ 'tenant_migration_state_machine_idl',
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/catalog/local_oplog_info',
@@ -1530,7 +1532,7 @@ env.Library(
'oplog',
'oplog_entry',
'replmocks',
- ]
+ ],
)
env.Library(
@@ -1611,7 +1613,7 @@ if wiredtiger:
'$BUILD_DIR/mongo/util/version_impl',
'idempotency_test_fixture',
'tenant_migration_donor_service',
- ]
+ ],
)
env.CppUnitTest(
@@ -1779,7 +1781,7 @@ env.CppUnitTest(
'repl_coordinator_test_fixture',
'repl_server_parameters',
'topology_coordinator',
- ]
+ ],
)
env.CppUnitTest(
@@ -1793,7 +1795,7 @@ env.CppUnitTest(
'repl_coordinator_impl',
'repl_coordinator_test_fixture',
'topology_version_observer',
- ]
+ ],
)
env.Library(
@@ -1960,7 +1962,7 @@ env.Library(
target='tenant_oplog_processing',
source=[
'tenant_oplog_batcher.cpp',
- 'tenant_oplog_applier.cpp'
+ 'tenant_oplog_applier.cpp',
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/base',
@@ -1970,7 +1972,7 @@ env.Library(
'oplog',
'oplog_application',
'oplog_application_interface',
- 'repl_server_parameters'
+ 'repl_server_parameters',
],
)
diff --git a/src/mongo/db/s/SConscript b/src/mongo/db/s/SConscript
index 3d14f8ee931..d6900c40b54 100644
--- a/src/mongo/db/s/SConscript
+++ b/src/mongo/db/s/SConscript
@@ -36,8 +36,8 @@ env.Library(
'$BUILD_DIR/mongo/s/sharding_routing_table',
],
LIBDEPS_PRIVATE=[
- '$BUILD_DIR/mongo/db/write_block_bypass'
- ]
+ '$BUILD_DIR/mongo/db/write_block_bypass',
+ ],
)
env.Library(
@@ -187,7 +187,7 @@ env.Library(
'$BUILD_DIR/mongo/db/repl/replica_set_aware_service',
'$BUILD_DIR/mongo/db/rw_concern_d',
'sharding_api_d',
- ]
+ ],
)
env.Library(
@@ -222,14 +222,14 @@ env.Library(
'$BUILD_DIR/mongo/executor/task_executor_pool',
'$BUILD_DIR/mongo/s/grid',
'sharding_api_d',
- ]
+ ],
)
env.Library(
target='forwardable_operation_metadata',
source=[
'forwardable_operation_metadata.cpp',
- 'forwardable_operation_metadata.idl'
+ 'forwardable_operation_metadata.idl',
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
@@ -237,7 +237,7 @@ env.Library(
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/write_block_bypass',
- ]
+ ],
)
env.Library(
@@ -485,7 +485,7 @@ env.Library(
'$BUILD_DIR/mongo/db/pipeline/sharded_agg_helpers',
'$BUILD_DIR/mongo/s/sessions_collection_sharded',
'$BUILD_DIR/mongo/s/sharding_api',
- ]
+ ],
)
env.Library(
@@ -676,7 +676,7 @@ env.CppUnitTest(
'topology_time_ticker_test.cpp',
'type_lockpings_test.cpp',
'type_locks_test.cpp',
- 'vector_clock_config_server_test.cpp'
+ 'vector_clock_config_server_test.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/db/auth/authmocks',
@@ -701,7 +701,7 @@ env.Benchmark(
LIBDEPS=[
'$BUILD_DIR/mongo/db/auth/authmocks',
'$BUILD_DIR/mongo/db/auth/authorization_manager_global',
- 'sharding_runtime_d'
+ 'sharding_runtime_d',
],
)
diff --git a/src/mongo/db/serverless/SConscript b/src/mongo/db/serverless/SConscript
index e724d78a83e..a06609a6e55 100644
--- a/src/mongo/db/serverless/SConscript
+++ b/src/mongo/db/serverless/SConscript
@@ -11,7 +11,7 @@ env.Library(
LIBDEPS=[
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/idl/basic_types',
- '$BUILD_DIR/mongo/idl/idl_parser'
+ '$BUILD_DIR/mongo/idl/idl_parser',
],
)
@@ -24,7 +24,7 @@ env.Library(
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/client/connection_string',
'$BUILD_DIR/mongo/db/repl/tenant_migration_utils',
- '$BUILD_DIR/mongo/idl/idl_parser'
+ '$BUILD_DIR/mongo/idl/idl_parser',
],
)
@@ -42,20 +42,20 @@ env.Library(
'$BUILD_DIR/mongo/db/repl/tenant_migration_utils',
'$BUILD_DIR/mongo/idl/idl_parser',
'shard_split_donor_service',
- ]
+ ],
)
env.Library(
target='shard_split_utils',
source=[
- 'shard_split_utils.cpp'
+ 'shard_split_utils.cpp',
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/concurrency/exception_util',
'$BUILD_DIR/mongo/db/dbhelpers',
'$BUILD_DIR/mongo/db/repl/replica_set_messages',
'shard_split_state_machine',
- ]
+ ],
)
env.Library(
@@ -78,8 +78,8 @@ env.Library(
'$BUILD_DIR/mongo/db/namespace_string',
'$BUILD_DIR/mongo/db/repl/oplog',
'$BUILD_DIR/mongo/db/repl/tenant_migration_access_blocker',
- 'shard_split_utils'
- ]
+ 'shard_split_utils',
+ ],
)
env.CppUnitTest(
@@ -99,5 +99,5 @@ env.CppUnitTest(
'$BUILD_DIR/mongo/dbtests/mocklib',
'shard_split_donor_service',
'shard_split_utils',
- ]
+ ],
)
diff --git a/src/mongo/db/sorter/SConscript b/src/mongo/db/sorter/SConscript
index ae8184f222d..fc796b9319f 100644
--- a/src/mongo/db/sorter/SConscript
+++ b/src/mongo/db/sorter/SConscript
@@ -29,5 +29,5 @@ env.Library(
LIBDEPS=[
"$BUILD_DIR/mongo/base",
'$BUILD_DIR/mongo/idl/idl_parser',
- ]
+ ],
)
diff --git a/src/mongo/db/stats/SConscript b/src/mongo/db/stats/SConscript
index 17bbe0cb144..4769af29bea 100644
--- a/src/mongo/db/stats/SConscript
+++ b/src/mongo/db/stats/SConscript
@@ -19,7 +19,7 @@ env.Library(
target='top',
source=[
'top.cpp',
- 'operation_latency_histogram.cpp'
+ 'operation_latency_histogram.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/db/server_options_core',
@@ -38,7 +38,7 @@ env.Library(
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/shared_request_handling',
'$BUILD_DIR/mongo/rpc/client_metadata',
- ]
+ ],
)
env.Library(
@@ -94,7 +94,7 @@ env.Library(
env.Library(
target="transaction_stats",
source=[
- "single_transaction_stats.cpp"
+ "single_transaction_stats.cpp",
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
@@ -109,7 +109,7 @@ env.Library(
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
- '$BUILD_DIR/mongo/db/concurrency/lock_manager'
+ '$BUILD_DIR/mongo/db/concurrency/lock_manager',
],
)
@@ -126,9 +126,9 @@ env.Library(
'$BUILD_DIR/mongo/db/catalog/index_catalog',
'$BUILD_DIR/mongo/db/commands/server_status',
'$BUILD_DIR/mongo/db/db_raii',
- '$BUILD_DIR/mongo/db/dbdirectclient', # TODO (SERVER-64162) remove
+ '$BUILD_DIR/mongo/db/dbdirectclient', # TODO (SERVER-64162) remove
'$BUILD_DIR/mongo/db/index/index_access_method',
- '$BUILD_DIR/mongo/db/pipeline/aggregation_request_helper', # TODO (SERVER-64162) remove
+ '$BUILD_DIR/mongo/db/pipeline/aggregation_request_helper', # TODO (SERVER-64162) remove
'$BUILD_DIR/mongo/db/pipeline/document_sources_idl',
'$BUILD_DIR/mongo/db/timeseries/bucket_catalog',
'$BUILD_DIR/mongo/db/timeseries/timeseries_stats',
diff --git a/src/mongo/db/storage/SConscript b/src/mongo/db/storage/SConscript
index 6097874f3f5..9ac78a1409f 100644
--- a/src/mongo/db/storage/SConscript
+++ b/src/mongo/db/storage/SConscript
@@ -10,7 +10,7 @@ env.SConscript(
'wiredtiger',
],
exports=[
- 'env'
+ 'env',
],
)
@@ -209,7 +209,7 @@ env.Library(
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
- '$BUILD_DIR/mongo/db/service_context'
+ '$BUILD_DIR/mongo/db/service_context',
],
)
@@ -285,11 +285,10 @@ env.Library(
],
)
-
env.Library(
target='recovery_unit_test_harness',
source=[
- 'recovery_unit_test_harness.cpp'
+ 'recovery_unit_test_harness.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/db/concurrency/lock_manager',
@@ -314,7 +313,7 @@ env.Library(
env.Library(
target='storage_change_lock',
source=[
- 'storage_change_lock.cpp'
+ 'storage_change_lock.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
@@ -325,10 +324,9 @@ env.Library(
target='storage_engine_common',
source=[
'storage_engine_init.cpp',
- 'storage_engine_change_context.cpp'
- ],
- LIBDEPS=[
+ 'storage_engine_change_context.cpp',
],
+ LIBDEPS=[],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/concurrency/lock_manager',
'$BUILD_DIR/mongo/db/service_context',
@@ -351,8 +349,8 @@ env.Library(
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/commands/server_status',
- 'backup_cursor_hooks'
- ]
+ 'backup_cursor_hooks',
+ ],
)
env.Library(
@@ -363,7 +361,7 @@ env.Library(
LIBDEPS_PRIVATE=[
"$BUILD_DIR/mongo/base",
'$BUILD_DIR/mongo/db/namespace_string',
- ]
+ ],
)
env.Library(
@@ -420,7 +418,7 @@ env.Library(
env.Benchmark(
target='storage_key_string_bm',
source=[
- 'key_string_bm.cpp'
+ 'key_string_bm.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
@@ -431,7 +429,7 @@ env.Benchmark(
env.Benchmark(
target='storage_record_id_bm',
source=[
- 'record_id_bm.cpp'
+ 'record_id_bm.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
@@ -448,7 +446,7 @@ env.Library(
'$BUILD_DIR/mongo/base',
'encryption_hooks',
'storage_options',
- ]
+ ],
)
env.Library(
diff --git a/src/mongo/db/storage/kv/SConscript b/src/mongo/db/storage/kv/SConscript
index 67f6904300a..9d98220d97d 100644
--- a/src/mongo/db/storage/kv/SConscript
+++ b/src/mongo/db/storage/kv/SConscript
@@ -35,4 +35,3 @@ env.Library(
'$BUILD_DIR/mongo/util/clock_source_mock',
],
)
-
diff --git a/src/mongo/db/storage/wiredtiger/SConscript b/src/mongo/db/storage/wiredtiger/SConscript
index 1d25c612bba..f322d6c6b92 100644
--- a/src/mongo/db/storage/wiredtiger/SConscript
+++ b/src/mongo/db/storage/wiredtiger/SConscript
@@ -15,7 +15,7 @@ env.Library(
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
- '$BUILD_DIR/mongo/db/service_context'
+ '$BUILD_DIR/mongo/db/service_context',
],
)
@@ -157,7 +157,7 @@ wtEnv.CppUnitTest(
'$BUILD_DIR/mongo/util/clock_source_mock',
'storage_wiredtiger',
'storage_wiredtiger_core',
- ]
+ ],
)
wtEnv.Library(
@@ -197,7 +197,6 @@ wtEnv.Library(
],
)
-
wtEnv.Library(
target='storage_wiredtiger_import',
source=[
@@ -214,7 +213,6 @@ wtEnv.Library(
],
)
-
wtEnv.CppUnitTest(
target='storage_wiredtiger_record_store_and_index_test',
source=[
diff --git a/src/mongo/db/timeseries/SConscript b/src/mongo/db/timeseries/SConscript
index 1e38fb6b81f..8aa49e5fd8c 100644
--- a/src/mongo/db/timeseries/SConscript
+++ b/src/mongo/db/timeseries/SConscript
@@ -15,7 +15,7 @@ env.Library(
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/idl/idl_parser',
'$BUILD_DIR/mongo/idl/server_parameter',
- '$BUILD_DIR/mongo/util/processinfo'
+ '$BUILD_DIR/mongo/util/processinfo',
],
)
@@ -49,7 +49,7 @@ env.Library(
'$BUILD_DIR/mongo/db/server_options_core',
'$BUILD_DIR/mongo/db/storage/storage_options',
'$BUILD_DIR/mongo/util/fail_point',
- ]
+ ],
)
env.Library(
diff --git a/src/mongo/db/views/SConscript b/src/mongo/db/views/SConscript
index 1515599e61b..c0b69a4e6ba 100644
--- a/src/mongo/db/views/SConscript
+++ b/src/mongo/db/views/SConscript
@@ -57,7 +57,7 @@ env.Library(
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/timeseries/timeseries_conversion_util',
'$BUILD_DIR/mongo/db/timeseries/timeseries_options',
- ]
+ ],
)
env.CppUnitTest(
diff --git a/src/mongo/dbtests/SConscript b/src/mongo/dbtests/SConscript
index f735921a1df..59e5fe7547c 100644
--- a/src/mongo/dbtests/SConscript
+++ b/src/mongo/dbtests/SConscript
@@ -52,7 +52,7 @@ env.Library(
'mock/mock_conn_registry.cpp',
'mock/mock_dbclient_connection.cpp',
'mock/mock_remote_db_server.cpp',
- 'mock/mock_replica_set.cpp'
+ 'mock/mock_replica_set.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/client/clientdriver_network',
diff --git a/src/mongo/embedded/SConscript b/src/mongo/embedded/SConscript
index 879b5033969..b4243ca1ebf 100644
--- a/src/mongo/embedded/SConscript
+++ b/src/mongo/embedded/SConscript
@@ -8,15 +8,14 @@ Import("wiredtiger")
env = env.Clone()
-env.AppendUnique(
- CPPPATH=["$BUILD_DIR/mongo/embedded"],
-)
+env.AppendUnique(CPPPATH=["$BUILD_DIR/mongo/embedded"], )
# Inject this before we call the SDK directory SConscripts so that
# they can both use it.
sdkEnv = env.Clone()
+
def mongo_export_file_generator(target, source, env, for_signature):
if env.ToolchainIs('msvc'):
script = env.File(env.subst("${TARGET.base}.def", target=target))
@@ -30,6 +29,7 @@ def mongo_export_file_generator(target, source, env, for_signature):
else:
pass
+
# We really only want to use the mapfile if we are doing an SDK build. In an ordinary
# dynamic build, we would end up building the normal library with an export map
# but many of its symbols should in fact be coming from other libraries, and we
@@ -44,7 +44,7 @@ env.SConscript(
'stitch_support',
],
exports={
- 'env' : sdkEnv,
+ 'env': sdkEnv,
},
)
@@ -116,8 +116,9 @@ env.Library(
'$BUILD_DIR/mongo/db/vector_clock_trivial',
'$BUILD_DIR/mongo/db/wire_version',
'$BUILD_DIR/mongo/rpc/client_metadata',
- '$BUILD_DIR/mongo/util/latch_analyzer' if get_option('use-diagnostic-latches') == 'on' else [],
+ '$BUILD_DIR/mongo/util/latch_analyzer'
+ if get_option('use-diagnostic-latches') == 'on' else [],
'$BUILD_DIR/mongo/util/options_parser/options_parser',
'$BUILD_DIR/mongo/util/version_impl',
- ]
+ ],
)
diff --git a/src/mongo/embedded/mongo_embedded/SConscript b/src/mongo/embedded/mongo_embedded/SConscript
index 8f84ee065d5..39336394eba 100644
--- a/src/mongo/embedded/mongo_embedded/SConscript
+++ b/src/mongo/embedded/mongo_embedded/SConscript
@@ -19,18 +19,14 @@ env.AutoInstall(
)
mongoEmbeddedEnv = env.Clone()
-mongoEmbeddedEnv.AppendUnique(
- CPPDEFINES=[
- 'MONGO_EMBEDDED_COMPILING',
- ],
-)
+mongoEmbeddedEnv.AppendUnique(CPPDEFINES=[
+ 'MONGO_EMBEDDED_COMPILING',
+], )
if get_option('link-model') == 'static':
- mongoEmbeddedEnv.AppendUnique(
- CPPDEFINES=[
- 'MONGO_EMBEDDED_STATIC',
- ],
- )
+ mongoEmbeddedEnv.AppendUnique(CPPDEFINES=[
+ 'MONGO_EMBEDDED_STATIC',
+ ], )
elif get_option('link-model') == 'dynamic-sdk':
# TODO(SERVER-59134): This fails to honor the libdeps-debug flag
mongoEmbeddedEnv['LIBDEPS_SHLIBEMITTER'] = partial(
@@ -39,11 +35,9 @@ elif get_option('link-model') == 'dynamic-sdk':
visibility_map=libdeps.dependency_visibility_honored,
)
-mongoEmbeddedEnv.AppendUnique(
- SHLINKFLAGS=[
- '$MONGO_EXPORT_FILE_SHLINKFLAGS',
- ],
-)
+mongoEmbeddedEnv.AppendUnique(SHLINKFLAGS=[
+ '$MONGO_EXPORT_FILE_SHLINKFLAGS',
+], )
if mongoEmbeddedEnv.TargetOSIs('darwin'):
# The current version and compatibility are the *minor* ABI
@@ -57,9 +51,7 @@ if mongoEmbeddedEnv.TargetOSIs('darwin'):
SHLINKFLAGS=[
'-Wl,-current_version,1',
'-Wl,-compatibility_version,1',
- ],
- )
-
+ ], )
mongoEmbeddedTargets = mongoEmbeddedEnv.Library(
target='mongo_embedded',
diff --git a/src/mongo/embedded/mongoc_embedded/SConscript b/src/mongo/embedded/mongoc_embedded/SConscript
index 69b5d150b20..57cead4cc65 100644
--- a/src/mongo/embedded/mongoc_embedded/SConscript
+++ b/src/mongo/embedded/mongoc_embedded/SConscript
@@ -18,33 +18,29 @@ env.AutoInstall(
AIB_ROLE='base',
)
+
def create_mongoc_env(env):
mongocEnv = env.Clone()
mongocEnv.AppendUnique(LIBS=['bson-1.0', 'mongoc-1.0'])
return mongocEnv
+
mongocEmbeddedEnv = create_mongoc_env(env)
-mongocEmbeddedEnv.AppendUnique(
- CPPDEFINES=[
- 'MONGOC_EMBEDDED_COMPILING',
- ],
-)
+mongocEmbeddedEnv.AppendUnique(CPPDEFINES=[
+ 'MONGOC_EMBEDDED_COMPILING',
+], )
if get_option('link-model') == 'static':
- mongocEmbeddedEnv.AppendUnique(
- CPPDEFINES=[
- 'MONGOC_EMBEDDED_STATIC',
- ],
- )
+ mongocEmbeddedEnv.AppendUnique(CPPDEFINES=[
+ 'MONGOC_EMBEDDED_STATIC',
+ ], )
# Please see the note in ../mongo_embedded/SConscript about how to
# interpret and adjust the current and compatibility versinos.
-mongocEmbeddedEnv.AppendUnique(
- SHLINKFLAGS=[
- '$MONGO_EXPORT_FILE_SHLINKFLAGS',
- ],
-)
+mongocEmbeddedEnv.AppendUnique(SHLINKFLAGS=[
+ '$MONGO_EXPORT_FILE_SHLINKFLAGS',
+], )
if mongocEmbeddedEnv.TargetOSIs('darwin'):
# Please see the note in ../mongo_embedded/SConscript about how to
@@ -53,8 +49,7 @@ if mongocEmbeddedEnv.TargetOSIs('darwin'):
SHLINKFLAGS=[
'-Wl,-current_version,1',
'-Wl,-compatibility_version,1',
- ],
- )
+ ], )
mongocEmbeddedTargets = mongocEmbeddedEnv.Library(
target='mongoc_embedded',
@@ -72,7 +67,7 @@ env.AutoInstall(
'$PREFIX_INCLUDEDIR/mongoc_embedded/v1/mongoc_embedded',
source=['mongoc_embedded.h'],
AIB_COMPONENT='embedded',
- AIB_ROLE='dev'
+ AIB_ROLE='dev',
)
yamlEnv = env.Clone()
diff --git a/src/mongo/embedded/stitch_support/SConscript b/src/mongo/embedded/stitch_support/SConscript
index 8ab94cb83c9..63e178ea475 100644
--- a/src/mongo/embedded/stitch_support/SConscript
+++ b/src/mongo/embedded/stitch_support/SConscript
@@ -9,18 +9,14 @@ Import("get_option")
env = env.Clone()
stitchSupportEnv = env.Clone()
-stitchSupportEnv.AppendUnique(
- CPPDEFINES=[
- 'STITCH_SUPPORT_COMPILING',
- ],
-)
+stitchSupportEnv.AppendUnique(CPPDEFINES=[
+ 'STITCH_SUPPORT_COMPILING',
+], )
if get_option('link-model') == 'static':
- stitchSupportEnv.AppendUnique(
- CPPDEFINES=[
- 'STITCH_SUPPORT_STATIC',
- ],
- )
+ stitchSupportEnv.AppendUnique(CPPDEFINES=[
+ 'STITCH_SUPPORT_STATIC',
+ ], )
elif get_option('link-model') == 'dynamic-sdk':
# TODO(SERVER-59134): This fails to honor the libdeps-debug flag
stitchSupportEnv['LIBDEPS_SHLIBEMITTER'] = partial(
@@ -31,11 +27,9 @@ elif get_option('link-model') == 'dynamic-sdk':
# Please see the note in ../mongo_embedded/SConscript about how to
# interpret and adjust the current and compatibility versions.
-stitchSupportEnv.AppendUnique(
- SHLINKFLAGS=[
- '$MONGO_EXPORT_FILE_SHLINKFLAGS',
- ],
-)
+stitchSupportEnv.AppendUnique(SHLINKFLAGS=[
+ '$MONGO_EXPORT_FILE_SHLINKFLAGS',
+], )
stitchSupportTargets = stitchSupportEnv.Library(
target='stitch_support',
diff --git a/src/mongo/executor/SConscript b/src/mongo/executor/SConscript
index f60bca56216..33c76ab6cb3 100644
--- a/src/mongo/executor/SConscript
+++ b/src/mongo/executor/SConscript
@@ -16,7 +16,7 @@ env.Library(
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/server_feature_flags',
'$BUILD_DIR/mongo/idl/feature_flag',
- ]
+ ],
)
env.Library(
@@ -25,7 +25,7 @@ env.Library(
LIBDEPS=[
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/base/system_error',
- ]
+ ],
)
env.Library(
@@ -38,7 +38,7 @@ env.Library(
'$BUILD_DIR/mongo/db/api_parameters',
'$BUILD_DIR/mongo/rpc/metadata',
'$BUILD_DIR/mongo/util/net/network',
- ]
+ ],
)
env.Library(
@@ -51,7 +51,7 @@ env.Library(
'$BUILD_DIR/mongo/util/net/network',
'remote_command',
'task_executor_interface',
- ]
+ ],
)
env.Library(
@@ -62,7 +62,7 @@ env.Library(
LIBDEPS=[
'$BUILD_DIR/mongo/base',
'remote_command',
- ]
+ ],
)
env.Library(
@@ -73,7 +73,7 @@ env.Library(
LIBDEPS=[
'$BUILD_DIR/mongo/util/fail_point',
'task_executor_interface',
- ]
+ ],
)
env.Library(
@@ -83,7 +83,7 @@ env.Library(
],
LIBDEPS=[
'task_executor_interface',
- ]
+ ],
)
env.Library(
@@ -109,7 +109,7 @@ env.Library(
env.Library(
target='connection_pool_executor',
source=[
- 'connection_pool.cpp',
+ 'connection_pool.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
@@ -132,7 +132,7 @@ env.Library(
'$BUILD_DIR/mongo/db/query/command_request_response',
'network_interface_mock',
'task_executor_interface',
- ]
+ ],
)
env.Library(
@@ -142,7 +142,7 @@ env.Library(
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/service_context',
- ]
+ ],
)
env.Library(
@@ -172,13 +172,13 @@ env.Library(
'$BUILD_DIR/mongo/transport/transport_layer_manager',
'connection_pool_executor',
'network_interface',
- ]
+ ],
)
env.Library(
target='network_interface_fixture',
source=[
- 'network_interface_integration_fixture.cpp'
+ 'network_interface_integration_fixture.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/executor/network_interface_factory',
@@ -188,18 +188,15 @@ env.Library(
'$BUILD_DIR/mongo/unittest/integration_test_main',
'$BUILD_DIR/mongo/util/concurrency/thread_pool',
'$BUILD_DIR/mongo/util/version_impl',
- ]
+ ],
)
env.Library(
- target='network_interface_factory',
- source=[
+ target='network_interface_factory', source=[
'network_interface_factory.cpp',
- ],
- LIBDEPS=[
+ ], LIBDEPS=[
'connection_pool_executor',
- ],
- LIBDEPS_PRIVATE=[
+ ], LIBDEPS_PRIVATE=[
'egress_tag_closer_manager',
'network_interface',
'network_interface_tl',
@@ -209,14 +206,14 @@ env.Library(
target='task_executor_test_fixture',
source=[
'task_executor_test_common.cpp',
- 'task_executor_test_fixture.cpp'
+ 'task_executor_test_fixture.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/unittest/unittest',
'$BUILD_DIR/mongo/util/clock_source_mock',
'network_interface_mock',
'task_executor_interface',
- ]
+ ],
)
env.Library(
@@ -231,7 +228,7 @@ env.Library(
],
LIBDEPS_PRIVATE=[
'network_interface',
- ]
+ ],
)
env.Library(
@@ -248,11 +245,11 @@ env.Library(
target='thread_pool_task_executor_test_fixture',
source=[
'thread_pool_task_executor_test_fixture.cpp',
- ],
+ ],
LIBDEPS=[
'task_executor_test_fixture',
'thread_pool_task_executor',
- ]
+ ],
)
env.Library(
diff --git a/src/mongo/idl/SConscript b/src/mongo/idl/SConscript
index da648b3cf4b..6861fd9821a 100644
--- a/src/mongo/idl/SConscript
+++ b/src/mongo/idl/SConscript
@@ -25,13 +25,13 @@ env.Library(
source=[
'command_generic_argument.cpp',
'generic_argument.idl',
- 'idl_parser.cpp'
+ 'idl_parser.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/db/commands/server_status_core',
'$BUILD_DIR/mongo/db/tenant_id',
- ]
+ ],
)
env.Library(
@@ -72,7 +72,7 @@ env.Library(
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/dbdirectclient',
'$BUILD_DIR/mongo/db/repl/replica_set_aware_service',
- ]
+ ],
)
env.Library(
@@ -98,7 +98,7 @@ env.Library(
LIBDEPS=[
'$BUILD_DIR/mongo/base',
'cluster_server_parameter',
- ]
+ ],
)
env.CppUnitTest(
@@ -183,7 +183,6 @@ env.CppUnitTest(
'unittest.idl',
'unittest_import.idl',
],
-
LIBDEPS=[
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/db/auth/authprivilege',
diff --git a/src/mongo/installer/SConscript b/src/mongo/installer/SConscript
index 5bd89fe9d74..5bbf8db4c98 100644
--- a/src/mongo/installer/SConscript
+++ b/src/mongo/installer/SConscript
@@ -21,12 +21,12 @@ if env.TargetOSIs('windows'):
redist_file = env['MSVS'].get('VCREDISTEXE', None)
if not redist_file:
- env.FatalError('Required CRT redistributable not found; cannot build distribution package')
+ env.FatalError('Required CRT redistributable not found; cannot build distribution package')
env.AutoInstall(
target='$PREFIX_BINDIR',
source=[
- "${MSVS['VCREDISTEXE']}"
+ "${MSVS['VCREDISTEXE']}",
],
AIB_COMPONENT="dist",
AIB_ROLE="runtime",
diff --git a/src/mongo/installer/compass/SConscript b/src/mongo/installer/compass/SConscript
index 9a577142181..ecb6de43d0c 100644
--- a/src/mongo/installer/compass/SConscript
+++ b/src/mongo/installer/compass/SConscript
@@ -1,7 +1,7 @@
# -*- mode: python; -*-
Import([
- "env"
+ "env",
])
env = env.Clone()
diff --git a/src/mongo/installer/msi/SConscript b/src/mongo/installer/msi/SConscript
index 9b8cdc4033f..9bc64115bde 100644
--- a/src/mongo/installer/msi/SConscript
+++ b/src/mongo/installer/msi/SConscript
@@ -35,14 +35,17 @@ env['WIXUIEXT'] = r'$WIXPATH\WixUIExtension.dll'
env['WIXUTILEXT'] = r'$WIXPATH\WixUtilExtension.dll'
if not 'VCREDISTMERGEMODULEPATH' in env['MSVS']:
- print("SCons tool setup did not configure the path to the vcredist merge modules, disabling MSI installer")
+ print(
+ "SCons tool setup did not configure the path to the vcredist merge modules, disabling MSI installer"
+ )
Return()
-sourcesList = [ "BinaryFragment.wxs",
- "FeatureFragment.wxs",
- "LicensingFragment.wxs",
- "UIFragment.wxs",
- ]
+sourcesList = [
+ "BinaryFragment.wxs",
+ "FeatureFragment.wxs",
+ "LicensingFragment.wxs",
+ "UIFragment.wxs",
+]
# Need to do this in order to get scons to translate path separators into native format
buildDir = env.Dir("$BUILD_DIR").path
@@ -61,13 +64,13 @@ enterpriseToolBuildDir = buildDir + r'\mongo\db\modules\enterprise'
msi_flavor = '2008R2Plus'
msi_platform = 'x64'
-if 'enterprise' in env['MONGO_MODULES']: # Enterprise
- msi_edition = 'Enterprise'
-else: # Community
- if get_option('ssl') == 'on':
- msi_edition = 'SSL'
- else:
- msi_edition = 'Standard'
+if 'enterprise' in env['MONGO_MODULES']: # Enterprise
+ msi_edition = 'Enterprise'
+else: # Community
+ if get_option('ssl') == 'on':
+ msi_edition = 'SSL'
+ else:
+ msi_edition = 'Standard'
full_version = env['MONGO_VERSION'].partition('-')[0]
@@ -94,7 +97,6 @@ objects = ["$BUILD_DIR/msi/" + file.replace(".wxs", ".wixobj") for file in sourc
# GUID generation for us rather then build a database of GUIDs in our build system
# For major updates, we are going to create a new directory/productid/upgrade_code ie, 2.6 -> 3.0
-
# candle: compile .wxs files into .wixobjs
candle_targets = env.Command(
target=objects,
@@ -103,21 +105,16 @@ candle_targets = env.Command(
'"$WIXCANDLE" -wx'
# cannot have anything other than x.x.x.x in version string.
# we should choose a fourth version number that reflects pre-ness.
- ' -dMongoDBMajorVersion=' + major_version +
- ' -dMongoDBVersion=' + full_version +
+ ' -dMongoDBMajorVersion=' + major_version + ' -dMongoDBVersion=' + full_version +
' -dLicenseSource=distsrc'
r' -dEnterpriseBase=' + enterprisebase + '\\'
- ' -dBinarySource=' + "\"$DESTDIR\\$PREFIX_BINDIR\"" +
- ' -dMergeModulesBasePath=' + "\"${MSVS['VCREDISTMERGEMODULEPATH']}\"" +
- ' -dMergeModuleFileCRT=' + env.GetMergeModuleNameForFeature('CRT') +
- ' -dEdition=' + msi_edition +
- ' -d"ProductId=*\"'
- ' -dUpgradeCode=' + upgrade_code +
- ' -dCustomActionDll=' + "\"$DESTDIR\\$PREFIX_BINDIR\\mongoca.dll\"" +
- ' -dConfiguration=Release'
+ ' -dBinarySource=' + "\"$DESTDIR\\$PREFIX_BINDIR\"" + ' -dMergeModulesBasePath=' +
+ "\"${MSVS['VCREDISTMERGEMODULEPATH']}\"" + ' -dMergeModuleFileCRT=' +
+ env.GetMergeModuleNameForFeature('CRT') + ' -dEdition=' + msi_edition + ' -d"ProductId=*\"'
+ ' -dUpgradeCode=' + upgrade_code + ' -dCustomActionDll=' +
+ "\"$DESTDIR\\$PREFIX_BINDIR\\mongoca.dll\"" + ' -dConfiguration=Release'
' -dOutDir=' + buildDir + r'\msi'
- ' -dPlatform=' + msi_platform +
- ' -dFlavor=' + msi_flavor +
+ ' -dPlatform=' + msi_platform + ' -dFlavor=' + msi_flavor +
r' -dProjectDir=buildscripts\packaging\msi\\'
' -dProjectName=MongoDB'
' -dTargetDir=' + buildDir + r'\msi'
@@ -125,13 +122,11 @@ candle_targets = env.Command(
' -dTargetFileName=${SERVER_ARCHIVE}'
r' -dSaslSource=c:\sasl\bin'
r' -dSnmpSource=c:\snmp\bin'
- r' -dSslSource=' + env['WINDOWS_OPENSSL_BIN'] +
- ' -out ' + buildDir + r'\msi\\'
- ' -arch ' + msi_platform +
- ' -ext "$WIXUIEXT"'
+ r' -dSslSource=' + env['WINDOWS_OPENSSL_BIN'] + ' -out ' + buildDir + r'\msi\\'
+ ' -arch ' + msi_platform + ' -ext "$WIXUIEXT"'
' -ext "$WIXUTILEXT"'
' $SOURCES'
- ]
+ ],
)
pre_msi = env.Command(
@@ -155,34 +150,34 @@ pre_msi = env.Command(
# so this consistency check can be ignored.
# -- https://msdn.microsoft.com/en-us/library/windows/desktop/aa368954(v=vs.85).aspx
' -sice:ICE30'
-
' -ext "$WIXUIEXT"'
' -ext "$WIXUTILEXT"'
' ${SOURCES}'
- ]
+ ],
)
-env.Depends(pre_msi, [
- '#/buildscripts/packaging/msi/mongod.yaml',
+env.Depends(
+ pre_msi,
+ [
+ '#/buildscripts/packaging/msi/mongod.yaml',
- # This could potentially be a superset of what we actually
- # require to build the MSI, but it should never be a subset.
- env.Alias('install-dist'),
+ # This could potentially be a superset of what we actually
+ # require to build the MSI, but it should never be a subset.
+ env.Alias('install-dist'),
- # We also need the mongoca DLL.
- env.Alias('install-msi-util')
-])
+ # We also need the mongoca DLL.
+ env.Alias('install-msi-util')
+ ],
+)
env.NoCache(pre_msi)
msi = env.Command(
target='$BUILD_DIR/msi/${SERVER_DIST_BASENAME}.msi',
source=pre_msi,
- action=[
- r'$PYTHON buildscripts\msitrim.py ${SOURCES} ${TARGET}'
- ]
+ action=[r'$PYTHON buildscripts\msitrim.py ${SOURCES} ${TARGET}'],
)
env.AlwaysBuild(msi)
env.NoCache(msi)
-env.Alias( "msi" , msi )
+env.Alias("msi", msi)
diff --git a/src/mongo/installer/msi/ca/SConscript b/src/mongo/installer/msi/ca/SConscript
index 2506143eb92..70fda4faef7 100644
--- a/src/mongo/installer/msi/ca/SConscript
+++ b/src/mongo/installer/msi/ca/SConscript
@@ -20,9 +20,8 @@ else:
env.Append(LIBS=[
'msi',
- 'user32'
- ]
-)
+ 'user32',
+])
ca = env.SharedLibrary(
target='mongoca',
diff --git a/src/mongo/logv2/SConscript b/src/mongo/logv2/SConscript
index 05351e1a2fe..cde87eda686 100644
--- a/src/mongo/logv2/SConscript
+++ b/src/mongo/logv2/SConscript
@@ -15,7 +15,7 @@ env.CppUnitTest(
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/db/auth/security_token',
'$BUILD_DIR/mongo/db/multitenancy_params',
- ]
+ ],
)
env.Benchmark(
diff --git a/src/mongo/platform/SConscript b/src/mongo/platform/SConscript
index 71a0a57772d..b2a8b54a67d 100644
--- a/src/mongo/platform/SConscript
+++ b/src/mongo/platform/SConscript
@@ -21,7 +21,7 @@ env.CppUnitTest(
'stack_locator_test.cpp',
'decimal128_test.cpp',
'decimal128_bson_test.cpp',
- 'overflow_arithmetic_test.cpp'
+ 'overflow_arithmetic_test.cpp',
],
)
@@ -30,8 +30,7 @@ env.Benchmark(
source=[
'endian_bm.cpp',
],
- LIBDEPS=[
- ],
+ LIBDEPS=[],
)
if not get_option("link-model") == "dynamic":
@@ -52,7 +51,7 @@ env.Library(
env.Library(
target="visibility_test_lib1",
source=[
- "visibility_test_lib1.cpp"
+ "visibility_test_lib1.cpp",
],
LIBDEPS_NO_INHERIT=[
'$BUILD_DIR/third_party/shim_allocator',
@@ -67,7 +66,7 @@ env.Library(
env.Library(
target="visibility_test_lib2",
source=[
- "visibility_test_lib2.cpp"
+ "visibility_test_lib2.cpp",
],
LIBDEPS_NO_INHERIT=[
'$BUILD_DIR/third_party/shim_allocator',
diff --git a/src/mongo/resmoke/SConscript b/src/mongo/resmoke/SConscript
index fb80ab1a713..f2d81a98a66 100644
--- a/src/mongo/resmoke/SConscript
+++ b/src/mongo/resmoke/SConscript
@@ -9,9 +9,9 @@ install_dir = env.Dir('$DESTDIR/$PREFIX_BINDIR').path.replace("\\", r"\\")
resmoke_py = env.Substfile(
target="resmoke.py",
source='resmoke.py.in',
- SUBST_DICT = {
+ SUBST_DICT={
'@install_dir@': install_dir,
- }
+ },
)
resmoke_py_install = env.AutoInstall(
'$PREFIX_BINDIR',
diff --git a/src/mongo/rpc/SConscript b/src/mongo/rpc/SConscript
index 385279f5c6c..a3de4fa709e 100644
--- a/src/mongo/rpc/SConscript
+++ b/src/mongo/rpc/SConscript
@@ -138,7 +138,7 @@ env.Library(
env.Library(
target=[
- 'metadata_impersonated_user'
+ 'metadata_impersonated_user',
],
source=[
'metadata/impersonated_user_metadata.cpp',
@@ -154,7 +154,6 @@ env.Library(
],
)
-
env.Library(
target='client_metadata',
source=[
@@ -199,14 +198,14 @@ if wiredtiger:
'metadata',
'rewrite_state_change_errors',
'rpc',
- ]
+ ],
)
env.CppIntegrationTest(
target='rpc_integration_test',
source=[
'op_msg_integration_test.cpp',
- 'op_legacy_integration_test.cpp'
+ 'op_legacy_integration_test.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/client/clientdriver_network',
diff --git a/src/mongo/s/SConscript b/src/mongo/s/SConscript
index 2864810d025..ac21f54fe9c 100644
--- a/src/mongo/s/SConscript
+++ b/src/mongo/s/SConscript
@@ -140,11 +140,11 @@ env.Library(
env.Library(
target="load_balancer_feature_flag",
source=[
- 'load_balancer_feature_flag.idl'
+ 'load_balancer_feature_flag.idl',
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/idl/feature_flag',
- ]
+ ],
)
env.Library(
@@ -254,7 +254,7 @@ env.Library(
source=[
'sharding_test_fixture_common.cpp',
'catalog_cache_loader_mock.cpp',
- 'catalog_cache_mock.cpp'
+ 'catalog_cache_mock.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/client/remote_command_targeter_mock',
@@ -351,12 +351,12 @@ env.Benchmark(
env.Library(
target='committed_optime_metadata_hook',
source=[
- 'committed_optime_metadata_hook.cpp'
+ 'committed_optime_metadata_hook.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/rpc/metadata',
'coreshard',
- ]
+ ],
)
env.Library(
@@ -378,7 +378,7 @@ env.Library(
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/bson/util/bson_extract',
'$BUILD_DIR/mongo/rpc/metadata',
- ]
+ ],
)
env.Library(
@@ -403,7 +403,7 @@ env.Library(
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/idl/server_parameter',
- ]
+ ],
)
env.Library(
@@ -483,7 +483,8 @@ env.Library(
'$BUILD_DIR/mongo/transport/transport_layer_manager',
'$BUILD_DIR/mongo/util/clock_sources',
'$BUILD_DIR/mongo/util/fail_point',
- '$BUILD_DIR/mongo/util/latch_analyzer' if get_option('use-diagnostic-latches') == 'on' else [],
+ '$BUILD_DIR/mongo/util/latch_analyzer'
+ if get_option('use-diagnostic-latches') == 'on' else [],
'$BUILD_DIR/mongo/util/net/http_client_impl',
'$BUILD_DIR/mongo/util/net/ssl_options_server' if get_option('ssl') == 'on' else '',
'$BUILD_DIR/mongo/util/ntservice',
@@ -540,7 +541,8 @@ env.Library(
'$BUILD_DIR/mongo/db/startup_warnings_common',
'$BUILD_DIR/mongo/transport/service_entry_point',
'$BUILD_DIR/mongo/transport/transport_layer_manager',
- '$BUILD_DIR/mongo/util/latch_analyzer' if get_option('use-diagnostic-latches') == 'on' else [],
+ '$BUILD_DIR/mongo/util/latch_analyzer'
+ if get_option('use-diagnostic-latches') == 'on' else [],
'$BUILD_DIR/mongo/util/signal_handlers',
'client/sharding_client',
'commands/cluster_commands',
@@ -569,7 +571,8 @@ if env.TargetOSIs('windows'):
('@mongo_version_patch@', version_parts[2]),
('@mongo_version_extra@', version_parts[3]),
('@mongo_version_extra_str@', version_extra),
- ])
+ ],
+ )
env.Alias('generated-sources', generatedServerManifest)
env.Depends("mongos.res", generatedServerManifest)
diff --git a/src/mongo/s/catalog/SConscript b/src/mongo/s/catalog/SConscript
index 964be746811..6b6d5a43151 100644
--- a/src/mongo/s/catalog/SConscript
+++ b/src/mongo/s/catalog/SConscript
@@ -29,8 +29,8 @@ env.Library(
'sharding_catalog_client',
],
LIBDEPS_PRIVATE=[
- '$BUILD_DIR/mongo/db/logical_session_id_helpers'
- ]
+ '$BUILD_DIR/mongo/db/logical_session_id_helpers',
+ ],
)
env.Library(
@@ -40,6 +40,6 @@ env.Library(
],
LIBDEPS=[
'$BUILD_DIR/mongo/s/client/shard_interface',
- 'sharding_catalog_client'
- ]
+ 'sharding_catalog_client',
+ ],
)
diff --git a/src/mongo/s/commands/SConscript b/src/mongo/s/commands/SConscript
index ad4c3e46ae3..543fa0a59b8 100644
--- a/src/mongo/s/commands/SConscript
+++ b/src/mongo/s/commands/SConscript
@@ -17,7 +17,7 @@ env.Library(
'$BUILD_DIR/mongo/db/commands',
'$BUILD_DIR/mongo/s/grid',
'$BUILD_DIR/mongo/s/startup_initialization',
- ]
+ ],
)
# These commands are linked in mongos and mongoqd
@@ -154,7 +154,7 @@ env.Library(
'$BUILD_DIR/mongo/transport/message_compressor',
'$BUILD_DIR/mongo/transport/transport_layer_common',
'cluster_commands_common',
- ]
+ ],
)
env.Library(
@@ -185,7 +185,7 @@ env.Library(
'$BUILD_DIR/mongo/s/sharding_router_api',
'$BUILD_DIR/mongo/transport/message_compressor',
'$BUILD_DIR/mongo/transport/transport_layer_common',
- ]
+ ],
)
# These commands are linked in MongoS only
@@ -229,7 +229,7 @@ env.Library(
'$BUILD_DIR/mongo/transport/transport_layer_common',
'cluster_commands',
'cluster_commands_common',
- ]
+ ],
)
env.CppUnitTest(
diff --git a/src/mongo/s/query/SConscript b/src/mongo/s/query/SConscript
index f6f1ac53b05..ab67239ff80 100644
--- a/src/mongo/s/query/SConscript
+++ b/src/mongo/s/query/SConscript
@@ -44,7 +44,7 @@ env.Library(
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/timeseries/timeseries_options',
- ]
+ ],
)
env.Library(
@@ -92,13 +92,13 @@ env.Library(
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/catalog/collection_uuid_mismatch_info',
- ]
+ ],
)
env.Library(
target="store_possible_cursor",
source=[
- "store_possible_cursor.cpp"
+ "store_possible_cursor.cpp",
],
LIBDEPS=[
"$BUILD_DIR/mongo/base",
diff --git a/src/mongo/scripting/SConscript b/src/mongo/scripting/SConscript
index 5be96f735d5..9ff0309cc22 100644
--- a/src/mongo/scripting/SConscript
+++ b/src/mongo/scripting/SConscript
@@ -48,7 +48,7 @@ env.Library(
],
LBDEPS_PRIVATE=[
'bson_template_evaluator',
- ]
+ ],
)
if jsEngine:
@@ -59,16 +59,13 @@ if jsEngine:
            # TODO(SERVER-59992): Remove -Wno-class-memaccess where possible.
'-Wno-unknown-warning-option',
'-Wno-class-memaccess',
- ],
- )
+ ], )
scriptingEnv.InjectMozJS()
scriptingEnv.JSHeader(
target='mozjs/mongohelpers_js.cpp',
- source=[
- 'mozjs/mongohelpers.js'
- ]
+ source=['mozjs/mongohelpers.js'],
)
env.Alias('generated-sources', 'mozjs/mongohelpers_js.cpp')
@@ -138,7 +135,7 @@ else:
env.Library(
target='scripting',
source=[
- 'scripting_none.cpp'
+ 'scripting_none.cpp',
],
LIBDEPS=[
'scripting_none',
diff --git a/src/mongo/shell/SConscript b/src/mongo/shell/SConscript
index 28d8bc07156..6607afc5792 100644
--- a/src/mongo/shell/SConscript
+++ b/src/mongo/shell/SConscript
@@ -34,7 +34,8 @@ generateJSErrorCodes = env.Command(
'$BUILD_DIR/mongo/base/error_codes.yml',
'error_codes.tpl.js',
],
- action=['$PYTHON ${SOURCES[0]} ${SOURCES[1]} ${SOURCES[2]} ${TARGETS[0]}'])
+ action=['$PYTHON ${SOURCES[0]} ${SOURCES[1]} ${SOURCES[2]} ${TARGETS[0]}'],
+)
env.Alias('generated-sources', generateJSErrorCodes)
# Files added here need to be added in scripting/engine.cpp and buildscripts/vcxproj.header as well.
@@ -57,7 +58,7 @@ js_header = env.JSHeader(
"utils_auth.js",
"utils.js",
"utils_sh.js",
- ]
+ ],
)
env.Alias('generated-sources', js_header)
@@ -139,7 +140,7 @@ env.Library(
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/auth/security_token',
- ]
+ ],
)
if get_option('ssl') == 'on':
@@ -191,8 +192,7 @@ if get_option('ssl') == 'on':
            # TODO(SERVER-59992): Remove -Wno-class-memaccess where possible.
'-Wno-unknown-warning-option',
'-Wno-class-memaccess',
- ],
- )
+ ], )
scriptingEnv.InjectMozJS()
@@ -228,7 +228,7 @@ env.Library(
target="linenoise",
source=[
"linenoise.cpp",
- "mk_wcwidth.cpp"
+ "mk_wcwidth.cpp",
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
@@ -288,7 +288,6 @@ if not has_option('noshell') and jsEngine:
LIBDEPS=[
# NOTE: This list must remain empty. Please only add to LIBDEPS_PRIVATE
],
-
)
env.Library(
@@ -322,14 +321,15 @@ if not has_option('noshell') and jsEngine:
shellEnv.Append(LIBS=["winmm"])
generatedMongoManifest = shellEnv.Substfile(
- 'mongo.manifest.in',
- SUBST_DICT=[
- ('@mongo_version_major@', version_parts[0]),
- ('@mongo_version_minor@', version_parts[1]),
- ('@mongo_version_patch@', version_parts[2]),
- ('@mongo_version_extra@', version_parts[3]),
- ('@mongo_version_extra_str@', version_extra),
- ])
+ 'mongo.manifest.in',
+ SUBST_DICT=[
+ ('@mongo_version_major@', version_parts[0]),
+ ('@mongo_version_minor@', version_parts[1]),
+ ('@mongo_version_patch@', version_parts[2]),
+ ('@mongo_version_extra@', version_parts[3]),
+ ('@mongo_version_extra_str@', version_extra),
+ ],
+ )
shellEnv.Alias('generated-sources', generatedMongoManifest)
shellEnv.Depends("mongo.res", generatedMongoManifest)
@@ -358,12 +358,10 @@ if not has_option('noshell') and jsEngine:
],
)
-
env.CppUnitTest(
target='shell_test',
source=[
- 'kms_test.cpp' if get_option('ssl') == 'on' else [],
- 'shell_options_test.cpp',
+ 'kms_test.cpp' if get_option('ssl') == 'on' else [], 'shell_options_test.cpp',
'shell_utils_test.cpp'
],
LIBDEPS=[
diff --git a/src/mongo/stdx/SConscript b/src/mongo/stdx/SConscript
index fc70228475e..ece7efba3f5 100644
--- a/src/mongo/stdx/SConscript
+++ b/src/mongo/stdx/SConscript
@@ -29,7 +29,7 @@ env.Library(
env.CppUnitTest(
target='stdx_test',
source=[
- 'unordered_map_test.cpp'
+ 'unordered_map_test.cpp',
],
LIBDEPS=[
'$BUILD_DIR/third_party/shim_abseil',
diff --git a/src/mongo/tools/SConscript b/src/mongo/tools/SConscript
index af952f2df16..78882321775 100644
--- a/src/mongo/tools/SConscript
+++ b/src/mongo/tools/SConscript
@@ -15,7 +15,7 @@ mongobridge = env.Program(
"bridge_commands.cpp",
"mongobridge_options.cpp",
"mongobridge_options.idl",
- "mongobridge_options_init.cpp"
+ "mongobridge_options_init.cpp",
],
LIBDEPS=[
'$BUILD_DIR/mongo/db/dbmessage',
diff --git a/src/mongo/transport/SConscript b/src/mongo/transport/SConscript
index 8043daa2ecd..a7433a979cd 100644
--- a/src/mongo/transport/SConscript
+++ b/src/mongo/transport/SConscript
@@ -87,7 +87,7 @@ env.Library(
],
LIBDEPS_PRIVATE=[
'transport_layer',
- ]
+ ],
)
tlEnv.Library(
@@ -157,7 +157,7 @@ zlibEnv.Library(
'$BUILD_DIR/third_party/shim_snappy',
'$BUILD_DIR/third_party/shim_zlib',
'$BUILD_DIR/third_party/shim_zstd',
- ]
+ ],
)
env.Library(
@@ -225,8 +225,7 @@ tlEnvTest.Append(
# upgrade per SERVER-54569.
CCFLAGS=[] if env.TargetOSIs('windows') else [
'-Wno-nonnull',
- ],
-)
+ ], )
tlEnvTest.CppIntegrationTest(
target='transport_integration_test',
diff --git a/src/mongo/unittest/SConscript b/src/mongo/unittest/SConscript
index b045b3f7429..30b4d352ac6 100644
--- a/src/mongo/unittest/SConscript
+++ b/src/mongo/unittest/SConscript
@@ -26,9 +26,9 @@ utEnv.Library(
'$BUILD_DIR/mongo/db/server_options_core',
'$BUILD_DIR/mongo/util/options_parser/options_parser',
'$BUILD_DIR/third_party/shim_pcrecpp',
- '$BUILD_DIR/third_party/shim_yaml'
+ '$BUILD_DIR/third_party/shim_yaml',
],
- AIB_COMPONENT='unittests'
+ AIB_COMPONENT='unittests',
)
env.Library(
@@ -45,7 +45,7 @@ env.Library(
'$BUILD_DIR/mongo/db/wire_version',
'$BUILD_DIR/mongo/util/options_parser/options_parser',
],
- AIB_COMPONENT='unittests'
+ AIB_COMPONENT='unittests',
)
env.Library(
@@ -70,7 +70,7 @@ env.Library(
'$BUILD_DIR/mongo/util/options_parser/options_parser_init',
'$BUILD_DIR/mongo/util/testing_options',
],
- AIB_COMPONENT='integration-tests'
+ AIB_COMPONENT='integration-tests',
)
bmEnv = env.Clone()
@@ -78,7 +78,7 @@ bmEnv.InjectThirdParty(libraries=['benchmark'])
bmEnv.Library(
target='benchmark_main',
source=[
- 'benchmark_main.cpp'
+ 'benchmark_main.cpp',
],
LIBDEPS=[
# benchmark_main must not be made to depend on additional libraries.
@@ -87,7 +87,7 @@ bmEnv.Library(
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/third_party/shim_benchmark',
],
- AIB_COMPONENT='benchmarks'
+ AIB_COMPONENT='benchmarks',
)
env.Library(
@@ -98,7 +98,7 @@ env.Library(
LIBDEPS=[
'$BUILD_DIR/mongo/executor/task_executor_interface',
],
- AIB_COMPONENT='benchmarks'
+ AIB_COMPONENT='benchmarks',
)
env.CppUnitTest(
@@ -116,7 +116,7 @@ env.CppUnitTest(
env.Benchmark(
target='system_resource_canary_bm',
source=[
- 'system_resource_canary_bm.cpp'
+ 'system_resource_canary_bm.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/util/processinfo',
diff --git a/src/mongo/util/SConscript b/src/mongo/util/SConscript
index 21635d44a58..69e4cf54149 100644
--- a/src/mongo/util/SConscript
+++ b/src/mongo/util/SConscript
@@ -21,19 +21,23 @@ js_engine_ver = get_option("js-engine") if get_option("server-js") == "on" else
module_list = ',\n'.join(['"{0}"_sd'.format(x) for x in env['MONGO_MODULES']])
+
# Render the MONGO_BUILDINFO_ENVIRONMENT_DATA dict into an initializer for a
# `std::vector<VersionInfoInterface::BuildInfoField>`.
def fmtBuildInfo(data):
def fmtBool(val):
return "true" if val else "false"
+
def fmtStr(val):
return 'R"({0})"_sd'.format(val.replace("\\", r"\\"))
+
def fmtObj(obj):
- return '{{{}, {}, {}, {}}}'.format(fmtStr(obj['key']),
- fmtStr(env.subst(obj['value'])),
- fmtBool(obj['inBuildInfo']),
- fmtBool(obj['inVersion']))
- return ',\n'.join([fmtObj(obj) for _,obj in data.items()])
+ return '{{{}, {}, {}, {}}}'.format(
+ fmtStr(obj['key']), fmtStr(env.subst(obj['value'])), fmtBool(obj['inBuildInfo']),
+ fmtBool(obj['inVersion']))
+
+ return ',\n'.join([fmtObj(obj) for _, obj in data.items()])
+
buildInfoInitializer = fmtBuildInfo(env['MONGO_BUILDINFO_ENVIRONMENT_DATA'])
@@ -51,7 +55,8 @@ generatedVersionFile = env.Substfile(
('@buildinfo_allocator@', env['MONGO_ALLOCATOR']),
('@buildinfo_modules@', module_list),
('@buildinfo_environment_data@', buildInfoInitializer),
- ])
+ ],
+)
env.Alias('generated-sources', generatedVersionFile)
if env.TargetOSIs('windows'):
@@ -64,7 +69,8 @@ if env.TargetOSIs('windows'):
('@mongo_version_minor@', version_parts[1]),
('@mongo_version_patch@', version_parts[2]),
('@mongo_git_hash@', env['MONGO_GIT_HASH']),
- ])
+ ],
+ )
env.Alias('generated-sources', generatedResourceConstantFile)
# Shim library for boost to depend on
@@ -101,22 +107,22 @@ env.Library(
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
- ]
+ ],
)
env.Library(
target='intrusive_counter',
source=[
'intrusive_counter.cpp',
- ],
+ ],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
- ]
+ ],
)
env.Library(
target='log_and_backoff',
- source= [
+ source=[
'log_and_backoff.cpp',
],
LIBDEPS=[
@@ -126,7 +132,7 @@ env.Library(
env.Library(
target='regex_util',
- source= [
+ source=[
'regex_util.cpp',
],
LIBDEPS=[
@@ -166,7 +172,7 @@ env.Library(
env.Library(
target='clock_source_mock',
source=[
- 'clock_source_mock.cpp'
+ 'clock_source_mock.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
@@ -287,7 +293,7 @@ env.Library(
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/db/service_context',
- ]
+ ],
)
env.Library(
@@ -297,7 +303,7 @@ env.Library(
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
- ]
+ ],
)
env.CppUnitTest(
@@ -311,7 +317,6 @@ env.CppUnitTest(
],
)
-
env.CppUnitTest(
target='thread_safety_context_test',
source=[
@@ -332,12 +337,10 @@ if env['MONGO_ALLOCATOR'] in ['tcmalloc', 'tcmalloc-experimental']:
# If our changes to tcmalloc are ever upstreamed, this should become set based on a top
# level configure check, though its effects should still be scoped just to these files.
- tcmspEnv.Append(
- CPPDEFINES=[
- 'MONGO_HAVE_GPERFTOOLS_GET_THREAD_CACHE_SIZE',
- 'MONGO_HAVE_GPERFTOOLS_SIZE_CLASS_STATS'
- ]
- )
+ tcmspEnv.Append(CPPDEFINES=[
+ 'MONGO_HAVE_GPERFTOOLS_GET_THREAD_CACHE_SIZE',
+ 'MONGO_HAVE_GPERFTOOLS_SIZE_CLASS_STATS',
+ ])
if not use_system_version_of_library('valgrind'):
# Include valgrind since tcmalloc disables itself while running under valgrind
@@ -351,8 +354,7 @@ if env['MONGO_ALLOCATOR'] in ['tcmalloc', 'tcmalloc-experimental']:
'tcmalloc_parameters.idl',
'heap_profiler.cpp',
],
- LIBDEPS=[
- ],
+ LIBDEPS=[],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/commands/server_status',
'$BUILD_DIR/mongo/idl/server_parameter',
@@ -401,7 +403,7 @@ env.Library(
if get_option('use-diagnostic-latches') == 'on':
env.Library(
target='diagnostic_info',
- source= [
+ source=[
'diagnostic_info.cpp',
],
LIBDEPS=[
@@ -412,7 +414,7 @@ if get_option('use-diagnostic-latches') == 'on':
env.Library(
target='latch_analyzer',
- source= [
+ source=[
'latch_analyzer.cpp',
],
LIBDEPS=[
@@ -442,7 +444,7 @@ env.Library(
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
- 'net/network', # this is for using listener to check elapsed time
+ 'net/network', # this is for using listener to check elapsed time
],
)
@@ -511,7 +513,7 @@ env.Library(
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/idl/server_parameter',
- ]
+ ],
)
env.Library(
@@ -540,8 +542,7 @@ env.Benchmark(
source=[
'decimal_counter_bm.cpp',
],
- LIBDEPS=[
- ],
+ LIBDEPS=[],
)
env.Benchmark(
@@ -549,8 +550,7 @@ env.Benchmark(
source=[
'itoa_bm.cpp',
],
- LIBDEPS=[
- ],
+ LIBDEPS=[],
)
env.Benchmark(
@@ -558,8 +558,7 @@ env.Benchmark(
source=[
'future_bm.cpp',
],
- LIBDEPS=[
- ],
+ LIBDEPS=[],
)
env.Library(
@@ -568,15 +567,14 @@ env.Library(
'future_util.cpp',
],
LIBDEPS=[
- '$BUILD_DIR/mongo/executor/task_executor_interface'
+ '$BUILD_DIR/mongo/executor/task_executor_interface',
],
)
env.Benchmark(
target='hash_table_bm',
source='hash_table_bm.cpp',
- LIBDEPS=[
- ],
+ LIBDEPS=[],
)
if env.TargetOSIs('linux'):
@@ -612,8 +610,8 @@ if not use_system_version_of_library("icu"):
source=[
"generate_icu_init_cpp.py",
("$BUILD_DIR/third_party/icu4c-57.1/source/mongo_sources/icudt57l.dat"
- if endian == "little"
- else "$BUILD_DIR/third_party/icu4c-57.1/source/mongo_sources/icudt57b.dat"),
+ if endian == "little" else
+ "$BUILD_DIR/third_party/icu4c-57.1/source/mongo_sources/icudt57b.dat"),
],
action="$PYTHON ${SOURCES[0]} -o $TARGET -i ${SOURCES[1]}",
)
@@ -633,8 +631,7 @@ if not use_system_version_of_library("icu"):
("U_DISABLE_RENAMING", 1),
("U_STATIC_IMPLEMENTATION", 1),
("U_USING_ICU_NAMESPACE", 0),
- ],
- )
+ ], )
# When using ICU from third_party, icu_init.cpp will load a subset of
# ICU's data files using udata_setCommonData() in an initializer.
@@ -769,7 +766,7 @@ if env.TargetOSIs('windows'):
],
LIBS=[
'shell32',
- env['LIBS']
+ env['LIBS'],
],
)
@@ -781,11 +778,9 @@ env.Benchmark(
stacktraceEnv = env.Clone()
if use_libunwind:
stacktraceEnv.InjectThirdParty(libraries=['unwind'])
- stacktraceEnv.AppendUnique(
- LIBDEPS=[
- '$BUILD_DIR/third_party/shim_unwind',
- ],
- )
+ stacktraceEnv.AppendUnique(LIBDEPS=[
+ '$BUILD_DIR/third_party/shim_unwind',
+ ], )
stacktraceEnv.CppUnitTest(
target=[
@@ -851,4 +846,3 @@ env.Benchmark(
'processinfo',
],
)
-
diff --git a/src/mongo/util/cmdline_utils/SConscript b/src/mongo/util/cmdline_utils/SConscript
index 1fbe0abb7e7..1cf262ac6c6 100644
--- a/src/mongo/util/cmdline_utils/SConscript
+++ b/src/mongo/util/cmdline_utils/SConscript
@@ -12,5 +12,5 @@ env.Library(
LIBDEPS=[
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/util/options_parser/options_parser',
- ])
-
+ ],
+)
diff --git a/src/mongo/util/concurrency/SConscript b/src/mongo/util/concurrency/SConscript
index 7ac61ec0025..7e370ad903b 100644
--- a/src/mongo/util/concurrency/SConscript
+++ b/src/mongo/util/concurrency/SConscript
@@ -16,22 +16,21 @@ env.Library(
env.Library(
target='thread_pool_test_fixture',
- source=[
- 'thread_pool_test_common.cpp',
- 'thread_pool_test_fixture.cpp'
- ],
+ source=['thread_pool_test_common.cpp', 'thread_pool_test_fixture.cpp'],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/unittest/unittest',
- ]
+ ],
)
-env.Library('ticketholder',
- ['ticketholder.cpp'],
- LIBDEPS=[
- '$BUILD_DIR/mongo/base',
- '$BUILD_DIR/mongo/db/service_context',
- '$BUILD_DIR/third_party/shim_boost',
- ])
+env.Library(
+ 'ticketholder',
+ ['ticketholder.cpp'],
+ LIBDEPS=[
+ '$BUILD_DIR/mongo/base',
+ '$BUILD_DIR/mongo/db/service_context',
+ '$BUILD_DIR/third_party/shim_boost',
+ ],
+)
env.Library(
target='spin_lock',
@@ -40,7 +39,7 @@ env.Library(
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
- ]
+ ],
)
env.CppUnitTest(
@@ -57,7 +56,7 @@ env.CppUnitTest(
'thread_pool',
'thread_pool_test_fixture',
'ticketholder',
- ]
+ ],
)
env.Benchmark(
@@ -69,4 +68,3 @@ env.Benchmark(
'ticketholder',
],
)
-
diff --git a/src/mongo/util/net/SConscript b/src/mongo/util/net/SConscript
index 9aec6ba3150..a8349820084 100644
--- a/src/mongo/util/net/SConscript
+++ b/src/mongo/util/net/SConscript
@@ -39,7 +39,7 @@ env.Library(
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/db/server_options_core',
'$BUILD_DIR/mongo/util/options_parser/options_parser',
- ]
+ ],
)
env.Library(
@@ -54,7 +54,7 @@ env.Library(
],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/mongo/util/options_parser/options_parser',
- ]
+ ],
)
env.Library(
@@ -72,7 +72,7 @@ env.Library(
'$BUILD_DIR/mongo/db/auth/cluster_auth_mode',
'$BUILD_DIR/mongo/db/server_options_core',
'$BUILD_DIR/mongo/util/options_parser/options_parser',
- ]
+ ],
)
env.Library(
@@ -85,7 +85,7 @@ env.Library(
],
LIBDEPS_PRIVATE=[
'ssl_options',
- ]
+ ],
)
if not get_option('ssl') == 'off':
@@ -99,10 +99,8 @@ if not get_option('ssl') == 'off':
'$BUILD_DIR/mongo/base',
'ssl_options',
],
- LIBDEPS_PRIVATE=[
- '$BUILD_DIR/mongo/crypto/symmetric_crypto'
- ]
- );
+ LIBDEPS_PRIVATE=['$BUILD_DIR/mongo/crypto/symmetric_crypto'],
+ )
env.Library(
target='ssl_manager',
@@ -193,7 +191,6 @@ env.Library(
],
)
-
if http_client == "off":
env.Library(
target='http_client_impl',
@@ -287,5 +284,4 @@ if get_option('ssl') == 'on':
'$BUILD_DIR/mongo/util/concurrency/thread_pool',
'$BUILD_DIR/mongo/util/version_impl',
],
-)
-
+ )
diff --git a/src/mongo/util/options_parser/SConscript b/src/mongo/util/options_parser/SConscript
index f8c7c2c0046..1290fdc521a 100644
--- a/src/mongo/util/options_parser/SConscript
+++ b/src/mongo/util/options_parser/SConscript
@@ -35,8 +35,11 @@ optsEnv.Library(
# library because some code that is shared between many different binaries needs to link against the
# options_parser library, but not all these binaries need to actually run the option parsing.
# Linking against this library will cause the option parsing initializer to actually be run.
-env.Library('options_parser_init', ['options_parser_init.cpp'],
- LIBDEPS=['options_parser'])
+env.Library(
+ 'options_parser_init',
+ ['options_parser_init.cpp'],
+ LIBDEPS=['options_parser'],
+)
env.CppUnitTest(
target='options_parser_test',
diff --git a/src/mongo/watchdog/SConscript b/src/mongo/watchdog/SConscript
index 102c548c689..f26af597f28 100644
--- a/src/mongo/watchdog/SConscript
+++ b/src/mongo/watchdog/SConscript
@@ -11,7 +11,7 @@ env.Library(
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/db/service_context',
'$BUILD_DIR/mongo/db/storage/storage_options',
- ]
+ ],
)
env.Library(
diff --git a/src/third_party/IntelRDFPMathLib20U1/SConscript b/src/third_party/IntelRDFPMathLib20U1/SConscript
index 8e55a1fe620..6cf292b64f6 100644
--- a/src/third_party/IntelRDFPMathLib20U1/SConscript
+++ b/src/third_party/IntelRDFPMathLib20U1/SConscript
@@ -262,12 +262,14 @@ cpp_defines = {
libs = []
+
def removeIfPresent(lst, item):
try:
lst.remove(item)
except ValueError:
pass
+
def float53_object(target, source, extra_defines=None):
source = "LIBRARY/float128/" + source + ".c"
obj_env = env.Clone()
@@ -280,13 +282,13 @@ def float53_object(target, source, extra_defines=None):
obj_env.Append(CCFLAGS='-w')
return obj_env.LibraryObject(target, source)
+
# Set OS Defines
if env.TargetOSIs('darwin'):
cpp_defines['LINUX'] = '1'
cpp_defines['mach'] = '1'
extra_objects = [
- float53_object('dpml_log1p_t', 'dpml_log',
- [('LOG1P', '1'), ('efi2', '1'), ('EFI2', '1')])
+ float53_object('dpml_log1p_t', 'dpml_log', [('LOG1P', '1'), ('efi2', '1'), ('EFI2', '1')])
]
files += extra_objects
@@ -341,15 +343,14 @@ if env.TargetOSIs('windows'):
# C4477: incorrect scanf format string
env.Append(CCFLAGS=['/wd4477'])
-
if env.ToolchainIs('msvc'):
removeIfPresent(env['CCFLAGS'], '/W3')
else:
env.Append(CCFLAGS='-w')
-
libenv = env.Clone()
+
# On windows and macos we don't have this problem, but on other
# platforms we do, where the system libgcc may duplicate symbols that
# exist in the intel library. We use a version script to force our
@@ -367,12 +368,14 @@ def export_file_generator(target, source, env, for_signature):
if env.TargetOSIs('posix'):
return env.File(env.subst("${TARGET.base}.version_script", target=target))
+
def export_flag_generator(target, source, env, for_signature):
if env.TargetOSIs('darwin'):
return "-Wl,-exported_symbols_list,"
if env.TargetOSIs('posix'):
return "-Wl,--version-script,"
+
if env.TargetOSIs('darwin', 'posix'):
libenv.AppendUnique(
EXPORT_FLAG_GEN=export_flag_generator,
@@ -388,11 +391,14 @@ inteldecimal_library = libenv.Library(
LIBS=libenv['LIBS'] + libs,
LIBDEPS_TAGS=[
'init-no-global-side-effects',
- ]
+ ],
)
if 'EXPORT_FILE_GEN' in libenv:
- libenv.Depends(inteldecimal_library, libenv.subst('$EXPORT_FILE_GEN', target=inteldecimal_library))
+ libenv.Depends(
+ inteldecimal_library,
+ libenv.subst('$EXPORT_FILE_GEN', target=inteldecimal_library),
+ )
if env["BUILDERS"].get("Ninja", None) is not None:
Return()
@@ -437,9 +443,9 @@ readtest_input = env.AutoInstall(
env.Depends(readtest_input, readtest)
readtest_dict = {
- '@readtest_python_interpreter@' : sys.executable.replace('\\', r'\\'),
- '@readtest_program@' : readtest[0].name,
- '@readtest_input@' : readtest_input[0].name,
+ '@readtest_python_interpreter@': sys.executable.replace('\\', r'\\'),
+ '@readtest_program@': readtest[0].name,
+ '@readtest_input@': readtest_input[0].name,
}
readtest_wrapper = env.Substfile(
diff --git a/src/third_party/SConscript b/src/third_party/SConscript
index 8a5b21f1c0a..07dee011853 100644
--- a/src/third_party/SConscript
+++ b/src/third_party/SConscript
@@ -22,46 +22,32 @@ tomcryptSuffix = '-1.18.2'
variantSuffix = '-1.4.0'
thirdPartyEnvironmentModifications = {
- 'abseil-cpp' : {
- 'CPPPATH' : ['#/src/third_party/abseil-cpp-master/abseil-cpp'],
- },
- 'fmt' : {
- 'CPPPATH' : ['#src/third_party/fmt/dist/include'],
- },
- 's2' : {
- 'CPPPATH' : ['#src/third_party/s2'],
- },
- 'safeint' : {
- 'CPPPATH' : ['#src/third_party/SafeInt'],
+ 'abseil-cpp': {'CPPPATH': ['#/src/third_party/abseil-cpp-master/abseil-cpp'], },
+ 'fmt': {'CPPPATH': ['#src/third_party/fmt/dist/include'], },
+ 's2': {'CPPPATH': ['#src/third_party/s2'], },
+ 'safeint': {
+ 'CPPPATH': ['#src/third_party/SafeInt'],
# SAFEINT_USE_INTRINSICS=0 for overflow-safe constexpr multiply. See comment in SafeInt.hpp.
- 'CPPDEFINES' : [('SAFEINT_USE_INTRINSICS', 0)],
- },
- 'timelib' : {
- 'CPPPATH' : ['#/src/third_party/timelib' + timelibSuffix],
- },
- 'unwind' : {
- },
- 'variant' : {
- 'CPPPATH' : ['#src/third_party/variant' + variantSuffix + '/include'],
- },
+ 'CPPDEFINES': [('SAFEINT_USE_INTRINSICS', 0)],
+ },
+ 'timelib': {'CPPPATH': ['#/src/third_party/timelib' + timelibSuffix], },
+ 'unwind': {},
+ 'variant': {'CPPPATH': ['#src/third_party/variant' + variantSuffix + '/include'], },
}
+
def injectMozJS(thisEnv):
thisEnv.InjectThirdParty(libraries=['mozjs'])
if thisEnv.TargetOSIs('windows'):
- thisEnv.Append(
- CPPDEFINES=[
- '_SILENCE_CXX17_ITERATOR_BASE_CLASS_DEPRECATION_WARNING',
- ]
- )
+ thisEnv.Append(CPPDEFINES=[
+ '_SILENCE_CXX17_ITERATOR_BASE_CLASS_DEPRECATION_WARNING',
+ ], )
else:
- thisEnv.Append(
- CXXFLAGS=[
- '-Wno-non-virtual-dtor',
- '-Wno-invalid-offsetof',
- ],
- )
+ thisEnv.Append(CXXFLAGS=[
+ '-Wno-non-virtual-dtor',
+ '-Wno-invalid-offsetof',
+ ], )
thisEnv.Prepend(CPPDEFINES=[
'JS_USE_CUSTOM_ALLOCATOR',
@@ -74,8 +60,8 @@ def injectMozJS(thisEnv):
'JS_DEBUG',
])
-env.AddMethod(injectMozJS, 'InjectMozJS')
+env.AddMethod(injectMozJS, 'InjectMozJS')
if not use_system_version_of_library('tcmalloc'):
# GPerftools does this slightly differently than the others.
@@ -83,17 +69,16 @@ if not use_system_version_of_library('tcmalloc'):
if not use_system_version_of_library('pcre'):
thirdPartyEnvironmentModifications['pcre'] = {
- 'CPPPATH' : ['#/src/third_party/pcre' + pcreSuffix],
+ 'CPPPATH': ['#/src/third_party/pcre' + pcreSuffix],
}
if not use_system_version_of_library('pcre2'):
thirdPartyEnvironmentModifications['pcre2'] = {
- 'CPPPATH' : ['#/src/third_party/pcre2'],
+ 'CPPPATH': ['#/src/third_party/pcre2'],
}
if not use_system_version_of_library('boost'):
-
# On at least Apple clang, proto throws this error.
#
# See https://github.com/boostorg/proto/issues/30.
@@ -106,38 +91,38 @@ if not use_system_version_of_library('boost'):
return '-Wno-error=unknown-warning-option'
thirdPartyEnvironmentModifications['boost'] = {
- 'CPPPATH' : ['#/src/third_party/boost'],
+ 'CPPPATH': ['#/src/third_party/boost'],
        # We could narrow further to just clang on Darwin, but there is
# little harm in applying for all clang.
- 'NOERROR_FOR_UNKNOWN_WARNING_OPTION_GEN' : NoErrorForUnknownWarningOptionGenerator,
- 'CCFLAGS' : ['$NOERROR_FOR_UNKNOWN_WARNING_OPTION_GEN'] if env.ToolchainIs('clang') else []
+ 'NOERROR_FOR_UNKNOWN_WARNING_OPTION_GEN': NoErrorForUnknownWarningOptionGenerator,
+ 'CCFLAGS': ['$NOERROR_FOR_UNKNOWN_WARNING_OPTION_GEN'] if env.ToolchainIs('clang') else []
}
if not use_system_version_of_library('snappy'):
thirdPartyEnvironmentModifications['snappy'] = {
- 'CPPPATH' : ['#/src/third_party/snappy' + snappySuffix],
+ 'CPPPATH': ['#/src/third_party/snappy' + snappySuffix],
}
# Valgrind is a header only include as valgrind.h includes everything we need
if not use_system_version_of_library('valgrind'):
thirdPartyEnvironmentModifications['valgrind'] = {
- 'CPPPATH' : ['#/src/third_party/valgrind/include'],
+ 'CPPPATH': ['#/src/third_party/valgrind/include'],
}
if not use_system_version_of_library('zlib'):
thirdPartyEnvironmentModifications['zlib'] = {
- 'CPPPATH' : ['#/src/third_party/zlib' + zlibSuffix],
+ 'CPPPATH': ['#/src/third_party/zlib' + zlibSuffix],
}
if not use_system_version_of_library('zstd'):
thirdPartyEnvironmentModifications['zstd'] = {
- 'CPPPATH' : ['#/src/third_party/zstandard/zstd/lib'],
+ 'CPPPATH': ['#/src/third_party/zstandard/zstd/lib'],
}
if not use_system_version_of_library('google-benchmark'):
thirdPartyEnvironmentModifications['benchmark'] = {
- 'CPPPATH' : ['#/src/third_party/benchmark/dist/include'],
+ 'CPPPATH': ['#/src/third_party/benchmark/dist/include'],
}
# TODO: figure out if we want to offer system versions of mozjs. Mozilla
@@ -146,24 +131,23 @@ if not use_system_version_of_library('google-benchmark'):
#if not use_system_version_of_library('mozjs'):
if True:
thirdPartyEnvironmentModifications['mozjs'] = {
- 'CPPPATH' : [
+ 'CPPPATH': [
'#/src/third_party/mozjs/include',
'#/src/third_party/mozjs/mongo_sources',
- '#/src/third_party/mozjs/platform/' + env["TARGET_ARCH"] + "/" + env["TARGET_OS"] + "/include",
- ],
- 'FORCEINCLUDES' : [
- 'js-config.h',
+ '#/src/third_party/mozjs/platform/' + env["TARGET_ARCH"] + "/" + env["TARGET_OS"] +
+ "/include",
],
+ 'FORCEINCLUDES': ['js-config.h', ],
}
if "tom" in env["MONGO_CRYPTO"]:
thirdPartyEnvironmentModifications['tomcrypt'] = {
- 'CPPPATH' : ['#/src/third_party/tomcrypt' + tomcryptSuffix + '/src/headers'],
+ 'CPPPATH': ['#/src/third_party/tomcrypt' + tomcryptSuffix + '/src/headers'],
}
if not use_system_version_of_library('stemmer'):
thirdPartyEnvironmentModifications['stemmer'] = {
- 'CPPPATH' : ['#/src/third_party/libstemmer_c/include'],
+ 'CPPPATH': ['#/src/third_party/libstemmer_c/include'],
}
# Note that the wiredtiger.h header is generated, so
@@ -171,46 +155,50 @@ if not use_system_version_of_library('stemmer'):
# the source directory.
if wiredtiger and not use_system_version_of_library('wiredtiger'):
thirdPartyEnvironmentModifications['wiredtiger'] = {
- 'CPPPATH' : ['$BUILD_DIR/third_party/wiredtiger'],
+ 'CPPPATH': ['$BUILD_DIR/third_party/wiredtiger'],
}
if not use_system_version_of_library('yaml'):
thirdPartyEnvironmentModifications['yaml'] = {
- 'CPPPATH' : ['#/src/third_party/yaml-cpp/yaml-cpp/include'],
- 'CPPDEFINES' : ['_SILENCE_CXX17_ITERATOR_BASE_CLASS_DEPRECATION_WARNING'] if env.ToolchainIs('msvc') else [],
+ 'CPPPATH': ['#/src/third_party/yaml-cpp/yaml-cpp/include'],
+ 'CPPDEFINES': ['_SILENCE_CXX17_ITERATOR_BASE_CLASS_DEPRECATION_WARNING']
+ if env.ToolchainIs('msvc') else [],
}
if not use_system_version_of_library('asio'):
thirdPartyEnvironmentModifications['asio'] = {
- 'CPPPATH' : ['#/src/third_party/asio-master/asio/include'],
+ 'CPPPATH': ['#/src/third_party/asio-master/asio/include'],
}
if not use_system_version_of_library('intel_decimal128'):
thirdPartyEnvironmentModifications['intel_decimal128'] = {
- 'CPPPATH' : ['#/src/third_party/IntelRDFPMathLib20U1/LIBRARY'],
+ 'CPPPATH': ['#/src/third_party/IntelRDFPMathLib20U1/LIBRARY'],
}
if not use_system_version_of_library('icu'):
thirdPartyEnvironmentModifications['icu'] = {
- 'CPPPATH' : ['#/src/third_party/icu4c' + icuSuffix + '/source/common',
- '#/src/third_party/icu4c' + icuSuffix + '/source/i18n'],
+ 'CPPPATH': [
+ '#/src/third_party/icu4c' + icuSuffix + '/source/common',
+ '#/src/third_party/icu4c' + icuSuffix + '/source/i18n',
+ ],
}
if not use_system_version_of_library('kms-message'):
thirdPartyEnvironmentModifications['kms-message'] = {
- 'CPPPATH' : ['#/src/third_party/kms-message/src'],
- 'CPPDEFINES' :['KMS_MSG_STATIC']
+ 'CPPPATH': ['#/src/third_party/kms-message/src'],
+ 'CPPDEFINES': ['KMS_MSG_STATIC'],
}
if use_system_libunwind:
thirdPartyEnvironmentModifications['unwind'] = {
- 'SYSLIBDEPS_PRIVATE' : [env['LIBDEPS_UNWIND_SYSLIBDEP'], env['LIBDEPS_LZMA_SYSLIBDEP']],
+ 'SYSLIBDEPS_PRIVATE': [env['LIBDEPS_UNWIND_SYSLIBDEP'], env['LIBDEPS_LZMA_SYSLIBDEP']],
}
elif use_vendored_libunwind:
thirdPartyEnvironmentModifications['unwind'] = {
- 'SYSLIBDEPS_PRIVATE' : [env['LIBDEPS_LZMA_SYSLIBDEP']],
+ 'SYSLIBDEPS_PRIVATE': [env['LIBDEPS_LZMA_SYSLIBDEP']],
}
+
def injectThirdParty(thisEnv, libraries=[], parts=[]):
libraries = thisEnv.Flatten([libraries])
parts = thisEnv.Flatten([parts])
@@ -222,7 +210,8 @@ def injectThirdParty(thisEnv, libraries=[], parts=[]):
thisEnv.PrependUnique(**mods)
else:
for part in parts:
- thisEnv.PrependUnique({part : mods[part]})
+ thisEnv.PrependUnique({part: mods[part]})
+
env.AddMethod(injectThirdParty, 'InjectThirdParty')
@@ -233,7 +222,7 @@ env = env.Clone()
# different empty source file for every third-party library, as we did
# in the past.
-empty_source=env.Textfile(
+empty_source = env.Textfile(
target='third_party_shim.cpp',
source=str(),
)
@@ -244,6 +233,7 @@ empty_object = env.LibraryObject(
source=empty_source,
)
+
def shim_library(env, name, **kwargs):
# Add the 'virtual-libdep' tag, which will prevent shim libraries
# from actually being linked to. They don't provide any symbols,
@@ -260,12 +250,13 @@ def shim_library(env, name, **kwargs):
AIB_IGNORE=True,
**kwargs,
)
+
+
env.AddMethod(shim_library, 'ShimLibrary')
murmurEnv = env.Clone()
murmurEnv.InjectThirdParty(libraries=['fmt'])
-murmurEnv.SConscript('murmurhash3/SConscript', exports={ 'env' : murmurEnv })
-
+murmurEnv.SConscript('murmurhash3/SConscript', exports={'env': murmurEnv})
s2Env = env.Clone()
s2Env.InjectThirdParty(libraries=[
@@ -277,19 +268,16 @@ s2Env.InjectThirdParty(libraries=[
'variant',
])
s2Env.InjectMongoIncludePaths()
-s2Env.SConscript('s2/SConscript', exports={'env' : s2Env})
+s2Env.SConscript('s2/SConscript', exports={'env': s2Env})
if use_libunwind:
- unwindEnv = env.Clone(
- LIBDEPS_NO_INHERIT=[
- '$BUILD_DIR/third_party/shim_allocator',
- ],
- )
+ unwindEnv = env.Clone(LIBDEPS_NO_INHERIT=[
+ '$BUILD_DIR/third_party/shim_allocator',
+ ], )
if use_system_libunwind:
- unwindEnv = unwindEnv.Clone(
- SYSLIBDEPS=[
- env['LIBDEPS_UNWIND_SYSLIBDEP'],
- ])
+ unwindEnv = unwindEnv.Clone(SYSLIBDEPS=[
+ env['LIBDEPS_UNWIND_SYSLIBDEP'],
+ ])
else:
unwindEnv = unwindEnv.Clone()
@@ -299,71 +287,61 @@ if use_libunwind:
unwindEnv['ASPPCOM'] = unwindEnv['ASPPCOM'].replace('$CC ', '$ASPP ')
def registerConsumerModifications(env, **kwargs):
- for k,v in kwargs.items():
+ for k, v in kwargs.items():
thirdPartyEnvironmentModifications['unwind'][k] = v
unwindEnv.AddMethod(registerConsumerModifications, 'RegisterConsumerModifications')
- unwindEnv.SConscript('unwind/SConscript', exports={'env' : unwindEnv})
- unwindEnv.Append(
- LIBDEPS_INTERFACE=[
- 'unwind/unwind',
- ])
-
- unwindEnv.ShimLibrary(
- name="unwind",
- )
+ unwindEnv.SConscript('unwind/SConscript', exports={'env': unwindEnv})
+ unwindEnv.Append(LIBDEPS_INTERFACE=[
+ 'unwind/unwind',
+ ])
+
+ unwindEnv.ShimLibrary(name="unwind", )
fmtEnv = env.Clone()
if use_system_version_of_library("fmt"):
- fmtEnv = fmtEnv.Clone(
- SYSLIBDEPS=[
- env['LIBDEPS_FMT_SYSLIBDEP'],
- ])
+ fmtEnv = fmtEnv.Clone(SYSLIBDEPS=[
+ env['LIBDEPS_FMT_SYSLIBDEP'],
+ ])
else:
fmtEnv = fmtEnv.Clone()
fmtEnv.InjectThirdParty(libraries=['fmt'])
fmtEnv.InjectMongoIncludePaths()
- fmtEnv.SConscript('fmt/SConscript', exports={'env' : fmtEnv})
- fmtEnv = fmtEnv.Clone(
- LIBDEPS_INTERFACE=[
- 'fmt/fmt',
- ])
+ fmtEnv.SConscript('fmt/SConscript', exports={'env': fmtEnv})
+ fmtEnv = fmtEnv.Clone(LIBDEPS_INTERFACE=[
+ 'fmt/fmt',
+ ])
fmtEnv.ShimLibrary(name="fmt")
-
pcreEnv = env.Clone()
if use_system_version_of_library("pcre"):
- pcreEnv = pcreEnv.Clone(
- SYSLIBDEPS=[
- env['LIBDEPS_PCRE_SYSLIBDEP'],
- env['LIBDEPS_PCRECPP_SYSLIBDEP'],
- ])
+ pcreEnv = pcreEnv.Clone(SYSLIBDEPS=[
+ env['LIBDEPS_PCRE_SYSLIBDEP'],
+ env['LIBDEPS_PCRECPP_SYSLIBDEP'],
+ ])
else:
pcreEnv = pcreEnv.Clone()
pcreEnv.InjectThirdParty(libraries=['pcre'])
- pcreEnv.SConscript('pcre' + pcreSuffix + '/SConscript', exports={ 'env' : pcreEnv })
- pcreEnv = pcreEnv.Clone(
- LIBDEPS_INTERFACE=[
- 'pcre' + pcreSuffix + '/pcrecpp',
- ])
+ pcreEnv.SConscript('pcre' + pcreSuffix + '/SConscript', exports={'env': pcreEnv})
+ pcreEnv = pcreEnv.Clone(LIBDEPS_INTERFACE=[
+ 'pcre' + pcreSuffix + '/pcrecpp',
+ ])
pcreEnv.ShimLibrary(name="pcrecpp")
pcre2Env = env.Clone()
if use_system_version_of_library("pcre2"):
- pcre2Env = pcre2Env.Clone(
- SYSLIBDEPS=[
- env['LIBDEPS_PCRE2_SYSLIBDEP'],
- ])
+ pcre2Env = pcre2Env.Clone(SYSLIBDEPS=[
+ env['LIBDEPS_PCRE2_SYSLIBDEP'],
+ ])
else:
pcre2Env = pcre2Env.Clone()
pcre2Env.InjectThirdParty(libraries=['pcre2'])
- pcre2Env.SConscript('pcre2' + '/SConscript', exports={ 'env' : pcre2Env })
- pcre2Env = pcre2Env.Clone(
- LIBDEPS_INTERFACE=[
- 'pcre2/pcre2',
- ])
+ pcre2Env.SConscript('pcre2' + '/SConscript', exports={'env': pcre2Env})
+ pcre2Env = pcre2Env.Clone(LIBDEPS_INTERFACE=[
+ 'pcre2/pcre2',
+ ])
pcre2Env.ShimLibrary(name="pcre2")
@@ -372,170 +350,153 @@ if use_system_version_of_library("boost"):
# On windows, we don't need the syslibdeps because autolib will select the right libraries
# for us automatically.
if not env.TargetOSIs('windows'):
- boostEnv = boostEnv.Clone(
- SYSLIBDEPS=[
- env['LIBDEPS_BOOST_PROGRAM_OPTIONS_SYSLIBDEP'],
- env['LIBDEPS_BOOST_FILESYSTEM_SYSLIBDEP'],
- env['LIBDEPS_BOOST_SYSTEM_SYSLIBDEP'],
- env['LIBDEPS_BOOST_IOSTREAMS_SYSLIBDEP'],
- env['LIBDEPS_BOOST_THREAD_SYSLIBDEP'],
- env['LIBDEPS_BOOST_LOG_SYSLIBDEP'],
- ])
+ boostEnv = boostEnv.Clone(SYSLIBDEPS=[
+ env['LIBDEPS_BOOST_PROGRAM_OPTIONS_SYSLIBDEP'],
+ env['LIBDEPS_BOOST_FILESYSTEM_SYSLIBDEP'],
+ env['LIBDEPS_BOOST_SYSTEM_SYSLIBDEP'],
+ env['LIBDEPS_BOOST_IOSTREAMS_SYSLIBDEP'],
+ env['LIBDEPS_BOOST_THREAD_SYSLIBDEP'],
+ env['LIBDEPS_BOOST_LOG_SYSLIBDEP'],
+ ])
else:
boostDirectory = 'boost'
boostEnv = boostEnv.Clone()
boostEnv.InjectThirdParty(libraries=['boost'])
- boostEnv.SConscript(boostDirectory + '/SConscript', exports={ 'env' : boostEnv })
- boostEnv = boostEnv.Clone(
- LIBDEPS_INTERFACE=[
- boostDirectory + '/boost_filesystem',
- boostDirectory + '/boost_iostreams',
- boostDirectory + '/boost_log',
- boostDirectory + '/boost_program_options',
- boostDirectory + '/boost_system',
- ])
+ boostEnv.SConscript(boostDirectory + '/SConscript', exports={'env': boostEnv})
+ boostEnv = boostEnv.Clone(LIBDEPS_INTERFACE=[
+ boostDirectory + '/boost_filesystem',
+ boostDirectory + '/boost_iostreams',
+ boostDirectory + '/boost_log',
+ boostDirectory + '/boost_program_options',
+ boostDirectory + '/boost_system',
+ ])
boostEnv.ShimLibrary(name="boost")
abseilDirectory = 'abseil-cpp-master'
abseilEnv = env.Clone()
abseilEnv.InjectThirdParty(libraries=['abseil-cpp'])
-abseilEnv.SConscript(abseilDirectory + '/SConscript', exports={ 'env' : abseilEnv })
-abseilEnv = abseilEnv.Clone(
- LIBDEPS_INTERFACE=[
- abseilDirectory + '/absl_container',
- abseilDirectory + '/absl_hash',
- abseilDirectory + '/absl_numeric',
- ])
+abseilEnv.SConscript(abseilDirectory + '/SConscript', exports={'env': abseilEnv})
+abseilEnv = abseilEnv.Clone(LIBDEPS_INTERFACE=[
+ abseilDirectory + '/absl_container',
+ abseilDirectory + '/absl_hash',
+ abseilDirectory + '/absl_numeric',
+])
abseilEnv.ShimLibrary(name="abseil")
snappyEnv = env.Clone()
if use_system_version_of_library("snappy"):
- snappyEnv = snappyEnv.Clone(
- SYSLIBDEPS=[
- env['LIBDEPS_SNAPPY_SYSLIBDEP'],
- ])
+ snappyEnv = snappyEnv.Clone(SYSLIBDEPS=[
+ env['LIBDEPS_SNAPPY_SYSLIBDEP'],
+ ])
else:
snappyEnv = snappyEnv.Clone()
snappyEnv.InjectThirdParty(libraries=['snappy'])
snappyEnv.InjectMongoIncludePaths()
- snappyEnv.SConscript('snappy' + snappySuffix + '/SConscript', exports={ 'env' : snappyEnv })
- snappyEnv = snappyEnv.Clone(
- LIBDEPS_INTERFACE=[
- 'snappy' + snappySuffix + '/snappy',
- ])
+ snappyEnv.SConscript('snappy' + snappySuffix + '/SConscript', exports={'env': snappyEnv})
+ snappyEnv = snappyEnv.Clone(LIBDEPS_INTERFACE=[
+ 'snappy' + snappySuffix + '/snappy',
+ ])
snappyEnv.ShimLibrary(name="snappy")
zlibEnv = env.Clone()
if use_system_version_of_library("zlib"):
- zlibEnv = zlibEnv.Clone(
- SYSLIBDEPS=[
- env['LIBDEPS_ZLIB_SYSLIBDEP'],
- ])
+ zlibEnv = zlibEnv.Clone(SYSLIBDEPS=[
+ env['LIBDEPS_ZLIB_SYSLIBDEP'],
+ ])
else:
zlibEnv = zlibEnv.Clone()
zlibEnv.InjectThirdParty(libraries=['zlib'])
- zlibEnv.SConscript('zlib' + zlibSuffix + '/SConscript', exports={ 'env' : zlibEnv })
- zlibEnv = zlibEnv.Clone(
- LIBDEPS_INTERFACE=[
- 'zlib' + zlibSuffix + '/zlib',
- ])
+ zlibEnv.SConscript('zlib' + zlibSuffix + '/SConscript', exports={'env': zlibEnv})
+ zlibEnv = zlibEnv.Clone(LIBDEPS_INTERFACE=[
+ 'zlib' + zlibSuffix + '/zlib',
+ ])
-zlibEnv.ShimLibrary(
- name="zlib",
-)
+zlibEnv.ShimLibrary(name="zlib", )
zstdEnv = env.Clone()
if use_system_version_of_library("zstd"):
- zstdEnv = zstdEnv.Clone(
- SYSLIBDEPS=[
- env['LIBDEPS_ZSTD_SYSLIBDEP'],
- ])
+ zstdEnv = zstdEnv.Clone(SYSLIBDEPS=[
+ env['LIBDEPS_ZSTD_SYSLIBDEP'],
+ ])
else:
zstdEnv = zstdEnv.Clone()
zstdEnv.InjectThirdParty(libraries=['zstd'])
- zstdEnv.SConscript('zstandard/SConscript', exports={ 'env' : zstdEnv })
- zstdEnv = zstdEnv.Clone(
- LIBDEPS_INTERFACE=[
- 'zstandard/zstd',
- ])
+ zstdEnv.SConscript('zstandard/SConscript', exports={'env': zstdEnv})
+ zstdEnv = zstdEnv.Clone(LIBDEPS_INTERFACE=[
+ 'zstandard/zstd',
+ ])
-zstdEnv.ShimLibrary(
- name="zstd",
-)
+zstdEnv.ShimLibrary(name="zstd", )
benchmarkEnv = env.Clone()
if use_system_version_of_library("google-benchmark"):
- benchmarkEnv = benchmarkEnv.Clone(
- SYSLIBDEPS=[
- env['LIBDEPS_BENCHMARK_SYSLIBDEP'],
- ])
+ benchmarkEnv = benchmarkEnv.Clone(SYSLIBDEPS=[
+ env['LIBDEPS_BENCHMARK_SYSLIBDEP'],
+ ])
else:
benchmarkEnv = benchmarkEnv.Clone()
benchmarkEnv.InjectThirdParty(libraries=['benchmark'])
benchmarkEnv.SConscript(
'benchmark/SConscript',
- exports={ 'env' : benchmarkEnv })
- benchmarkEnv = benchmarkEnv.Clone(
- LIBDEPS_INTERFACE=[
- 'benchmark/benchmark',
- ])
+ exports={'env': benchmarkEnv},
+ )
+ benchmarkEnv = benchmarkEnv.Clone(LIBDEPS_INTERFACE=[
+ 'benchmark/benchmark',
+ ])
benchmarkEnv.ShimLibrary(name="benchmark")
if jsEngine:
mozjsEnv = env.Clone()
- mozjsEnv.SConscript('mozjs/SConscript', exports={'env' : mozjsEnv })
- mozjsEnv = mozjsEnv.Clone(
- LIBDEPS_INTERFACE=[
- 'mozjs/mozjs',
- 'shim_zlib',
- ])
+ mozjsEnv.SConscript('mozjs/SConscript', exports={'env': mozjsEnv})
+ mozjsEnv = mozjsEnv.Clone(LIBDEPS_INTERFACE=[
+ 'mozjs/mozjs',
+ 'shim_zlib',
+ ])
mozjsEnv.ShimLibrary(name="mozjs")
if "tom" in env["MONGO_CRYPTO"]:
tomcryptEnv = env.Clone()
- tomcryptEnv.SConscript('tomcrypt' + tomcryptSuffix + '/SConscript', exports={'env' : tomcryptEnv })
- tomcryptEnv = tomcryptEnv.Clone(
- LIBDEPS_INTERFACE=[
- 'tomcrypt' + tomcryptSuffix + '/tomcrypt',
- ])
-
- tomcryptEnv.ShimLibrary(
- name="tomcrypt",
+ tomcryptEnv.SConscript(
+ 'tomcrypt' + tomcryptSuffix + '/SConscript',
+ exports={'env': tomcryptEnv},
)
+ tomcryptEnv = tomcryptEnv.Clone(LIBDEPS_INTERFACE=[
+ 'tomcrypt' + tomcryptSuffix + '/tomcrypt',
+ ])
+ tomcryptEnv.ShimLibrary(name="tomcrypt", )
-gperftoolsEnv = env.Clone(
- LIBDEPS_NO_INHERIT=[
- '$BUILD_DIR/third_party/shim_allocator',
- ],
-)
+gperftoolsEnv = env.Clone(LIBDEPS_NO_INHERIT=[
+ '$BUILD_DIR/third_party/shim_allocator',
+], )
if gperftoolsEnv['MONGO_ALLOCATOR'] in ["tcmalloc", "tcmalloc-experimental"]:
if use_system_version_of_library("tcmalloc"):
- gperftoolsEnv = gperftoolsEnv.Clone(
- SYSLIBDEPS=[
- env['LIBDEPS_TCMALLOC_SYSLIBDEP'],
+ gperftoolsEnv = gperftoolsEnv.Clone(SYSLIBDEPS=[
+ env['LIBDEPS_TCMALLOC_SYSLIBDEP'],
])
else:
gperftoolsEnv = gperftoolsEnv.Clone()
gperftoolsEnv.InjectThirdParty(libraries=['gperftools'])
+
# Allow gperftools to determine its own consumer-side include/ dirs.
# Needed because those are in a platform-specific subdirectory.
def registerConsumerModifications(env, **kwargs):
for k, v in kwargs.items():
thirdPartyEnvironmentModifications['gperftools'][k] = v
+
gperftoolsEnv.AddMethod(registerConsumerModifications, 'RegisterConsumerModifications')
gperftoolsEnv.SConscript(
'gperftools' + '/SConscript',
- exports={'env' : gperftoolsEnv})
- gperftoolsEnv = gperftoolsEnv.Clone(
- LIBDEPS_INTERFACE=[
- 'gperftools/tcmalloc_minimal',
- ])
+ exports={'env': gperftoolsEnv},
+ )
+ gperftoolsEnv = gperftoolsEnv.Clone(LIBDEPS_INTERFACE=[
+ 'gperftools/tcmalloc_minimal',
+ ])
gperftoolsEnv.ShimLibrary(
name="allocator",
@@ -549,72 +510,61 @@ gperftoolsEnv.ShimLibrary(
# This tag allows this dependency to be linked to nodes marked as not
# allowed to have public dependencies.
'lint-public-dep-allowed'
- ]
+ ],
)
-
stemmerEnv = env.Clone()
if use_system_version_of_library("stemmer"):
- stemmerEnv = stemmerEnv.Clone(
- SYSLIBDEPS=[
- env['LIBDEPS_STEMMER_SYSLIBDEP'],
- ])
+ stemmerEnv = stemmerEnv.Clone(SYSLIBDEPS=[
+ env['LIBDEPS_STEMMER_SYSLIBDEP'],
+ ])
else:
stemmerEnv = stemmerEnv.Clone()
stemmerEnv.InjectThirdParty(libraries=['stemmer'])
- stemmerEnv.SConscript('libstemmer_c/SConscript', exports={ 'env' : stemmerEnv })
- stemmerEnv = stemmerEnv.Clone(
- LIBDEPS_INTERFACE=[
- 'libstemmer_c/stemmer',
- ])
+ stemmerEnv.SConscript('libstemmer_c/SConscript', exports={'env': stemmerEnv})
+ stemmerEnv = stemmerEnv.Clone(LIBDEPS_INTERFACE=[
+ 'libstemmer_c/stemmer',
+ ])
stemmerEnv.ShimLibrary(name="stemmer")
-
yamlEnv = env.Clone()
if use_system_version_of_library("yaml"):
- yamlEnv = yamlEnv.Clone(
- SYSLIBDEPS=[
- env['LIBDEPS_YAML_SYSLIBDEP'],
- ])
+ yamlEnv = yamlEnv.Clone(SYSLIBDEPS=[
+ env['LIBDEPS_YAML_SYSLIBDEP'],
+ ])
else:
yamlEnv = yamlEnv.Clone()
yamlEnv.InjectThirdParty(libraries=['yaml', 'boost'])
- yamlEnv.SConscript('yaml-cpp/SConscript', exports={ 'env' : yamlEnv })
- yamlEnv = yamlEnv.Clone(
- LIBDEPS_INTERFACE=[
- 'yaml-cpp/yaml',
- ])
+ yamlEnv.SConscript('yaml-cpp/SConscript', exports={'env': yamlEnv})
+ yamlEnv = yamlEnv.Clone(LIBDEPS_INTERFACE=[
+ 'yaml-cpp/yaml',
+ ])
yamlEnv.ShimLibrary(name="yaml")
timelibEnv = env.Clone()
timelibEnv.InjectThirdParty(libraries=['timelib'])
-timelibEnv.SConscript('timelib' + timelibSuffix + '/SConscript', exports={ 'env' : timelibEnv })
-timelibEnv = timelibEnv.Clone(
- LIBDEPS_INTERFACE=[
- 'timelib' + timelibSuffix + '/timelib',
- ])
+timelibEnv.SConscript('timelib' + timelibSuffix + '/SConscript', exports={'env': timelibEnv})
+timelibEnv = timelibEnv.Clone(LIBDEPS_INTERFACE=[
+ 'timelib' + timelibSuffix + '/timelib',
+])
-timelibEnv.ShimLibrary(
- name='timelib',
-)
+timelibEnv.ShimLibrary(name='timelib', )
wiredtigerEnv = env.Clone()
if wiredtiger:
if use_system_version_of_library("wiredtiger"):
- wiredtigerEnv = wiredtigerEnv.Clone(
- SYSLIBDEPS=[
- env['LIBDEPS_WIREDTIGER_SYSLIBDEP'],
- ])
+ wiredtigerEnv = wiredtigerEnv.Clone(SYSLIBDEPS=[
+ env['LIBDEPS_WIREDTIGER_SYSLIBDEP'],
+ ])
else:
wiredtigerEnv = wiredtigerEnv.Clone()
wiredtigerEnv.InjectThirdParty(libraries=['wiredtiger'])
- wiredtigerEnv.SConscript('wiredtiger/SConscript', exports={ 'env' : wiredtigerEnv })
- wiredtigerEnv = wiredtigerEnv.Clone(
- LIBDEPS_INTERFACE=[
- 'wiredtiger/wiredtiger',
- ])
+ wiredtigerEnv.SConscript('wiredtiger/SConscript', exports={'env': wiredtigerEnv})
+ wiredtigerEnv = wiredtigerEnv.Clone(LIBDEPS_INTERFACE=[
+ 'wiredtiger/wiredtiger',
+ ])
wiredtigerEnv.ShimLibrary(name="wiredtiger")
@@ -627,71 +577,60 @@ if use_system_version_of_library("asio"):
else:
asioEnv = asioEnv.Clone()
asioEnv.InjectThirdParty(libraries=['asio'])
- asioEnv.SConscript('asio-master/SConscript', exports={ 'env' : asioEnv })
- asioEnv = asioEnv.Clone(
- LIBDEPS_INTERFACE=[
- 'asio-master/asio',
- ])
+ asioEnv.SConscript('asio-master/SConscript', exports={'env': asioEnv})
+ asioEnv = asioEnv.Clone(LIBDEPS_INTERFACE=[
+ 'asio-master/asio',
+ ])
asioEnv.ShimLibrary(name="asio")
intelDecimal128Env = env.Clone()
if use_system_version_of_library("intel_decimal128"):
- intelDecimal128Env = intelDecimal128Env.Clone(
- SYSLIBDEPS=[
- env['LIBDEPS_INTEL_DECIMAL128_SYSLIBDEP'],
- ])
+ intelDecimal128Env = intelDecimal128Env.Clone(SYSLIBDEPS=[
+ env['LIBDEPS_INTEL_DECIMAL128_SYSLIBDEP'],
+ ])
else:
intelDecimal128Env = intelDecimal128Env.Clone()
intelDecimal128Env.InjectThirdParty(libraries=['intel_decimal128'])
- intelDecimal128Env.SConscript('IntelRDFPMathLib20U1/SConscript', exports={ 'env' : intelDecimal128Env })
- intelDecimal128Env = intelDecimal128Env.Clone(
- LIBDEPS_INTERFACE=[
- 'IntelRDFPMathLib20U1/intel_decimal128',
- ])
-
-intelDecimal128Env.ShimLibrary(
- name="intel_decimal128",
-)
+ intelDecimal128Env.SConscript(
+ 'IntelRDFPMathLib20U1/SConscript',
+ exports={'env': intelDecimal128Env},
+ )
+ intelDecimal128Env = intelDecimal128Env.Clone(LIBDEPS_INTERFACE=[
+ 'IntelRDFPMathLib20U1/intel_decimal128',
+ ])
+
+intelDecimal128Env.ShimLibrary(name="intel_decimal128", )
icuEnv = env.Clone()
if use_system_version_of_library("icu"):
- icuEnv = icuEnv.Clone(
- SYSLIBDEPS=[
- env['LIBDEPS_ICUDATA_SYSLIBDEP'],
- env['LIBDEPS_ICUI18N_SYSLIBDEP'],
- env['LIBDEPS_ICUUC_SYSLIBDEP'],
- ])
+ icuEnv = icuEnv.Clone(SYSLIBDEPS=[
+ env['LIBDEPS_ICUDATA_SYSLIBDEP'],
+ env['LIBDEPS_ICUI18N_SYSLIBDEP'],
+ env['LIBDEPS_ICUUC_SYSLIBDEP'],
+ ])
else:
icuEnv = icuEnv.Clone()
icuEnv.InjectThirdParty(libraries=['icu'])
- icuEnv.SConscript('icu4c' + icuSuffix + '/source/SConscript', exports={ 'env' : icuEnv })
- icuEnv = icuEnv.Clone(
- LIBDEPS_INTERFACE=[
- 'icu4c' + icuSuffix + '/source/icu_i18n',
- ])
+ icuEnv.SConscript('icu4c' + icuSuffix + '/source/SConscript', exports={'env': icuEnv})
+ icuEnv = icuEnv.Clone(LIBDEPS_INTERFACE=[
+ 'icu4c' + icuSuffix + '/source/icu_i18n',
+ ])
-icuEnv.ShimLibrary(
- name="icu",
-)
+icuEnv.ShimLibrary(name="icu", )
kmsEnv = env.Clone()
if get_option('ssl') == 'on':
if use_system_version_of_library("kms-message"):
- kmsEnv = kmsEnv.Clone(
- SYSLIBDEPS=[
- env['LIBDEPS_KMS-MESSAGE_SYSLIBDEP'],
- ])
+ kmsEnv = kmsEnv.Clone(SYSLIBDEPS=[
+ env['LIBDEPS_KMS-MESSAGE_SYSLIBDEP'],
+ ])
else:
kmsEnv = kmsEnv.Clone()
kmsEnv.InjectThirdParty(libraries=['kms-message'])
- kmsEnv.SConscript('kms-message/SConscript', exports={ 'env' : kmsEnv })
- kmsEnv = kmsEnv.Clone(
- LIBDEPS_INTERFACE=[
- 'kms-message/kms-message',
- ])
-
- kmsEnv.ShimLibrary(
- name="kms_message",
- )
+ kmsEnv.SConscript('kms-message/SConscript', exports={'env': kmsEnv})
+ kmsEnv = kmsEnv.Clone(LIBDEPS_INTERFACE=[
+ 'kms-message/kms-message',
+ ])
+ kmsEnv.ShimLibrary(name="kms_message", )
diff --git a/src/third_party/abseil-cpp-master/SConscript b/src/third_party/abseil-cpp-master/SConscript
index 3e960562068..f65389ad7d4 100644
--- a/src/third_party/abseil-cpp-master/SConscript
+++ b/src/third_party/abseil-cpp-master/SConscript
@@ -6,9 +6,9 @@ env.InjectThirdParty(libraries=['abseil-cpp'])
env.Library(
target="absl_numeric",
- source = [
- "abseil-cpp/absl/numeric/int128.cc",
- ]
+ source=[
+ "abseil-cpp/absl/numeric/int128.cc",
+ ],
)
env.Library(
@@ -17,7 +17,7 @@ env.Library(
"abseil-cpp/absl/hash/internal/city.cc",
"abseil-cpp/absl/hash/internal/hash.cc",
"abseil-cpp/absl/hash/internal/wyhash.cc",
- ]
+ ],
)
env.Library(
@@ -28,7 +28,7 @@ env.Library(
LIBDEPS=[
"absl_hash",
"absl_throw_delegate",
- ]
+ ],
)
env.Library(
@@ -36,6 +36,5 @@ env.Library(
source=[
"abseil-cpp/absl/base/internal/throw_delegate.cc",
],
- LIBDEPS=[
- ]
+ LIBDEPS=[],
)
diff --git a/src/third_party/asio-master/SConscript b/src/third_party/asio-master/SConscript
index e0c423ce5b4..136f34ae6f0 100644
--- a/src/third_party/asio-master/SConscript
+++ b/src/third_party/asio-master/SConscript
@@ -11,5 +11,5 @@ env.InjectThirdParty(libraries=['boost'])
env.Library(
target="asio",
- source=asio_src
+ source=asio_src,
)
diff --git a/src/third_party/benchmark/SConscript b/src/third_party/benchmark/SConscript
index 8b2feb693c0..5b11b380ab6 100644
--- a/src/third_party/benchmark/SConscript
+++ b/src/third_party/benchmark/SConscript
@@ -21,27 +21,31 @@ env.Append(
],
)
-src_dir=env.Dir('dist/src')
+src_dir = env.Dir('dist/src')
env.Library(
target="benchmark",
- source=env.File([
- 'benchmark_api_internal.cc',
- 'benchmark.cc',
- 'benchmark_name.cc',
- 'benchmark_register.cc',
- 'benchmark_runner.cc',
- 'colorprint.cc',
- 'commandlineflags.cc',
- 'complexity.cc',
- 'console_reporter.cc',
- 'counter.cc',
- 'csv_reporter.cc',
- 'json_reporter.cc',
- 'reporter.cc',
- 'sleep.cc',
- 'statistics.cc',
- 'string_util.cc',
- 'sysinfo.cc',
- 'timers.cc',
- ], src_dir))
+ source=env.File(
+ [
+ 'benchmark_api_internal.cc',
+ 'benchmark.cc',
+ 'benchmark_name.cc',
+ 'benchmark_register.cc',
+ 'benchmark_runner.cc',
+ 'colorprint.cc',
+ 'commandlineflags.cc',
+ 'complexity.cc',
+ 'console_reporter.cc',
+ 'counter.cc',
+ 'csv_reporter.cc',
+ 'json_reporter.cc',
+ 'reporter.cc',
+ 'sleep.cc',
+ 'statistics.cc',
+ 'string_util.cc',
+ 'sysinfo.cc',
+ 'timers.cc',
+ ],
+ src_dir,
+ ),
+)
diff --git a/src/third_party/boost/SConscript b/src/third_party/boost/SConscript
index 2c7e4456d43..e017a7b1ea5 100644
--- a/src/third_party/boost/SConscript
+++ b/src/third_party/boost/SConscript
@@ -72,68 +72,70 @@ env.Library(
)
env.Library(
- target='boost_iostreams',
+ target='boost_iostreams',
source=[
'libs/iostreams/src/file_descriptor.cpp',
'libs/iostreams/src/mapped_file.cpp',
- ],
- LIBDEPS=[
- '$BUILD_DIR/mongo/util/boost_assert_shim',
- 'boost_system',
- ])
+ ],
+ LIBDEPS=[
+ '$BUILD_DIR/mongo/util/boost_assert_shim',
+ 'boost_system',
+ ],
+)
-boost_thread_source = dict(
- posix=[
- 'libs/thread/src/pthread/once.cpp',
- 'libs/thread/src/pthread/thread.cpp'
- ],
+boost_thread_source = dict(
+ posix=['libs/thread/src/pthread/once.cpp', 'libs/thread/src/pthread/thread.cpp'],
windows=[
- 'libs/thread/src/win32/thread.cpp',
+ 'libs/thread/src/win32/thread.cpp',
'libs/thread/src/win32/thread_primitives.cpp',
'libs/thread/src/win32/tss_dll.cpp',
'libs/thread/src/win32/tss_pe.cpp',
- ]
-).get(env['TARGET_OS_FAMILY'], 'UNKNOWN_OS_FAMILY_FOR_BOOST_THREADS__%s' % env['TARGET_OS_FAMILY'])
+ ],
+).get(
+ env['TARGET_OS_FAMILY'],
+ 'UNKNOWN_OS_FAMILY_FOR_BOOST_THREADS__%s' % env['TARGET_OS_FAMILY'],
+)
-boost_thread_defines = dict(
- posix=['BOOST_THREAD_PTHREAD'],
- windows=['BOOST_THREAD_WIN32']
-).get(env['TARGET_OS_FAMILY'], 'UNKNOWN_OS_FAMILY_FOR_BOOST_THREADS__%s' % env['TARGET_OS_FAMILY'])
+boost_thread_defines = dict(
+ posix=['BOOST_THREAD_PTHREAD'],
+ windows=['BOOST_THREAD_WIN32'],
+).get(
+ env['TARGET_OS_FAMILY'],
+ 'UNKNOWN_OS_FAMILY_FOR_BOOST_THREADS__%s' % env['TARGET_OS_FAMILY'],
+)
threadlib_env = env.Clone()
threadlib_env.Append(CPPDEFINES=['BOOST_THREAD_BUILD_LIB'] + boost_thread_defines)
threadlib_env.Library(
- target='boost_thread',
+ target='boost_thread',
source=[
'libs/thread/src/future.cpp',
] + boost_thread_source,
LIBDEPS=[
'$BUILD_DIR/mongo/util/boost_assert_shim',
- ]
+ ],
)
loglib_env = env.Clone()
-loglib_env.AppendUnique(
- CPPDEFINES=[
- 'BOOST_LOG_USE_COMPILER_TLS',
- 'BOOST_LOG_USE_STD_REGEX',
- 'BOOST_LOG_WITHOUT_DEFAULT_FACTORIES',
- 'BOOST_LOG_WITHOUT_IPC',
- 'BOOST_LOG_WITHOUT_SETTINGS_PARSERS',
- ])
+loglib_env.AppendUnique(CPPDEFINES=[
+ 'BOOST_LOG_USE_COMPILER_TLS',
+ 'BOOST_LOG_USE_STD_REGEX',
+ 'BOOST_LOG_WITHOUT_DEFAULT_FACTORIES',
+ 'BOOST_LOG_WITHOUT_IPC',
+ 'BOOST_LOG_WITHOUT_SETTINGS_PARSERS',
+])
if env.ToolchainIs('msvc'):
- # warning C4828: The file contains a character starting at offset 0x6009 that is illegal in
+ # warning C4828: The file contains a character starting at offset 0x6009 that is illegal in
# the current source character set (codepage 65001).
loglib_env.AppendUnique(CCFLAGS=['/wd4828'])
# permissions.cpp includes windows.h directly, causing issues with boost winapi (error C2116 and C2733)
- loglib_env.AppendUnique(
- CPPDEFINES=[
+ loglib_env.AppendUnique(CPPDEFINES=[
'BOOST_USE_WINDOWS_H',
])
loglib_env.Library(
- target='boost_log',
+ target='boost_log',
source=[
'libs/log/src/attribute_name.cpp',
'libs/log/src/attribute_set.cpp',
@@ -172,5 +174,5 @@ loglib_env.Library(
'$BUILD_DIR/mongo/util/boost_assert_shim',
'boost_filesystem',
'boost_thread',
- ]
+ ],
)
diff --git a/src/third_party/fmt/SConscript b/src/third_party/fmt/SConscript
index e239c9070ad..80a5f66520a 100644
--- a/src/third_party/fmt/SConscript
+++ b/src/third_party/fmt/SConscript
@@ -15,4 +15,5 @@ env.Library(
source=env.File([
'format.cc',
'os.cc',
- ], 'dist/src'))
+ ], 'dist/src'),
+)
diff --git a/src/third_party/gperftools/SConscript b/src/third_party/gperftools/SConscript
index 7589626149d..21ec1fb90f0 100644
--- a/src/third_party/gperftools/SConscript
+++ b/src/third_party/gperftools/SConscript
@@ -9,8 +9,7 @@ Import("selected_experimental_optimizations")
env = env.Clone(
# Building with hidden visibility interferes with intercepting the
# libc allocation functions.
- DISALLOW_VISHIDDEN=True,
-)
+ DISALLOW_VISHIDDEN=True, )
# If we don't have a frame pointer, we need to tell tcmalloc so that
# it doesn't try to select a frame pointer based unwinder like
@@ -35,16 +34,13 @@ if not fp or debugBuild:
CPPDEFINES=[
'NO_FRAME_POINTER',
'TCMALLOC_DONT_PREFER_LIBUNWIND' if debugBuild else None,
- ],
- )
+ ], )
if use_libunwind:
- env.Append(
- CPPDEFINES=[
- ("HAVE_LIBUNWIND_H", "1"),
- 'HAVE_UCONTEXT_H',
- ],
- )
+ env.Append(CPPDEFINES=[
+ ("HAVE_LIBUNWIND_H", "1"),
+ 'HAVE_UCONTEXT_H',
+ ], )
env.InjectThirdParty(libraries=['unwind'])
files = [
@@ -69,7 +65,7 @@ files = [
'src/static_vars.cc',
'src/symbolize.cc',
'src/thread_cache.cc',
- ]
+]
if env.TargetOSIs('windows'):
files += [
@@ -77,7 +73,7 @@ if env.TargetOSIs('windows'):
'src/windows/port.cc',
'src/windows/system-alloc.cc',
'src/fake_stacktrace_scope.cc',
- ]
+ ]
# warning C4141: 'inline': used more than once
# warning C4305: 'argument': truncation from 'ssize_t' to 'double'
@@ -88,7 +84,7 @@ else:
'src/emergency_malloc_for_stacktrace.cc',
'src/maybe_threads.cc',
'src/system-alloc.cc',
- ]
+ ]
if not debugBuild:
files += ['src/tcmalloc.cc']
@@ -98,9 +94,7 @@ else:
# gperftools has some sloppy write calls that emit warnings
env.Append(CXXFLAGS=["-Wno-unused-result"])
-env.Append(
- CPPDEFINES=["NO_HEAP_CHECK"],
-)
+env.Append(CPPDEFINES=["NO_HEAP_CHECK"], )
# The build system doesn't define NDEBUG globally for historical reasons, however, TCMalloc
# expects that NDEBUG is used to select between preferring the mmap or the sbrk allocator. For
@@ -108,7 +102,7 @@ env.Append(
# production deployment configuration. See the use of NDEBUG and kDebugMode in
# src/system-alloc.cc for more details.
if not debugBuild:
- env.Append( CPPDEFINES=["NDEBUG"] )
+ env.Append(CPPDEFINES=["NDEBUG"])
# For debug builds we want to capture stacks during (de)allocations,
# but we don't want to pay that cost for release builds. For non-debug
@@ -122,18 +116,22 @@ if (not debugBuild) or (not env['TARGET_ARCH'] in ['x86_64', 'i386']):
gperftools_root = env.Dir("#/src/third_party/gperftools")
gperftools_platform = gperftools_root.Dir("platform/${TARGET_OS}_${TARGET_ARCH}")
-env.Append(CPPPATH=[gperftools_platform.Dir("internal/src"),
- gperftools_root.Dir("dist/src")])
+env.Append(CPPPATH=[
+ gperftools_platform.Dir("internal/src"),
+ gperftools_root.Dir("dist/src"),
+])
# propagates to consumers that Inject (depend on) gperftools.
env.RegisterConsumerModifications(CPPPATH=[gperftools_platform.Dir("include")])
+
def removeIfPresent(lst, item):
try:
lst.remove(item)
except ValueError:
pass
+
env['CCFLAGS_WERROR'] = []
env['CXXFLAGS_WERROR'] = []
for to_remove in ["-Wsign-compare", "-Wall"]:
@@ -146,13 +144,11 @@ if not env.TargetOSIs('windows'):
# GCC on PowerPC under C++11 mode does not define __linux which gperftools depends on
if env['TARGET_ARCH'] == 'ppc64le':
- env.Append( CPPDEFINES=[ "__linux"] )
+ env.Append(CPPDEFINES=["__linux"])
env.Library(
target='tcmalloc_minimal',
- source=[
- env.Dir('dist').File(f) for f in files
- ],
+ source=[env.Dir('dist').File(f) for f in files],
LIBDEPS_PRIVATE=[
'$BUILD_DIR/third_party/shim_unwind' if use_libunwind else [],
],
diff --git a/src/third_party/icu4c-57.1/source/SConscript b/src/third_party/icu4c-57.1/source/SConscript
index 35267e35a25..41b199eaf36 100644
--- a/src/third_party/icu4c-57.1/source/SConscript
+++ b/src/third_party/icu4c-57.1/source/SConscript
@@ -17,8 +17,7 @@ env.Append(
('U_DISABLE_RENAMING', 1),
('U_STATIC_IMPLEMENTATION', 1),
('U_USING_ICU_NAMESPACE', 0),
- ],
-)
+ ], )
if env.TargetOSIs('solaris'):
# On Solaris, compile of certain files fails if the below define is not enabled. Specifically,
@@ -28,11 +27,10 @@ if env.TargetOSIs('solaris'):
# error because XPG4v2 programs are incompatible with C99. If we keep _XOPEN_SOURCE=600 but
# force _XOPEN_SOURCE_EXTENDED=0, then <sys/feature_tests.h> chooses XPG6, which resolves the
# error (since XPG6 is compatible with C99).
- env.Append(
- CPPDEFINES=[
- ('_XOPEN_SOURCE_EXTENDED', 0),
- ],
- )
+ env.Append(CPPDEFINES=[
+ ('_XOPEN_SOURCE_EXTENDED', 0),
+ ], )
+
def removeIfPresent(lst, item):
try:
@@ -40,6 +38,7 @@ def removeIfPresent(lst, item):
except ValueError:
pass
+
env['CCFLAGS_WERROR'] = []
for to_remove in ['-Wall', '-W']:
removeIfPresent(env['CCFLAGS'], to_remove)
@@ -52,7 +51,6 @@ if env.TargetOSIs('windows'):
# C4996: '...': was declared deprecated
env.Append(CCFLAGS=['/wd4996'])
-
# Suppress `register` keyword warnings in FreeBSD builds
if env.TargetOSIs('freebsd'):
@@ -60,11 +58,9 @@ if env.TargetOSIs('freebsd'):
env.Append(CCFLAGS=['-Wno-register'])
i18nEnv = env.Clone()
-i18nEnv.Append(
- CPPDEFINES=[
- ('U_I18N_IMPLEMENTATION', 1),
- ],
-)
+i18nEnv.Append(CPPDEFINES=[
+ ('U_I18N_IMPLEMENTATION', 1),
+], )
i18nEnv.Library(
target='icu_i18n',
source=[
@@ -273,11 +269,9 @@ i18nEnv.Library(
)
commonEnv = env.Clone()
-commonEnv.Append(
- CPPDEFINES=[
- ('U_COMMON_IMPLEMENTATION', 1),
- ],
-)
+commonEnv.Append(CPPDEFINES=[
+ ('U_COMMON_IMPLEMENTATION', 1),
+], )
commonEnv.Library(
target='icu_common',
source=[
diff --git a/src/third_party/kms-message/SConscript b/src/third_party/kms-message/SConscript
index e54fd075efc..f33d82b9f0e 100644
--- a/src/third_party/kms-message/SConscript
+++ b/src/third_party/kms-message/SConscript
@@ -6,12 +6,14 @@ env = env.Clone()
env['CCFLAGS_WERROR'] = []
env['CXXFLAGS_WERROR'] = []
+
def removeIfPresent(lst, item):
try:
lst.remove(item)
except ValueError:
pass
+
for to_remove in ["-Wsign-compare", "-Wall"]:
removeIfPresent(env['CCFLAGS'], to_remove)
removeIfPresent(env['CFLAGS'], to_remove)
diff --git a/src/third_party/libstemmer_c/SConscript b/src/third_party/libstemmer_c/SConscript
index 3ffc4aefea3..62a2d2ab301 100644
--- a/src/third_party/libstemmer_c/SConscript
+++ b/src/third_party/libstemmer_c/SConscript
@@ -37,13 +37,13 @@ stemming_packages = [
"UTF_8_french",
"UTF_8_norwegian",
"UTF_8_russian",
- ]
+]
stemmer_files = [
'runtime/api.c',
'libstemmer/libstemmer_utf8.c',
['src_c/stem_%s.c' % p for p in stemming_packages],
- ]
+]
myEnv = env.Clone()
if myEnv.ToolchainIs('GCC', 'clang'):
diff --git a/src/third_party/mozjs/SConscript b/src/third_party/mozjs/SConscript
index b649e28abba..a861c315d5f 100644
--- a/src/third_party/mozjs/SConscript
+++ b/src/third_party/mozjs/SConscript
@@ -3,52 +3,49 @@
Import([
"get_option",
"env",
- ])
+])
env = env.Clone()
env.InjectThirdParty(libraries=['zlib'])
env['CCFLAGS_WERROR'] = []
+
def removeIfPresent(lst, item):
try:
lst.remove(item)
except ValueError:
pass
+
for to_remove in ['-Wall', '-W', '/W3', '-Wsign-compare', '/permissive-']:
removeIfPresent(env['CCFLAGS'], to_remove)
# See what -D's show up in make. The AB_CD one might change, but we're little
# endian only for now so I think it's sane
env.Prepend(CPPDEFINES=[
- ('IMPL_MFBT', 1),
- ('JS_USE_CUSTOM_ALLOCATOR', 1),
- ('STATIC_JS_API', 1),
- ('U_NO_DEFAULT_INCLUDE_UTF_HEADERS', 1),
- ('UCONFIG_NO_BREAK_ITERATION', 1),
- ('UCONFIG_NO_FORMATTING', 1),
- ('UCONFIG_NO_TRANSLITERATION', 1),
- ('UCONFIG_NO_REGULAR_EXPRESSIONS', 1),
- ('U_CHARSET_IS_UTF8', 1),
- ('U_DISABLE_RENAMING', 1),
- ('U_STATIC_IMPLEMENTATION', 1),
- ('U_USING_ICU_NAMESPACE', 0),
- ])
+ ('IMPL_MFBT', 1),
+ ('JS_USE_CUSTOM_ALLOCATOR', 1),
+ ('STATIC_JS_API', 1),
+ ('U_NO_DEFAULT_INCLUDE_UTF_HEADERS', 1),
+ ('UCONFIG_NO_BREAK_ITERATION', 1),
+ ('UCONFIG_NO_FORMATTING', 1),
+ ('UCONFIG_NO_TRANSLITERATION', 1),
+ ('UCONFIG_NO_REGULAR_EXPRESSIONS', 1),
+ ('U_CHARSET_IS_UTF8', 1),
+ ('U_DISABLE_RENAMING', 1),
+ ('U_STATIC_IMPLEMENTATION', 1),
+ ('U_USING_ICU_NAMESPACE', 0),
+])
if get_option('spider-monkey-dbg') == "on":
env.Prepend(CPPDEFINES=[
- 'DEBUG',
- 'JS_DEBUG',
- 'JS_GC_ZEAL',
- ])
-
+ 'DEBUG',
+ 'JS_DEBUG',
+ 'JS_GC_ZEAL',
+ ])
-env.Append(
- FORCEINCLUDES=[
- 'js-confdefs.h'
- ],
-)
+env.Append(FORCEINCLUDES=['js-confdefs.h'], )
if env.TargetOSIs('windows'):
env.Append(
@@ -79,16 +76,13 @@ if env.TargetOSIs('windows'):
# not enough arguments for function-like macro invocation
'/wd4003',
- ],
- )
+ ], )
else:
- env.Append(
- CXXFLAGS=[
- '-Wno-non-virtual-dtor',
- '-Wno-invalid-offsetof',
- '-Wno-sign-compare'
- ],
- )
+ env.Append(CXXFLAGS=[
+ '-Wno-non-virtual-dtor',
+ '-Wno-invalid-offsetof',
+ '-Wno-sign-compare',
+ ], )
# js/src, js/public and mfbt are the only required sources right now, that
# could change in the future
@@ -138,20 +132,19 @@ sources = [
if env.TargetOSIs('windows'):
sources.extend([
- "extract/mozglue/misc/ConditionVariable_windows.cpp",
- "extract/mozglue/misc/Mutex_windows.cpp",
- "extract/mozglue/misc/TimeStamp_windows.cpp",
+ "extract/mozglue/misc/ConditionVariable_windows.cpp",
+ "extract/mozglue/misc/Mutex_windows.cpp",
+ "extract/mozglue/misc/TimeStamp_windows.cpp",
])
else:
sources.extend([
- "extract/mozglue/misc/ConditionVariable_posix.cpp",
- "extract/mozglue/misc/Mutex_posix.cpp",
- "extract/mozglue/misc/TimeStamp_posix.cpp",
+ "extract/mozglue/misc/ConditionVariable_posix.cpp",
+ "extract/mozglue/misc/Mutex_posix.cpp",
+ "extract/mozglue/misc/TimeStamp_posix.cpp",
])
-
-
-sources.append( [ "extract/modules/fdlibm/{}".format(f) for f in [
+sources.append([
+ "extract/modules/fdlibm/{}".format(f) for f in [
'e_acos.cpp',
'e_acosh.cpp',
'e_asin.cpp',
@@ -192,7 +185,8 @@ sources.append( [ "extract/modules/fdlibm/{}".format(f) for f in [
's_tanh.cpp',
's_trunc.cpp',
's_truncf.cpp',
-]])
+ ]
+])
if env.TargetOSIs('windows'):
env.Prepend(CPPDEFINES=[
@@ -201,11 +195,9 @@ if env.TargetOSIs('windows'):
])
if env['TARGET_ARCH'] == 'x86_64':
- env.Prepend(CPPDEFINES=[
- ("WASM_HUGE_MEMORY", "1")
- ])
+ env.Prepend(CPPDEFINES=[("WASM_HUGE_MEMORY", "1")])
-sourceFilePatterns=[
+sourceFilePatterns = [
"/build/*.cpp",
"/build/jit/*.cpp",
"/build/gc/*.cpp",
@@ -249,8 +241,7 @@ env.Append(
('U_DISABLE_RENAMING', 1),
('U_STATIC_IMPLEMENTATION', 1),
('U_USING_ICU_NAMESPACE', 0),
- ],
-)
+ ], )
if env.TargetOSIs('solaris'):
# On Solaris, compile of certain files fails if the below define is not enabled. Specifically,
@@ -260,11 +251,10 @@ if env.TargetOSIs('solaris'):
# error because XPG4v2 programs are incompatible with C99. If we keep _XOPEN_SOURCE=600 but
# force _XOPEN_SOURCE_EXTENDED=0, then <sys/feature_tests.h> chooses XPG6, which resolves the
# error (since XPG6 is compatible with C99).
- env.Append(
- CPPDEFINES=[
- ('_XOPEN_SOURCE_EXTENDED', 0),
- ],
- )
+ env.Append(CPPDEFINES=[
+ ('_XOPEN_SOURCE_EXTENDED', 0),
+ ], )
+
def removeIfPresent(lst, item):
try:
@@ -272,6 +262,7 @@ def removeIfPresent(lst, item):
except ValueError:
pass
+
env['CCFLAGS_WERROR'] = []
for to_remove in ['-Wall', '-W']:
removeIfPresent(env['CCFLAGS'], to_remove)
@@ -284,7 +275,6 @@ if env.TargetOSIs('windows'):
# C4996: '...': was declared deprecated
env.Append(CCFLAGS=['/wd4996'])
-
# Suppress `register` keyword warnings in FreeBSD builds
if env.TargetOSIs('freebsd'):
diff --git a/src/third_party/pcre-8.42/SConscript b/src/third_party/pcre-8.42/SConscript
index 7abb80b2f24..c929cca304e 100644
--- a/src/third_party/pcre-8.42/SConscript
+++ b/src/third_party/pcre-8.42/SConscript
@@ -4,16 +4,20 @@ Import("env")
env = env.Clone()
-env.Append( CPPDEFINES=[ "HAVE_CONFIG_H", ] )
+env.Append(CPPDEFINES=[
+ "HAVE_CONFIG_H",
+])
env['CCFLAGS_WERROR'] = []
+
def removeIfPresent(lst, item):
try:
lst.remove(item)
except ValueError:
pass
+
for to_remove in ['-Wall', '-W']:
removeIfPresent(env['CCFLAGS'], to_remove)
@@ -38,7 +42,9 @@ elif env.TargetOSIs('solaris'):
else:
env.Append(CPPPATH=["build_posix"])
-env.Library( "pcrecpp", [
+env.Library(
+ "pcrecpp",
+ [
# pcre
"pcre_byte_order.c",
"pcre_compile.c",
@@ -65,4 +71,5 @@ env.Library( "pcrecpp", [
"pcrecpp.cc",
"pcre_scanner.cc",
"pcre_stringpiece.cc",
- ] )
+ ],
+)
diff --git a/src/third_party/pcre2/SConscript b/src/third_party/pcre2/SConscript
index c9b3c5ccadc..c0956d85374 100644
--- a/src/third_party/pcre2/SConscript
+++ b/src/third_party/pcre2/SConscript
@@ -4,12 +4,10 @@ Import("env")
env = env.Clone()
-env.Append(
- CPPDEFINES=[
- ('PCRE2_CODE_UNIT_WIDTH', 8),
- 'HAVE_CONFIG_H',
- ],
-)
+env.Append(CPPDEFINES=[
+ ('PCRE2_CODE_UNIT_WIDTH', 8),
+ 'HAVE_CONFIG_H',
+], )
# Directories that include generated config.h for various platforms
#
@@ -61,5 +59,5 @@ env.Library(
"src/pcre2_tables.c",
"src/pcre2_ucd.c",
"src/pcre2_valid_utf.c",
- ],
- )
+ ],
+)
diff --git a/src/third_party/s2/SConscript b/src/third_party/s2/SConscript
index e348f4c51c5..a7f3fbf7bca 100644
--- a/src/third_party/s2/SConscript
+++ b/src/third_party/s2/SConscript
@@ -30,33 +30,38 @@ if env.TargetOSIs('windows'):
# conformant name: _memccpy. See online help for details
env.Append(CCFLAGS=['/wd4996'])
-env.Library( "s2",
- [
- "s1angle.cc",
- "s2.cc",
- "s2cellid.cc",
- "s2latlng.cc",
- "s1interval.cc",
- "s2cap.cc",
- "s2cell.cc",
- "s2cellunion.cc",
- "s2edgeindex.cc",
- "s2edgeutil.cc",
- "s2latlngrect.cc",
- "s2loop.cc",
- "s2pointregion.cc",
- "s2polygon.cc",
- "s2polygonbuilder.cc",
- "s2polyline.cc",
- "s2r2rect.cc",
- "s2region.cc",
- "s2regioncoverer.cc",
- "s2regionintersection.cc",
- "s2regionunion.cc",
- ], LIBDEPS=['$BUILD_DIR/third_party/s2/base/base_s2',
- '$BUILD_DIR/third_party/s2/strings/strings',
- '$BUILD_DIR/third_party/s2/util/coding/coding',
- '$BUILD_DIR/third_party/s2/util/math/math'])
+env.Library(
+ "s2",
+ [
+ "s1angle.cc",
+ "s2.cc",
+ "s2cellid.cc",
+ "s2latlng.cc",
+ "s1interval.cc",
+ "s2cap.cc",
+ "s2cell.cc",
+ "s2cellunion.cc",
+ "s2edgeindex.cc",
+ "s2edgeutil.cc",
+ "s2latlngrect.cc",
+ "s2loop.cc",
+ "s2pointregion.cc",
+ "s2polygon.cc",
+ "s2polygonbuilder.cc",
+ "s2polyline.cc",
+ "s2r2rect.cc",
+ "s2region.cc",
+ "s2regioncoverer.cc",
+ "s2regionintersection.cc",
+ "s2regionunion.cc",
+ ],
+ LIBDEPS=[
+ '$BUILD_DIR/third_party/s2/base/base_s2',
+ '$BUILD_DIR/third_party/s2/strings/strings',
+ '$BUILD_DIR/third_party/s2/util/coding/coding',
+ '$BUILD_DIR/third_party/s2/util/math/math',
+ ],
+)
#env.Program('r1interval_test', ['r1interval_test.cc'],
# LIBDEPS=['s2', '$BUILD_DIR/third_party/gtest/gtest_with_main'])
diff --git a/src/third_party/s2/base/SConscript b/src/third_party/s2/base/SConscript
index 8e437001e0e..bb01b237c97 100755
--- a/src/third_party/s2/base/SConscript
+++ b/src/third_party/s2/base/SConscript
@@ -8,12 +8,12 @@ env.Append(CCFLAGS=['-Isrc/third_party/s2'])
env.Library(
"base_s2",
- [
- "int128.cc",
- "logging.cc",
- "logging_mongo.cc",
- "stringprintf.cc",
- "strtoint.cc",
+ [
+ "int128.cc",
+ "logging.cc",
+ "logging_mongo.cc",
+ "stringprintf.cc",
+ "strtoint.cc",
],
LIBDEPS_PRIVATE=[
"$BUILD_DIR/mongo/base",
diff --git a/src/third_party/s2/strings/SConscript b/src/third_party/s2/strings/SConscript
index 2a44d9af587..0f8c601b73f 100755
--- a/src/third_party/s2/strings/SConscript
+++ b/src/third_party/s2/strings/SConscript
@@ -8,10 +8,10 @@ env.Append(CCFLAGS=['-Isrc/third_party/s2'])
env.Library(
"strings",
- [
- "split.cc",
- "stringprintf.cc",
- "strutil.cc",
+ [
+ "split.cc",
+ "stringprintf.cc",
+ "strutil.cc",
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
diff --git a/src/third_party/s2/util/coding/SConscript b/src/third_party/s2/util/coding/SConscript
index 8dc773bf5b6..bcb44680bc7 100755
--- a/src/third_party/s2/util/coding/SConscript
+++ b/src/third_party/s2/util/coding/SConscript
@@ -14,11 +14,12 @@ if env.TargetOSIs('windows'):
env.Library(
"coding",
- [
- "coder.cc",
- "varint.cc",
+ [
+ "coder.cc",
+ "varint.cc",
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/third_party/s2/base/base_s2',
- ])
+ ],
+)
diff --git a/src/third_party/s2/util/math/SConscript b/src/third_party/s2/util/math/SConscript
index b76175a95a0..f40d9e08bf4 100755
--- a/src/third_party/s2/util/math/SConscript
+++ b/src/third_party/s2/util/math/SConscript
@@ -14,11 +14,14 @@ elif env.TargetOSIs('windows'):
# C4305: 'initializing': truncation from '...' to '...'
env.Append(CCFLAGS=['/wd4305'])
-env.Library("math",
- [ "mathutil.cc",
- # "mathlimits.cc",
- ],
- LIBDEPS=[
- '$BUILD_DIR/mongo/base',
- '$BUILD_DIR/third_party/s2/base/base_s2',
- ])
+env.Library(
+ "math",
+ [
+ "mathutil.cc",
+ # "mathlimits.cc",
+ ],
+ LIBDEPS=[
+ '$BUILD_DIR/mongo/base',
+ '$BUILD_DIR/third_party/s2/base/base_s2',
+ ],
+)
diff --git a/src/third_party/snappy-1.1.7/SConscript b/src/third_party/snappy-1.1.7/SConscript
index 6102daab87e..8d134c7bea8 100644
--- a/src/third_party/snappy-1.1.7/SConscript
+++ b/src/third_party/snappy-1.1.7/SConscript
@@ -19,7 +19,9 @@ elif env.TargetOSIs('windows'):
elif not env.TargetOSIs('windows'):
env.Append(CPPPATH=["build_posix"])
-env.Append( CPPDEFINES=[ "HAVE_CONFIG_H", ] )
+env.Append(CPPDEFINES=[
+ "HAVE_CONFIG_H",
+])
if env.ToolchainIs('clang', 'GCC'):
env.Append(CCFLAGS=['-Wno-sign-compare', '-Wno-unused-function'])
@@ -30,4 +32,5 @@ env.Library(
'snappy-c.cc',
'snappy.cc',
'snappy-sinksource.cc',
- ])
+ ],
+)
diff --git a/src/third_party/tomcrypt-1.18.2/SConscript b/src/third_party/tomcrypt-1.18.2/SConscript
index 56bd05143f0..b5b548eaab3 100644
--- a/src/third_party/tomcrypt-1.18.2/SConscript
+++ b/src/third_party/tomcrypt-1.18.2/SConscript
@@ -2,18 +2,20 @@
Import([
"env",
- ])
+])
env = env.Clone()
env['CCFLAGS_WERROR'] = []
+
def removeIfPresent(lst, item):
try:
lst.remove(item)
except ValueError:
pass
+
for to_remove in ['-Wall', '-W']:
removeIfPresent(env['CCFLAGS'], to_remove)
@@ -21,15 +23,13 @@ env.Prepend(CPPPATH=[
'src/headers',
])
-env.Append(
- CPPDEFINES=[
- 'LTC_NO_PROTOTYPES',
- ]
-)
+env.Append(CPPDEFINES=[
+ 'LTC_NO_PROTOTYPES',
+])
env.Library(
target="tomcrypt",
- source= [
+ source=[
"src/ciphers/aes/aes.c",
"src/ciphers/aes/aes_tab.c",
"src/hashes/helper/hash_memory.c",
diff --git a/src/third_party/unwind/SConscript b/src/third_party/unwind/SConscript
index 643ae2c86bd..01b5c41d2a1 100644
--- a/src/third_party/unwind/SConscript
+++ b/src/third_party/unwind/SConscript
@@ -19,8 +19,7 @@ env = env.Clone(
# breaks that. It also seems to interfere with building with the
# sanitizers, which use linker maps that reference backtrace. This
# may be an ld.gold bug.
- DISALLOW_VISHIDDEN=True,
-)
+ DISALLOW_VISHIDDEN=True, )
unwind_root = env.Dir(".").srcnode()
unwind_platform = unwind_root.Dir("platform/${TARGET_OS}_${TARGET_ARCH}")
@@ -122,45 +121,41 @@ elif env['TARGET_ARCH'] == 's390x':
else:
env.FatalError(f"{env['TARGET_ARCH']} unsupported by libunwind.")
-env.Append(
- CCFLAGS=[
- '-fexceptions',
- '-Wno-unused-result',
- '-Wno-pointer-sign',
- '-Wno-incompatible-pointer-types',
- '-Wno-unused-variable',
- ])
+env.Append(CCFLAGS=[
+ '-fexceptions',
+ '-Wno-unused-result',
+ '-Wno-pointer-sign',
+ '-Wno-incompatible-pointer-types',
+ '-Wno-unused-variable',
+])
if env['TARGET_ARCH'] == 'ppc64le':
- env.Append(
- CCFLAGS=[
- '-Wno-unused-value'
- ])
+ env.Append(CCFLAGS=[
+ '-Wno-unused-value',
+ ])
if env.ToolchainIs('clang'):
env.Append(CCFLAGS=['-Wno-header-guard'])
-env.Append(
- CPPPATH=[
- unwind_platform.Dir("build/include"),
- unwind_root.Dir("dist/src"),
- unwind_root.Dir("dist/include"),
- unwind_root.Dir("dist/include/tdep-${UNWIND_TARGET_ARCH}"),
- ])
+env.Append(CPPPATH=[
+ unwind_platform.Dir("build/include"),
+ unwind_root.Dir("dist/src"),
+ unwind_root.Dir("dist/include"),
+ unwind_root.Dir("dist/include/tdep-${UNWIND_TARGET_ARCH}"),
+])
# propagates to consumers that inject (depend on) unwind.
env.RegisterConsumerModifications(
CPPPATH=[unwind_platform.Dir("install/include")],
- SYSLIBDEPS_PRIVATE=[env['LIBDEPS_LZMA_SYSLIBDEP']])
+ SYSLIBDEPS_PRIVATE=[env['LIBDEPS_LZMA_SYSLIBDEP']],
+)
-env.Append(
- SYSLIBDEPS_PRIVATE=[env['LIBDEPS_LZMA_SYSLIBDEP']])
+env.Append(SYSLIBDEPS_PRIVATE=[env['LIBDEPS_LZMA_SYSLIBDEP']])
-env.Append(
- CPPDEFINES=[
- 'HAVE_CONFIG_H',
- '_GNU_SOURCE',
- ])
+env.Append(CPPDEFINES=[
+ 'HAVE_CONFIG_H',
+ '_GNU_SOURCE',
+])
unwind_sources = unwind_common_sources + unwind_platform_sources
diff --git a/src/third_party/wiredtiger/SConscript b/src/third_party/wiredtiger/SConscript
index 8d2b67db115..58ee1ec818e 100644
--- a/src/third_party/wiredtiger/SConscript
+++ b/src/third_party/wiredtiger/SConscript
@@ -14,14 +14,14 @@ if endian == "big":
env.Append(CPPDEFINES=[('WORDS_BIGENDIAN', 1)])
env.Append(CPPPATH=[
- "src/include",
- ])
+ "src/include",
+])
# Enable asserts in debug builds
if debugBuild:
env.Append(CPPDEFINES=[
"HAVE_DIAGNOSTIC",
- ])
+ ])
# Enable optional rich logging
env.Append(CPPDEFINES=["HAVE_VERBOSE"])
@@ -29,28 +29,28 @@ env.Append(CPPDEFINES=["HAVE_VERBOSE"])
conf = Configure(env)
if conf.CheckFunc("fallocate"):
conf.env.Append(CPPDEFINES=[
- "HAVE_FALLOCATE"
+ "HAVE_FALLOCATE",
])
if conf.CheckFunc("sync_file_range"):
conf.env.Append(CPPDEFINES=[
- "HAVE_SYNC_FILE_RANGE"
+ "HAVE_SYNC_FILE_RANGE",
])
# GCC 8+ includes x86intrin.h in non-x64 versions of the compiler so limit the check to x64.
if env['TARGET_ARCH'] == 'x86_64' and conf.CheckCHeader('x86intrin.h'):
conf.env.Append(CPPDEFINES=[
- "HAVE_X86INTRIN_H"
+ "HAVE_X86INTRIN_H",
])
if conf.CheckCHeader('arm_neon.h'):
conf.env.Append(CPPDEFINES=[
- "HAVE_ARM_NEON_INTRIN_H"
+ "HAVE_ARM_NEON_INTRIN_H",
])
-env = conf.Finish();
+env = conf.Finish()
if env.TargetOSIs('windows'):
env.Append(CPPPATH=["build_win"])
env.Append(CFLAGS=[
- "/wd4090" # Ignore warning about mismatched const qualifiers
+ "/wd4090" # Ignore warning about mismatched const qualifiers
])
if env['MONGO_ALLOCATOR'] in ['tcmalloc', 'tcmalloc-experimental']:
env.InjectThirdParty(libraries=['gperftools'])
@@ -75,7 +75,7 @@ elif env.TargetOSIs('linux'):
env.Append(CPPDEFINES=["_GNU_SOURCE"])
else:
print("Wiredtiger is not supported on this platform. " +
- "Please generate an approriate wiredtiger_config.h")
+ "Please generate an approriate wiredtiger_config.h")
Exit(1)
useZlib = True
@@ -95,10 +95,8 @@ for l in open(File(version_file).srcnode().abspath):
if m and len(m.groups()) == 2:
exec('%s=%s' % (m.group(1), m.group(2)))
-if (VERSION_MAJOR == None or
- VERSION_MINOR == None or
- VERSION_PATCH == None or
- VERSION_STRING == None):
+if (VERSION_MAJOR == None or VERSION_MINOR == None or VERSION_PATCH == None
+ or VERSION_STRING == None):
print("Failed to find version variables in " + version_file)
Exit(1)
@@ -114,15 +112,22 @@ wiredtiger_includes = """
"""
wiredtiger_includes = textwrap.dedent(wiredtiger_includes)
replacements = {
- '@VERSION_MAJOR@' : VERSION_MAJOR,
- '@VERSION_MINOR@' : VERSION_MINOR,
- '@VERSION_PATCH@' : VERSION_PATCH,
- '@VERSION_STRING@' : '"' + VERSION_STRING + '"',
- '@uintmax_t_decl@': "",
- '@uintptr_t_decl@': "",
- '@off_t_decl@' : 'typedef int64_t wt_off_t;' if env.TargetOSIs('windows')
- else "typedef off_t wt_off_t;",
- '@wiredtiger_includes_decl@': wiredtiger_includes
+ '@VERSION_MAJOR@':
+ VERSION_MAJOR,
+ '@VERSION_MINOR@':
+ VERSION_MINOR,
+ '@VERSION_PATCH@':
+ VERSION_PATCH,
+ '@VERSION_STRING@':
+ '"' + VERSION_STRING + '"',
+ '@uintmax_t_decl@':
+ "",
+ '@uintptr_t_decl@':
+ "",
+ '@off_t_decl@':
+ 'typedef int64_t wt_off_t;' if env.TargetOSIs('windows') else "typedef off_t wt_off_t;",
+ '@wiredtiger_includes_decl@':
+ wiredtiger_includes,
}
wiredtiger_h = env.Substfile(
@@ -130,12 +135,13 @@ wiredtiger_h = env.Substfile(
source=[
'src/include/wiredtiger.in',
],
- SUBST_DICT=replacements)
+ SUBST_DICT=replacements,
+)
wiredtiger_ext_h = env.Install(
target='.',
source=[
- 'src/include/wiredtiger_ext.h'
+ 'src/include/wiredtiger_ext.h',
],
)
@@ -159,16 +165,16 @@ env.AutoInstall(
# included.
#
condition_map = {
- 'POSIX_HOST' : not env.TargetOSIs('windows'),
- 'WINDOWS_HOST' : env.TargetOSIs('windows'),
-
- 'ARM64_HOST' : env['TARGET_ARCH'] == 'aarch64',
- 'POWERPC_HOST' : env['TARGET_ARCH'] == 'ppc64le',
- 'RISCV64_HOST' : env['TARGET_ARCH'] == 'riscv64',
- 'X86_HOST' : env['TARGET_ARCH'] == 'x86_64',
- 'ZSERIES_HOST' : env['TARGET_ARCH'] == 's390x',
+ 'POSIX_HOST': not env.TargetOSIs('windows'),
+ 'WINDOWS_HOST': env.TargetOSIs('windows'),
+ 'ARM64_HOST': env['TARGET_ARCH'] == 'aarch64',
+ 'POWERPC_HOST': env['TARGET_ARCH'] == 'ppc64le',
+ 'RISCV64_HOST': env['TARGET_ARCH'] == 'riscv64',
+ 'X86_HOST': env['TARGET_ARCH'] == 'x86_64',
+ 'ZSERIES_HOST': env['TARGET_ARCH'] == 's390x',
}
+
def filtered_filelist(f, checksum):
for line in f:
file_cond = line.split()
@@ -178,6 +184,7 @@ def filtered_filelist(f, checksum):
if line.startswith('src/checksum/') == checksum:
yield file_cond[0]
+
filelistfile = 'dist/filelist'
with open(File(filelistfile).srcnode().abspath) as filelist:
wtsources = list(filtered_filelist(filelist, False))
diff --git a/src/third_party/yaml-cpp/SConscript b/src/third_party/yaml-cpp/SConscript
index 6e0d5ff0d05..1c5c658391e 100644
--- a/src/third_party/yaml-cpp/SConscript
+++ b/src/third_party/yaml-cpp/SConscript
@@ -45,4 +45,5 @@ env.Library(
"yaml-cpp/src/singledocparser.cpp",
"yaml-cpp/src/stream.cpp",
"yaml-cpp/src/tag.cpp",
- ])
+ ],
+)