Diffstat (limited to 'site_scons')
-rw-r--r--  site_scons/libdeps.py                            | 240
-rw-r--r--  site_scons/mongo/__init__.py                     |   2
-rw-r--r--  site_scons/mongo/generators.py                   |  16
-rw-r--r--  site_scons/mongo/install_actions.py              |  20
-rw-r--r--  site_scons/mongo/pip_requirements.py             |  12
-rw-r--r--  site_scons/site_tools/abilink.py                 |   6
-rw-r--r--  site_scons/site_tools/auto_archive.py            |  24
-rw-r--r--  site_scons/site_tools/auto_install_binaries.py   | 120
-rw-r--r--  site_scons/site_tools/ccache.py                  |  11
-rw-r--r--  site_scons/site_tools/compilation_db.py          |  22
-rw-r--r--  site_scons/site_tools/distsrc.py                 |  50
-rw-r--r--  site_scons/site_tools/forceincludes.py           |  10
-rw-r--r--  site_scons/site_tools/git_decider.py             |   1
-rw-r--r--  site_scons/site_tools/gziptool.py                |   9
-rw-r--r--  site_scons/site_tools/icecream.py                | 130
-rwxr-xr-x  site_scons/site_tools/idl_tool.py                |  29
-rw-r--r--  site_scons/site_tools/incremental_link.py        |  15
-rwxr-xr-x  site_scons/site_tools/jstoh.py                   |   6
-rw-r--r--  site_scons/site_tools/mongo_benchmark.py         |   7
-rw-r--r--  site_scons/site_tools/mongo_integrationtest.py   |   7
-rw-r--r--  site_scons/site_tools/mongo_libfuzzer.py         |  12
-rw-r--r--  site_scons/site_tools/mongo_test_execution.py    |  41
-rw-r--r--  site_scons/site_tools/mongo_test_list.py         |   7
-rw-r--r--  site_scons/site_tools/mongo_unittest.py          |   7
-rw-r--r--  site_scons/site_tools/ninja.py                   | 242
-rw-r--r--  site_scons/site_tools/separate_debug.py          |  64
-rw-r--r--  site_scons/site_tools/split_dwarf.py             |   5
-rw-r--r--  site_scons/site_tools/tapilink.py                |  16
-rw-r--r--  site_scons/site_tools/thin_archive.py            |   6
-rw-r--r--  site_scons/site_tools/validate_cache_dir.py      |  51
-rw-r--r--  site_scons/site_tools/vcredist.py                |  51
-rw-r--r--  site_scons/site_tools/xcode.py                   |   3
32 files changed, 622 insertions, 620 deletions
diff --git a/site_scons/libdeps.py b/site_scons/libdeps.py
index ee57adf15a3..767f2a7baf4 100644
--- a/site_scons/libdeps.py
+++ b/site_scons/libdeps.py
@@ -77,14 +77,13 @@ import SCons
from SCons.Script import COMMAND_LINE_TARGETS
-
class Constants:
Libdeps = "LIBDEPS"
LibdepsCached = "LIBDEPS_cached"
LibdepsDependents = "LIBDEPS_DEPENDENTS"
LibdepsGlobal = "LIBDEPS_GLOBAL"
LibdepsNoInherit = "LIBDEPS_NO_INHERIT"
- LibdepsInterface ="LIBDEPS_INTERFACE"
+ LibdepsInterface = "LIBDEPS_INTERFACE"
LibdepsPrivate = "LIBDEPS_PRIVATE"
LibdepsTags = "LIBDEPS_TAGS"
LibdepsTagExpansion = "LIBDEPS_TAG_EXPANSIONS"
@@ -210,7 +209,6 @@ class FlaggedLibdep:
result.append(next_contig_str)
-
class LibdepLinter:
"""
This class stores the rules for linting the libdeps. Using a decorator,
@@ -245,6 +243,7 @@ class LibdepLinter:
"""
funcs = {}
+
def linter_rule_func(func):
funcs[func.__name__] = func
return func
@@ -280,14 +279,17 @@ class LibdepLinter:
self.unique_libs = set()
self._libdeps_types_previous = dict()
-
# If we are in print mode, we will record some linting metrics,
# and print the results at the end of the build.
if self.__class__.print_linter_errors and not self.__class__.registered_linting_time:
import atexit
+
def print_linting_time():
print(f"Spent {self.__class__.linting_time} seconds linting libdeps.")
- print(f"Found {self.__class__.linting_infractions} issues out of {self.__class__.linting_rules_run} libdeps rules checked.")
+ print(
+ f"Found {self.__class__.linting_infractions} issues out of {self.__class__.linting_rules_run} libdeps rules checked."
+ )
+
atexit.register(print_linting_time)
self.__class__.registered_linting_time = True
@@ -303,16 +305,13 @@ class LibdepLinter:
return
start = self._start_timer()
- linter_rules = [
- getattr(self, linter_rule)
- for linter_rule in self.linter_rule.all
- ]
+ linter_rules = [getattr(self, linter_rule) for linter_rule in self.linter_rule.all]
for libdep in libdeps:
for linter_rule in linter_rules:
linter_rule(libdep)
- self._stop_timer(start, len(linter_rules)*len(libdeps))
+ self._stop_timer(start, len(linter_rules) * len(libdeps))
def final_checks(self):
# Build performance optimization if you
@@ -322,8 +321,7 @@ class LibdepLinter:
start = self._start_timer()
linter_rules = [
- getattr(self.__class__, rule)
- for rule in self.__class__.linter_final_check.all
+ getattr(self.__class__, rule) for rule in self.__class__.linter_final_check.all
]
for linter_rule in linter_rules:
@@ -372,11 +370,8 @@ class LibdepLinter:
return deps_dependents
def _get_deps_dependents_with_types(self, builder, type):
- return [
- (dependent[0], builder) if isinstance(dependent, tuple) else
- (dependent, builder)
- for dependent in self.env.get(type, [])
- ]
+ return [(dependent[0], builder) if isinstance(dependent, tuple) else (dependent, builder)
+ for dependent in self.env.get(type, [])]
@linter_rule
def linter_rule_leaf_node_no_deps(self, libdep):
@@ -403,8 +398,7 @@ class LibdepLinter:
self._raise_libdep_lint_exception(
textwrap.dedent(f"""\
{target_type} '{self.target[0]}' has dependency '{lib}' and is marked explicitly as a leaf node,
- and '{lib}' does not exempt itself as an exception to the rule."""
- ))
+ and '{lib}' does not exempt itself as an exception to the rule."""))
@linter_rule
def linter_rule_no_dangling_deps(self, libdep):
@@ -418,8 +412,10 @@ class LibdepLinter:
# Gather the DEPS_DEPENDENTS and store them for a final check to make sure they were
# eventually defined as being built by some builder
libdep_libbuilder = self.target[0].builder.get_name(self.env)
- deps_depends = self._get_deps_dependents_with_types(libdep_libbuilder, Constants.LibdepsDependents)
- deps_depends += self._get_deps_dependents_with_types("Program", Constants.ProgdepsDependents)
+ deps_depends = self._get_deps_dependents_with_types(libdep_libbuilder,
+ Constants.LibdepsDependents)
+ deps_depends += self._get_deps_dependents_with_types("Program",
+ Constants.ProgdepsDependents)
self.__class__.dangling_dep_dependents.update(deps_depends)
@linter_final_check
@@ -434,8 +430,7 @@ class LibdepLinter:
textwrap.dedent(f"""\
Found reverse dependency linked to node '{dep_node}'
which will never be built by any builder.
- Remove the reverse dependency or add a way to build it."""
- ))
+ Remove the reverse dependency or add a way to build it."""))
@linter_rule
def linter_rule_no_public_deps(self, libdep):
@@ -458,8 +453,7 @@ class LibdepLinter:
textwrap.dedent(f"""\
{target_type} '{self.target[0]}' has public dependency '{lib}'
while being marked as not allowed to have public dependencies
- and '{lib}' does not exempt itself."""
- ))
+ and '{lib}' does not exempt itself."""))
@linter_rule
def linter_rule_no_dups(self, libdep):
@@ -475,8 +469,7 @@ class LibdepLinter:
target_type = self.target[0].builder.get_name(self.env)
lib = os.path.basename(str(libdep))
self._raise_libdep_lint_exception(
- f"{target_type} '{self.target[0]}' links '{lib}' multiple times."
- )
+ f"{target_type} '{self.target[0]}' links '{lib}' multiple times.")
self.unique_libs.add(str(libdep))
@@ -513,15 +506,14 @@ class LibdepLinter:
return
if (self.target[0].builder.get_name(self.env) == "Program"
- and libdep.dependency_type not in (deptype.Global, deptype.Public)):
+ and libdep.dependency_type not in (deptype.Global, deptype.Public)):
lib = os.path.basename(str(libdep))
self._raise_libdep_lint_exception(
textwrap.dedent(f"""\
Program '{self.target[0]}' links non-public library '{lib}'
A 'Program' can only have {Constants.Libdeps} libs,
- not {Constants.LibdepsPrivate} or {Constants.LibdepsInterface}."""
- ))
+ not {Constants.LibdepsPrivate} or {Constants.LibdepsInterface}."""))
@linter_rule
def linter_rule_no_bidirectional_deps(self, libdep):
@@ -540,12 +532,13 @@ class LibdepLinter:
return
elif len(self._get_deps_dependents(libdep.target_node.env)) > 0:
- target_type = self.target[0].builder.get_name(self.env)
- lib = os.path.basename(str(libdep))
- self._raise_libdep_lint_exception(textwrap.dedent(f"""\
+ target_type = self.target[0].builder.get_name(self.env)
+ lib = os.path.basename(str(libdep))
+ self._raise_libdep_lint_exception(
+ textwrap.dedent(f"""\
{target_type} '{self.target[0]}' links directly to a reverse dependency node '{lib}'
No node can link directly to a node that has {Constants.LibdepsDependents} or {Constants.ProgdepsDependents}."""
- ))
+ ))
@linter_rule
def linter_rule_nonprivate_on_deps_dependents(self, libdep):
@@ -560,14 +553,15 @@ class LibdepLinter:
return
if (libdep.dependency_type != deptype.Private and libdep.dependency_type != deptype.Global
- and len(self._get_deps_dependents()) > 0):
+ and len(self._get_deps_dependents()) > 0):
target_type = self.target[0].builder.get_name(self.env)
lib = os.path.basename(str(libdep))
- self._raise_libdep_lint_exception(textwrap.dedent(f"""\
+ self._raise_libdep_lint_exception(
+ textwrap.dedent(f"""\
{target_type} '{self.target[0]}' links non-private libdep '{lib}' and has a reverse dependency.
A {target_type} can only have {Constants.LibdepsPrivate} depends if it has {Constants.LibdepsDependents} or {Constants.ProgdepsDependents}."""
- ))
+ ))
@linter_rule
def linter_rule_libdeps_must_be_list(self, libdep):
@@ -581,7 +575,8 @@ class LibdepLinter:
libdeps_vars = list(dep_type_to_env_var.values()) + [
Constants.LibdepsDependents,
- Constants.ProgdepsDependents]
+ Constants.ProgdepsDependents,
+ ]
for dep_type_val in libdeps_vars:
@@ -589,10 +584,11 @@ class LibdepLinter:
if not SCons.Util.is_List(libdeps_list):
target_type = self.target[0].builder.get_name(self.env)
- self._raise_libdep_lint_exception(textwrap.dedent(f"""\
+ self._raise_libdep_lint_exception(
+ textwrap.dedent(f"""\
Found non-list type '{libdeps_list}' while evaluating {dep_type_val[1]} for {target_type} '{self.target[0]}'
- {dep_type_val[1]} must be setup as a list."""
- ))
+ {dep_type_val[1]} must be setup as a list."""))
+
dependency_visibility_ignored = {
deptype.Global: deptype.Public,
@@ -615,6 +611,7 @@ dep_type_to_env_var = {
deptype.Private: Constants.LibdepsPrivate,
}
+
class DependencyCycleError(SCons.Errors.UserError):
"""Exception representing a cycle discovered in library dependencies."""
@@ -623,16 +620,17 @@ class DependencyCycleError(SCons.Errors.UserError):
self.cycle_nodes = [first_node]
def __str__(self):
- return "Library dependency cycle detected: " + " => ".join(
- str(n) for n in self.cycle_nodes
- )
+ return "Library dependency cycle detected: " + " => ".join(str(n) for n in self.cycle_nodes)
+
class LibdepLinterError(SCons.Errors.UserError):
"""Exception representing a discongruent usages of libdeps"""
+
class MissingSyslibdepError(SCons.Errors.UserError):
"""Exception representing a discongruent usages of libdeps"""
+
def _get_sorted_direct_libdeps(node):
direct_sorted = getattr(node.attributes, "libdeps_direct_sorted", None)
if direct_sorted is None:
@@ -808,9 +806,8 @@ def update_scanner(env, builder_name=None, debug=False):
print('\n')
return result
- builder.target_scanner = SCons.Scanner.Scanner(
- function=new_scanner, path_function=path_function
- )
+ builder.target_scanner = SCons.Scanner.Scanner(function=new_scanner,
+ path_function=path_function)
def get_libdeps(source, target, env, for_signature, debug=False):
@@ -853,7 +850,8 @@ def get_syslibdeps(source, target, env, for_signature, debug=False, shared=True)
if deps is None:
# Get the syslibdeps for the current node
- deps = target[0].get_env().Flatten(copy.copy(target[0].get_env().get(Constants.SysLibdepsPrivate)) or [])
+ deps = target[0].get_env().Flatten(
+ copy.copy(target[0].get_env().get(Constants.SysLibdepsPrivate)) or [])
deps += target[0].get_env().Flatten(target[0].get_env().get(Constants.SysLibdeps) or [])
for lib in _get_libdeps(target[0]):
@@ -875,12 +873,12 @@ def get_syslibdeps(source, target, env, for_signature, debug=False, shared=True)
continue
if isinstance(syslib, str) and syslib.startswith(Constants.MissingLibdep):
- raise MissingSyslibdepError(textwrap.dedent(f"""\
+ raise MissingSyslibdepError(
+ textwrap.dedent(f"""\
LibdepsError:
Target '{str(target[0])}' depends on the availability of a
system provided library for '{syslib[len(Constants.MissingLibdep):]}',
- but no suitable library was found during configuration."""
- ))
+ but no suitable library was found during configuration."""))
deps.append(syslib)
@@ -946,15 +944,19 @@ def _get_node_with_ixes(env, node, node_builder_type):
node_with_ixes = SCons.Util.adjustixes(node, prefix, suffix)
return node_factory(node_with_ixes)
+
_get_node_with_ixes.node_type_ixes = dict()
+
def add_node_from(env, node):
env.GetLibdepsGraph().add_nodes_from([(
str(node.abspath),
{
NodeProps.bin_type.name: node.builder.get_name(env),
- })])
+ },
+ )])
+
def add_edge_from(env, from_node, to_node, visibility, direct):
@@ -963,8 +965,10 @@ def add_edge_from(env, from_node, to_node, visibility, direct):
to_node,
{
EdgeProps.direct.name: direct,
- EdgeProps.visibility.name: int(visibility)
- })])
+ EdgeProps.visibility.name: int(visibility),
+ },
+ )])
+
def add_libdeps_node(env, target, libdeps):
@@ -979,7 +983,8 @@ def add_libdeps_node(env, target, libdeps):
str(node.abspath),
str(libdep.target_node.abspath),
visibility=libdep.dependency_type,
- direct=True)
+ direct=True,
+ )
def get_libdeps_nodes(env, target, builder, debug=False, visibility_map=None):
@@ -1027,7 +1032,8 @@ def get_libdeps_nodes(env, target, builder, debug=False, visibility_map=None):
return libdeps
-def libdeps_emitter(target, source, env, debug=False, builder=None, visibility_map=None, ignore_progdeps=False):
+def libdeps_emitter(target, source, env, debug=False, builder=None, visibility_map=None,
+ ignore_progdeps=False):
"""SCons emitter that takes values from the LIBDEPS environment variable and
converts them to File node objects, binding correct path information into
those File objects.
@@ -1094,12 +1100,9 @@ def libdeps_emitter(target, source, env, debug=False, builder=None, visibility_m
visibility = dependent[1]
dependent = dependent[0]
- dependentNode = _get_node_with_ixes(
- env, dependent, builder
- )
- _append_direct_libdeps(
- dependentNode, [dependency(target[0], visibility_map[visibility], dependent)]
- )
+ dependentNode = _get_node_with_ixes(env, dependent, builder)
+ _append_direct_libdeps(dependentNode,
+ [dependency(target[0], visibility_map[visibility], dependent)])
if not ignore_progdeps:
for dependent in env.get(Constants.ProgdepsDependents, []):
@@ -1112,12 +1115,9 @@ def libdeps_emitter(target, source, env, debug=False, builder=None, visibility_m
visibility = dependent[1]
dependent = dependent[0]
- dependentNode = _get_node_with_ixes(
- env, dependent, "Program"
- )
- _append_direct_libdeps(
- dependentNode, [dependency(target[0], visibility_map[visibility], dependent)]
- )
+ dependentNode = _get_node_with_ixes(env, dependent, "Program")
+ _append_direct_libdeps(dependentNode,
+ [dependency(target[0], visibility_map[visibility], dependent)])
return target, source
@@ -1157,7 +1157,7 @@ def expand_libdeps_for_link(source, target, env, for_signature):
# immediately turned back on
for switch_flag in getattr(flagged_libdep.libnode.attributes, 'libdeps_switch_flags', []):
if (prev_libdep and switch_flag['on'] in flagged_libdep.prefix_flags
- and switch_flag['off'] in prev_libdep.postfix_flags):
+ and switch_flag['off'] in prev_libdep.postfix_flags):
flagged_libdep.prefix_flags.remove(switch_flag['on'])
prev_libdep.postfix_flags.remove(switch_flag['off'])
@@ -1179,6 +1179,7 @@ def expand_libdeps_for_link(source, target, env, for_signature):
return libdeps_with_flags
+
def generate_libdeps_graph(env):
if env.get('SYMBOLDEPSSUFFIX', None):
@@ -1196,7 +1197,8 @@ def generate_libdeps_graph(env):
str(target_node.abspath),
str(direct_libdep.target_node.abspath),
visibility=int(direct_libdep.dependency_type),
- direct=True)
+ direct=True,
+ )
direct_libdeps.append(direct_libdep.target_node.abspath)
for libdep in _get_libdeps(target_node):
@@ -1207,45 +1209,58 @@ def generate_libdeps_graph(env):
str(target_node.abspath),
str(libdep.abspath),
visibility=int(deptype.Public),
- direct=False)
+ direct=False,
+ )
if env['PLATFORM'] == 'darwin':
sep = ' '
else:
sep = ':'
- ld_path = sep.join([os.path.dirname(str(libdep)) for libdep in _get_libdeps(target_node)])
- symbol_deps.append(env.Command(
- target=symbols_file,
- source=target_node,
- action=SCons.Action.Action(
- f'{find_symbols} $SOURCE "{ld_path}" $TARGET',
- "Generating $SOURCE symbol dependencies" if not env['VERBOSE'] else "")))
+ ld_path = sep.join(
+ [os.path.dirname(str(libdep)) for libdep in _get_libdeps(target_node)])
+ symbol_deps.append(
+ env.Command(
+ target=symbols_file,
+ source=target_node,
+ action=SCons.Action.Action(
+ f'{find_symbols} $SOURCE "{ld_path}" $TARGET',
+ "Generating $SOURCE symbol dependencies" if not env['VERBOSE'] else ""),
+ ))
def write_graph_hash(env, target, source):
with open(target[0].path, 'w') as f:
- json_str = json.dumps(networkx.readwrite.json_graph.node_link_data(env.GetLibdepsGraph()), sort_keys=True).encode('utf-8')
+ json_str = json.dumps(
+ networkx.readwrite.json_graph.node_link_data(env.GetLibdepsGraph()),
+ sort_keys=True).encode('utf-8')
f.write(hashlib.sha256(json_str).hexdigest())
- graph_hash = env.Command(target="$BUILD_DIR/libdeps/graph_hash.sha256",
- source=symbol_deps,
- action=SCons.Action.FunctionAction(
- write_graph_hash,
- {"cmdstr": None}))
- env.Depends(graph_hash, [
- env.File("#SConstruct")] +
- glob.glob("**/SConscript", recursive=True) +
- [os.path.abspath(__file__),
- env.File('$BUILD_DIR/mongo/util/version_constants.h')])
+ graph_hash = env.Command(
+ target="$BUILD_DIR/libdeps/graph_hash.sha256",
+ source=symbol_deps,
+ action=SCons.Action.FunctionAction(
+ write_graph_hash,
+ {"cmdstr": None},
+ ),
+ )
+ env.Depends(
+ graph_hash,
+ [env.File("#SConstruct")] + glob.glob("**/SConscript", recursive=True) +
+ [os.path.abspath(__file__),
+ env.File('$BUILD_DIR/mongo/util/version_constants.h')],
+ )
graph_node = env.Command(
target=env.get('LIBDEPS_GRAPH_FILE', None),
source=symbol_deps,
action=SCons.Action.FunctionAction(
generate_graph,
- {"cmdstr": "Generating libdeps graph"}))
+ {"cmdstr": "Generating libdeps graph"},
+ ),
+ )
env.Depends(graph_node, [graph_hash] + env.Glob("#buildscripts/libdeps/libdeps/*"))
+
def generate_graph(env, target, source):
libdeps_graph = env.GetLibdepsGraph()
@@ -1270,7 +1285,8 @@ def generate_graph(env, target, source):
libdeps_graph.add_edges_from([(
from_node,
to_node,
- {EdgeProps.symbols.name: " ".join(symbols[libdep]) })])
+ {EdgeProps.symbols.name: " ".join(symbols[libdep])},
+ )])
node = env.File(str(symbol_deps_file)[:-len(env['SYMBOLDEPSSUFFIX'])])
add_node_from(env, node)
@@ -1305,7 +1321,8 @@ def setup_environment(env, emitting_shared=False, debug='off', linting='on'):
# configured.
env['LIBDEPS_GRAPH_ALIAS'] = env.Alias(
'generate-libdeps-graph',
- "${BUILD_DIR}/libdeps/libdeps.graphml")[0]
+ "${BUILD_DIR}/libdeps/libdeps.graphml",
+ )[0]
if str(env['LIBDEPS_GRAPH_ALIAS']) in COMMAND_LINE_TARGETS:
@@ -1323,24 +1340,26 @@ def setup_environment(env, emitting_shared=False, debug='off', linting='on'):
if not env.WhereIs(bin):
env.FatalError(f"'{bin}' not found, Libdeps graph generation requires {bin}.")
-
# The find_symbols binary is a small fast C binary which will extract the missing
# symbols from the target library, and discover what linked libraries supply it. This
# setups the binary to be built.
find_symbols_env = env.Clone()
- find_symbols_env.VariantDir('${BUILD_DIR}/libdeps', 'buildscripts/libdeps', duplicate = 0)
+ find_symbols_env.VariantDir('${BUILD_DIR}/libdeps', 'buildscripts/libdeps', duplicate=0)
find_symbols_node = find_symbols_env.Program(
target='${BUILD_DIR}/libdeps/find_symbols',
source=['${BUILD_DIR}/libdeps/find_symbols.c'],
- CFLAGS=['-O3'])
+ CFLAGS=['-O3'],
+ )
# Here we are setting up some functions which will return single instance of the
# network graph and symbol deps list. We also setup some environment variables
# which are used along side the functions.
symbol_deps = []
+
def append_symbol_deps(env, symbol_deps_file):
env.Depends(env['LIBDEPS_GRAPH_FILE'], symbol_deps_file[0])
symbol_deps.append(symbol_deps_file)
+
env.AddMethod(append_symbol_deps, "AppendSymbolDeps")
env['LIBDEPS_SYMBOL_DEP_FILES'] = symbol_deps
@@ -1349,14 +1368,19 @@ def setup_environment(env, emitting_shared=False, debug='off', linting='on'):
env["SYMBOLDEPSSUFFIX"] = '.symbol_deps'
libdeps_graph = LibdepsGraph()
- libdeps_graph.graph['invocation'] = " ".join([env['ESCAPE'](str(sys.executable))] + [env['ESCAPE'](arg) for arg in sys.argv])
+ libdeps_graph.graph['invocation'] = " ".join([env['ESCAPE'](str(sys.executable))] +
+ [env['ESCAPE'](arg) for arg in sys.argv])
libdeps_graph.graph['git_hash'] = env['MONGO_GIT_HASH']
libdeps_graph.graph['graph_schema_version'] = env['LIBDEPS_GRAPH_SCHEMA_VERSION']
libdeps_graph.graph['build_dir'] = env.Dir('$BUILD_DIR').path
- libdeps_graph.graph['deptypes'] = json.dumps({key: value[0] for key, value in deptype.__members__.items() if isinstance(value, tuple)})
+ libdeps_graph.graph['deptypes'] = json.dumps({
+ key: value[0]
+ for key, value in deptype.__members__.items() if isinstance(value, tuple)
+ })
def get_libdeps_graph(env):
return libdeps_graph
+
env.AddMethod(get_libdeps_graph, "GetLibdepsGraph")
# Now we will setup an emitter, and an additional action for several
@@ -1365,7 +1389,7 @@ def setup_environment(env, emitting_shared=False, debug='off', linting='on'):
if "conftest" not in str(target[0]):
symbol_deps_file = env.File(str(target[0]) + env['SYMBOLDEPSSUFFIX'])
env.Depends(symbol_deps_file, '${BUILD_DIR}/libdeps/find_symbols')
- env.AppendSymbolDeps((symbol_deps_file,target[0]))
+ env.AppendSymbolDeps((symbol_deps_file, target[0]))
return target, source
@@ -1375,40 +1399,40 @@ def setup_environment(env, emitting_shared=False, debug='off', linting='on'):
new_emitter = SCons.Builder.ListEmitter([base_emitter, libdeps_graph_emitter])
builder.emitter = new_emitter
-
env.Append(
LIBDEPS_LIBEMITTER=partial(
libdeps_emitter,
debug=debug,
- builder="StaticLibrary"),
+ builder="StaticLibrary",
+ ),
LIBEMITTER=lambda target, source, env: env["LIBDEPS_LIBEMITTER"](target, source, env),
LIBDEPS_SHAREMITTER=partial(
libdeps_emitter,
debug=debug,
- builder="SharedArchive", ignore_progdeps=True),
+ builder="SharedArchive",
+ ignore_progdeps=True,
+ ),
SHAREMITTER=lambda target, source, env: env["LIBDEPS_SHAREMITTER"](target, source, env),
LIBDEPS_SHLIBEMITTER=partial(
libdeps_emitter,
debug=debug,
builder="SharedLibrary",
- visibility_map=dependency_visibility_honored
+ visibility_map=dependency_visibility_honored,
),
SHLIBEMITTER=lambda target, source, env: env["LIBDEPS_SHLIBEMITTER"](target, source, env),
LIBDEPS_PROGEMITTER=partial(
libdeps_emitter,
debug=debug,
- builder="SharedLibrary" if emitting_shared else "StaticLibrary"
+ builder="SharedLibrary" if emitting_shared else "StaticLibrary",
),
PROGEMITTER=lambda target, source, env: env["LIBDEPS_PROGEMITTER"](target, source, env),
)
env["_LIBDEPS_LIBS_FOR_LINK"] = expand_libdeps_for_link
- env["_LIBDEPS_LIBS"] = (
- "$LINK_LIBGROUP_START "
- "$_LIBDEPS_LIBS_FOR_LINK "
- "$LINK_LIBGROUP_END "
- )
+ env["_LIBDEPS_LIBS"] = ("$LINK_LIBGROUP_START "
+ "$_LIBDEPS_LIBS_FOR_LINK "
+ "$LINK_LIBGROUP_END ")
env.Prepend(_LIBFLAGS="$_LIBDEPS_TAGS $_LIBDEPS $_SYSLIBDEPS ")
for builder_name in ("Program", "SharedLibrary", "LoadableModule", "SharedArchive"):
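
Most of the libdeps.py changes above are mechanical reformatting, but the hunks around linter_rule show the decorator-based rule registry the linter relies on: the decorator records each rule function in a dict, and lint_libdeps later iterates that registry over every libdep. A standalone sketch of the pattern, with illustrative names rather than the exact libdeps internals:

def make_rule_registry():
    # The decorator stores each rule by name and returns it unchanged, so
    # decorated functions remain ordinary functions.
    funcs = {}

    def linter_rule_func(func):
        funcs[func.__name__] = func
        return func

    linter_rule_func.all = funcs
    return linter_rule_func

linter_rule = make_rule_registry()

@linter_rule
def linter_rule_no_dups(libdep):
    return f"no-dups check on {libdep}"

@linter_rule
def linter_rule_leaf_node_no_deps(libdep):
    return f"leaf-node check on {libdep}"

# lint_libdeps-style dispatch: run every registered rule against each libdep.
for libdep in ["libfoo", "libbar"]:
    for rule_name in linter_rule.all:
        print(linter_rule.all[rule_name](libdep))
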
diff --git a/site_scons/mongo/__init__.py b/site_scons/mongo/__init__.py
index c8714cf68dd..cc744517b74 100644
--- a/site_scons/mongo/__init__.py
+++ b/site_scons/mongo/__init__.py
@@ -4,11 +4,13 @@
import bisect
+
def print_build_failures():
from SCons.Script import GetBuildFailures
for bf in GetBuildFailures():
print("%s failed: %s" % (bf.node, bf.errstr))
+
def insort_wrapper(target_list, target_string):
"""
Removes instances of empty list inside the list before handing it to insort.
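
insort_wrapper wraps bisect.insort; per its docstring it first drops empty-list entries. A hedged sketch of that behavior (the function body itself falls outside this hunk):

import bisect

# Drop empty-list entries, then insert while keeping the list sorted.
target_list = ["aggregation_test", [], "sharding_test"]
target_list = [t for t in target_list if t != []]
bisect.insort(target_list, "repl_test")
print(target_list)  # ['aggregation_test', 'repl_test', 'sharding_test']
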
diff --git a/site_scons/mongo/generators.py b/site_scons/mongo/generators.py
index e2b401a5eae..da166ad875f 100644
--- a/site_scons/mongo/generators.py
+++ b/site_scons/mongo/generators.py
@@ -4,6 +4,7 @@ import hashlib
# Default and alternative generator definitions go here.
+
# This is the key/value mapping that will be returned by the buildInfo command and
# printed by the --version command-line option to mongod.
# Each mapped value is in turn a dict consisting of:
@@ -77,7 +78,7 @@ def default_buildinfo_environment_data():
),
)
return {
- k:{'key': k, 'value': v, 'inBuildInfo': ibi, 'inVersion': iv}
+ k: {'key': k, 'value': v, 'inBuildInfo': ibi, 'inVersion': iv}
for k, v, ibi, iv in data
}
@@ -109,11 +110,11 @@ def default_variant_dir_generator(target, source, env, for_signature):
# If our option hash yields a well known hash, replace it with its name.
known_variant_hashes = {
- '343e6678' : 'debug',
- '85fcf9b0' : 'opt',
- '981ce870' : 'debug',
- '9fface73' : 'optdebug',
- 'c52b1cc3' : 'opt',
+ '343e6678': 'debug',
+ '85fcf9b0': 'opt',
+ '981ce870': 'debug',
+ '9fface73': 'optdebug',
+ 'c52b1cc3': 'opt',
}
return known_variant_hashes.get(variant_dir, variant_dir)
@@ -122,4 +123,5 @@ def default_variant_dir_generator(target, source, env, for_signature):
def os_specific_variant_dir_generator(target, source, env, for_signature):
return '-'.join([
env['TARGET_OS'],
- default_variant_dir_generator(target, source, env, for_signature)])
+ default_variant_dir_generator(target, source, env, for_signature),
+ ])
diff --git a/site_scons/mongo/install_actions.py b/site_scons/mongo/install_actions.py
index c0eeac3a84e..8f3743299d3 100644
--- a/site_scons/mongo/install_actions.py
+++ b/site_scons/mongo/install_actions.py
@@ -5,30 +5,34 @@ import shutil
import stat
-
def _copy(src, dst):
shutil.copy2(src, dst)
st = os.stat(src)
os.chmod(dst, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
+
def _symlink(src, dst):
os.symlink(os.path.relpath(src, os.path.dirname(dst)), dst)
+
def _hardlink(src, dst):
try:
os.link(src, dst)
except:
_copy(src, dst)
+
available_actions = {
- "copy" : _copy,
- "hardlink" : _hardlink,
- "symlink" : _symlink,
+ "copy": _copy,
+ "hardlink": _hardlink,
+ "symlink": _symlink,
}
+
class _CopytreeError(EnvironmentError):
pass
+
def _generate_install_actions(base_action):
# This is a patched version of shutil.copytree from python 2.5. It
@@ -81,7 +85,6 @@ def _generate_install_actions(base_action):
if errors:
raise _CopytreeError(errors)
-
#
# Functions doing the actual work of the Install Builder.
#
@@ -92,7 +95,9 @@ def _generate_install_actions(base_action):
if os.path.isdir(source):
if os.path.exists(dest):
if not os.path.isdir(dest):
- raise SCons.Errors.UserError("cannot overwrite non-directory `%s' with a directory `%s'" % (str(dest), str(source)))
+ raise SCons.Errors.UserError(
+ "cannot overwrite non-directory `%s' with a directory `%s'" % (str(dest),
+ str(source)))
else:
parent = os.path.split(dest)[0]
if not os.path.exists(parent):
@@ -112,7 +117,8 @@ def _generate_install_actions(base_action):
required symlinks."""
if os.path.isdir(source):
- raise SCons.Errors.UserError("cannot install directory `%s' as a version library" % str(source) )
+ raise SCons.Errors.UserError(
+ "cannot install directory `%s' as a version library" % str(source))
else:
# remove the link if it is already there
try:
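
The copy and hardlink actions in this file are short enough to restate whole; the only liberty taken in this sketch is narrowing the bare except: to OSError:

import os
import shutil
import stat

def _copy(src, dst):
    # Copy contents and metadata, then force the destination writable so a
    # later reinstall can overwrite it.
    shutil.copy2(src, dst)
    st = os.stat(src)
    os.chmod(dst, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)

def _hardlink(src, dst):
    # Prefer a hardlink; fall back to a copy when linking fails (for example
    # across filesystems).
    try:
        os.link(src, dst)
    except OSError:
        _copy(src, dst)
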
diff --git a/site_scons/mongo/pip_requirements.py b/site_scons/mongo/pip_requirements.py
index 5fd9b947b02..e7963b5a69a 100644
--- a/site_scons/mongo/pip_requirements.py
+++ b/site_scons/mongo/pip_requirements.py
@@ -28,11 +28,9 @@ def verify_requirements(requirements_file: str, silent: bool = False):
print(*args, **kwargs)
def raiseSuggestion(ex, pip_pkg):
- raise MissingRequirements(
- f"{ex}\n"
- f"Try running:\n"
- f" {sys.executable} -m pip install {pip_pkg}"
- ) from ex
+ raise MissingRequirements(f"{ex}\n"
+ f"Try running:\n"
+ f" {sys.executable} -m pip install {pip_pkg}") from ex
# Import the prequisites for this function, providing hints on failure.
try:
@@ -65,8 +63,8 @@ def verify_requirements(requirements_file: str, silent: bool = False):
except pkg_resources.ResolutionError as ex:
raiseSuggestion(
ex,
- f"-r {requirements_file}")
-
+ f"-r {requirements_file}",
+ )
verbose("Resolved to these distributions:")
for dist in sorted(set([f" {dist.key} {dist.version}" for dist in dists])):
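
The raiseSuggestion helper reformatted above chains the original resolution failure onto a message containing a copy-pasteable pip command. A self-contained sketch (the failing distribution and requirements path are illustrative):

import sys

class MissingRequirements(Exception):
    """Raised when a pip requirement cannot be resolved."""

def raiseSuggestion(ex, pip_pkg):
    raise MissingRequirements(f"{ex}\n"
                              f"Try running:\n"
                              f"  {sys.executable} -m pip install {pip_pkg}") from ex

try:
    raiseSuggestion(RuntimeError("distribution 'psutil' was not found"),
                    "-r buildscripts/requirements.txt")
except MissingRequirements as err:
    print(err)
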
diff --git a/site_scons/site_tools/abilink.py b/site_scons/site_tools/abilink.py
index 3670ec24166..f57f63a711d 100644
--- a/site_scons/site_tools/abilink.py
+++ b/site_scons/site_tools/abilink.py
@@ -71,15 +71,15 @@ def _add_scanner(builder):
return new_results
builder.target_scanner = SCons.Scanner.Scanner(
- function=new_scanner, path_function=path_function
+ function=new_scanner,
+ path_function=path_function,
)
def _add_action(builder):
actions = builder.action
builder.action = actions + SCons.Action.Action(
- "$ABIDW --no-show-locs $TARGET | md5sum > ${TARGET}.abidw"
- )
+ "$ABIDW --no-show-locs $TARGET | md5sum > ${TARGET}.abidw")
def exists(env):
diff --git a/site_scons/site_tools/auto_archive.py b/site_scons/site_tools/auto_archive.py
index b3c9ddd99a4..91cd0c282c7 100644
--- a/site_scons/site_tools/auto_archive.py
+++ b/site_scons/site_tools/auto_archive.py
@@ -76,9 +76,7 @@ def add_package_name_alias(env, component, role, name):
"""Add a package name mapping for the combination of component and role."""
# Verify we didn't get a None or empty string for any argument
if not name:
- raise Exception(
- "when setting a package name alias must provide a name parameter"
- )
+ raise Exception("when setting a package name alias must provide a name parameter")
if not component:
raise Exception("No component provided for package name alias")
if not role:
@@ -90,7 +88,8 @@ def get_package_name(env, component, role):
"""Return the package file name for the component and role combination."""
basename = env[PACKAGE_ALIAS_MAP].get(
# TODO: silent roles shouldn't be included here
- (component, role), "{component}-{role}".format(component=component, role=role)
+ (component, role),
+ "{component}-{role}".format(component=component, role=role),
)
return basename
@@ -234,11 +233,7 @@ def archive_builder(source, target, env, for_signature):
# Collect all the installed files for our entry. This is doing a pure DAG
# walk idea of what should be. So we filter out any that are not in the
# installed set.
- transitive_files = [
- f for f in
- collect_transitive_files(env, entry)
- if f in installed
- ]
+ transitive_files = [f for f in collect_transitive_files(env, entry) if f in installed]
if not transitive_files:
return []
@@ -258,7 +253,7 @@ def archive_builder(source, target, env, for_signature):
return "{prefix} {files}".format(
prefix=command_prefix,
- files=" ".join(relative_files)
+ files=" ".join(relative_files),
)
@@ -274,11 +269,11 @@ def generate(env):
action=SCons.Action.CommandGeneratorAction(
archive_builder,
{"cmdstr": "Building package ${TARGETS[0]} from ${SOURCES[1:]}"},
- )
- )
+ ))
env.Append(BUILDERS={"AutoArchive": bld})
env["AUTO_ARCHIVE_TARBALL_SUFFIX"] = env.get(
- "AUTO_ARCHIVE_TARBALL_SUFFIX", "tar.gz"
+ "AUTO_ARCHIVE_TARBALL_SUFFIX",
+ "tar.gz",
)
env["AUTO_ARCHIVE_ZIP_SUFFIX"] = env.get("AUTO_ARCHIVE_ZIP_SUFFIX", "zip")
env[PACKAGE_ALIAS_MAP] = {}
@@ -297,5 +292,4 @@ def generate(env):
"tar": (auto_archive_gen(env, make_archive_script, "tar"), False),
"zip": (auto_archive_gen(env, make_archive_script, "zip"), False),
"archive": (auto_archive_gen(env, make_archive_script, "auto"), False),
- }
- )
+ })
diff --git a/site_scons/site_tools/auto_install_binaries.py b/site_scons/site_tools/auto_install_binaries.py
index c6429ad396f..55488465d0e 100644
--- a/site_scons/site_tools/auto_install_binaries.py
+++ b/site_scons/site_tools/auto_install_binaries.py
@@ -42,8 +42,10 @@ ROLE_DECLARATIONS = "AIB_ROLE_DECLARATIONS"
SUFFIX_MAP = "AIB_SUFFIX_MAP"
TASKS = "AIB_TASKS"
-
-SuffixMap = namedtuple("SuffixMap", ["directory", "default_role"],)
+SuffixMap = namedtuple(
+ "SuffixMap",
+ ["directory", "default_role"],
+)
class RoleInfo:
@@ -98,24 +100,17 @@ def declare_roles(env, roles, base_role=None, meta_role=None):
for role in roles:
for d in role.dependencies:
if d not in role_names:
- raise Exception(
- "Role dependency '{}' does not name a declared role".format(d)
- )
+ raise Exception("Role dependency '{}' does not name a declared role".format(d))
if isinstance(base_role, str):
if base_role not in role_names:
raise Exception(
- "A base_role argument was provided but it does not name a declared role"
- )
+ "A base_role argument was provided but it does not name a declared role")
elif isinstance(base_role, DeclaredRole):
if base_role not in roles:
- raise Exception(
- "A base_role argument was provided but it is not a declared role"
- )
+ raise Exception("A base_role argument was provided but it is not a declared role")
elif base_role is not None:
- raise Exception(
- "The base_role argument must be a string name of a role or a role object"
- )
+ raise Exception("The base_role argument must be a string name of a role or a role object")
else:
# Set it to something falsey
base_role = str()
@@ -123,17 +118,12 @@ def declare_roles(env, roles, base_role=None, meta_role=None):
if isinstance(meta_role, str):
if meta_role not in role_names:
raise Exception(
- "A meta_role argument was provided but it does not name a declared role"
- )
+ "A meta_role argument was provided but it does not name a declared role")
elif isinstance(meta_role, DeclaredRole):
if meta_role not in roles:
- raise Exception(
- "A meta_role argument was provided but it is not a declared role"
- )
+ raise Exception("A meta_role argument was provided but it is not a declared role")
elif meta_role is not None:
- raise Exception(
- "The meta_role argument must be a string name of a role or a role object"
- )
+ raise Exception("The meta_role argument must be a string name of a role or a role object")
else:
# Set it to something falsy
meta_role = str()
@@ -199,12 +189,7 @@ def get_alias_map_entry(env, component, role):
r_entry.dependencies.add(base_c_entry)
meta_role = env.get(META_ROLE)
- if (
- meta_role
- and role != meta_role
- and meta_component
- and component != meta_component
- ):
+ if (meta_role and role != meta_role and meta_component and component != meta_component):
meta_r_entry = get_alias_map_entry(env, component, meta_role)
meta_c_r_entry = get_alias_map_entry(env, meta_component, meta_role)
meta_c_r_entry.dependencies.add(meta_r_entry)
@@ -259,23 +244,15 @@ def scan_for_transitive_install(node, env, _path):
if component_base_entry.files:
results.update(component_base_entry.files)
- if (
- base_role
- and base_component
- and component != base_component
- and role != base_role
- ):
+ if (base_role and base_component and component != base_component and role != base_role):
base_base_entry = alias_map[base_component][base_role]
if base_base_entry.files:
results.update(base_base_entry.files)
- installed_children = set(
- grandchild
- for child in node.children()
- for direct_children in child.children()
- for grandchild in direct_children.get_executor().get_all_targets()
- if direct_children.get_executor() and grandchild.has_builder()
- )
+ installed_children = set(grandchild for child in node.children()
+ for direct_children in child.children()
+ for grandchild in direct_children.get_executor().get_all_targets()
+ if direct_children.get_executor() and grandchild.has_builder())
for child in installed_children:
auto_installed_files = get_auto_installed_files(env, child)
@@ -324,11 +301,8 @@ def tag_components(env, target, **kwargs):
raise Exception("AIB_COMPONENT must be a string and contain no whitespace.")
if component is None:
- raise Exception(
- "AIB_COMPONENT must be provided; untagged targets: {}".format(
- [t.path for t in target]
- )
- )
+ raise Exception("AIB_COMPONENT must be provided; untagged targets: {}".format(
+ [t.path for t in target]))
if role is None:
raise Exception("AIB_ROLE was not provided.")
@@ -344,11 +318,8 @@ def tag_components(env, target, **kwargs):
# component or base component. These cause dependency cycles because
# get_alias_map_entry will do that wiring for us then we will try to
# map them back on themselves in our loop.
- if (
- component != env.get(BASE_COMPONENT)
- and role != env.get(META_ROLE)
- and component != env.get(META_COMPONENT)
- ):
+ if (component != env.get(BASE_COMPONENT) and role != env.get(META_ROLE)
+ and component != env.get(META_COMPONENT)):
for component in kwargs.get(REVERSE_COMPONENT_DEPENDENCIES, []):
component_dep = get_alias_map_entry(env, component, role)
component_dep.dependencies.add(entry)
@@ -386,9 +357,7 @@ def auto_install_pseudobuilder(env, target, source, **kwargs):
auto_install_mapping = env[SUFFIX_MAP].get(suffix)
if not auto_install_mapping:
- raise Exception(
- "No target provided and no auto install mapping found for:", str(s)
- )
+ raise Exception("No target provided and no auto install mapping found for:", str(s))
target_for_source = auto_install_mapping.directory
@@ -449,14 +418,10 @@ def finalize_install_dependencies(env):
alias_name = generate_alias_name(env, component, role, task)
alias = env.Alias(alias_name, func(env, component, role))
if generate_dependent_aliases:
- dependent_aliases = env.Flatten(
- [
- env.Alias(
- generate_alias_name(env, d.component, d.role, task)
- )
- for d in info.dependencies
- ]
- )
+ dependent_aliases = env.Flatten([
+ env.Alias(generate_alias_name(env, d.component, d.role, task))
+ for d in info.dependencies
+ ])
env.Alias(alias, dependent_aliases)
@@ -499,11 +464,8 @@ def add_suffix_mapping(env, suffix, role=None):
"""Map suffix to role"""
if isinstance(suffix, str):
if role not in env[ROLE_DECLARATIONS]:
- raise Exception(
- "target {} is not a known role available roles are {}".format(
- role, env[ROLE_DECLARATIONS].keys()
- )
- )
+ raise Exception("target {} is not a known role available roles are {}".format(
+ role, env[ROLE_DECLARATIONS].keys()))
env[SUFFIX_MAP][env.subst(suffix)] = role
if not isinstance(suffix, dict):
@@ -512,11 +474,8 @@ def add_suffix_mapping(env, suffix, role=None):
for _, mapping in suffix.items():
role = mapping.default_role
if role not in env[ROLE_DECLARATIONS]:
- raise Exception(
- "target {} is not a known role. Available roles are {}".format(
- target, env[ROLE_DECLARATIONS].keys()
- )
- )
+ raise Exception("target {} is not a known role. Available roles are {}".format(
+ target, env[ROLE_DECLARATIONS].keys()))
env[SUFFIX_MAP].update({env.subst(key): value for key, value in suffix.items()})
@@ -536,6 +495,7 @@ def list_components(env, **kwargs):
for key in env[ALIAS_MAP]:
print("\t", key)
+
def list_hierarchical_aib_recursive(mapping, counter=0):
if counter == 0:
print(" " * counter, mapping.id)
@@ -582,7 +542,9 @@ def list_targets():
# dedup and sort targets
targets = sorted(list(set(targets)))
- print("The following are AIB targets. Note that runtime role is implied if not specified. For example, install-mongod")
+ print(
+ "The following are AIB targets. Note that runtime role is implied if not specified. For example, install-mongod"
+ )
tasks_str = ','.join(tasks)
print(f"TASK={{{tasks_str}}}")
roles_str = ','.join(roles)
@@ -618,14 +580,13 @@ def generate(env): # pylint: disable=too-many-statements
env[SUFFIX_MAP] = {}
env[ALIAS_MAP] = defaultdict(dict)
- env.AppendUnique(
- AIB_TASKS={
- "install": auto_install_task,
- }
- )
+ env.AppendUnique(AIB_TASKS={
+ "install": auto_install_task,
+ })
env.AddMethod(
- scan_for_transitive_install_pseudobuilder, "GetTransitivelyInstalledFiles"
+ scan_for_transitive_install_pseudobuilder,
+ "GetTransitivelyInstalledFiles",
)
env.AddMethod(get_role_declaration, "GetRoleDeclaration")
env.AddMethod(get_auto_installed_files, "GetAutoInstalledFiles")
@@ -664,5 +625,6 @@ def generate(env): # pylint: disable=too-many-statements
assert base_install_builder.target_scanner is None
base_install_builder.target_scanner = SCons.Scanner.Scanner(
- function=scan_for_transitive_install, path_function=None
+ function=scan_for_transitive_install,
+ path_function=None,
)
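
The SuffixMap namedtuple reformatted at the top of this file drives auto-install: a target's suffix selects an install directory and a default role, and a missing mapping produces the exception seen in auto_install_pseudobuilder. Sketch with illustrative mappings:

import os
from collections import namedtuple

SuffixMap = namedtuple("SuffixMap", ["directory", "default_role"])

suffix_map = {
    ".so": SuffixMap(directory="lib", default_role="runtime"),
    ".a": SuffixMap(directory="lib", default_role="dev"),
}

def auto_install_directory(path):
    _, suffix = os.path.splitext(path)
    mapping = suffix_map.get(suffix)
    if not mapping:
        raise Exception("No target provided and no auto install mapping found for:", path)
    return mapping.directory

print(auto_install_directory("libmongocrypt.so"))  # lib
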
diff --git a/site_scons/site_tools/ccache.py b/site_scons/site_tools/ccache.py
index 2a4b89015d5..dc7ca4cd1e3 100644
--- a/site_scons/site_tools/ccache.py
+++ b/site_scons/site_tools/ccache.py
@@ -75,7 +75,9 @@ def exists(env):
if validated:
env['CCACHE_VERSION'] = ccache_version
else:
- print(f"Error: failed to verify ccache version >= {_ccache_version_min}, found {ccache_version}")
+ print(
+ f"Error: failed to verify ccache version >= {_ccache_version_min}, found {ccache_version}"
+ )
return validated
@@ -147,10 +149,8 @@ def generate(env):
# compiler parameter and differences in the file need to be accounted for in the
# hash result to prevent erroneous cache hits.
if "CCACHE_EXTRAFILES" in env and env["CCACHE_EXTRAFILES"]:
- env["ENV"]["CCACHE_EXTRAFILES"] = ":".join([
- denyfile.path
- for denyfile in env["CCACHE_EXTRAFILES"]
- ])
+ env["ENV"]["CCACHE_EXTRAFILES"] = ":".join(
+ [denyfile.path for denyfile in env["CCACHE_EXTRAFILES"]])
# Make a generator to expand to CCACHE in the case where we are
# not a conftest. We don't want to use ccache for configure tests
@@ -165,6 +165,7 @@ def generate(env):
if "conftest" not in str(target[0]):
return '$CCACHE'
return ''
+
env['CCACHE_GENERATOR'] = ccache_generator
# Add ccache to the relevant command lines. Wrap the reference to
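
The CCACHE_EXTRAFILES hunk joins the deny-file paths with ":" so ccache folds their contents into every compilation's hash. Sketch, with a stand-in for SCons file nodes:

class FakeNode:
    # Stand-in for a SCons File node; only .path matters here.
    def __init__(self, path):
        self.path = path

env = {
    "CCACHE_EXTRAFILES": [FakeNode("build/denylist_a.h"),
                          FakeNode("build/denylist_b.h")],
    "ENV": {},
}

if "CCACHE_EXTRAFILES" in env and env["CCACHE_EXTRAFILES"]:
    env["ENV"]["CCACHE_EXTRAFILES"] = ":".join(
        [denyfile.path for denyfile in env["CCACHE_EXTRAFILES"]])

print(env["ENV"]["CCACHE_EXTRAFILES"])  # build/denylist_a.h:build/denylist_b.h
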
diff --git a/site_scons/site_tools/compilation_db.py b/site_scons/site_tools/compilation_db.py
index 833be4a7c22..7e26b91d258 100644
--- a/site_scons/site_tools/compilation_db.py
+++ b/site_scons/site_tools/compilation_db.py
@@ -142,7 +142,11 @@ def WriteCompilationDb(target, source, env):
with open(str(target[0]), "w") as target_file:
json.dump(
- entries, target_file, sort_keys=True, indent=4, separators=(",", ": ")
+ entries,
+ target_file,
+ sort_keys=True,
+ indent=4,
+ separators=(",", ": "),
)
@@ -155,7 +159,8 @@ def generate(env, **kwargs):
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
env["COMPILATIONDB_COMSTR"] = kwargs.get(
- "COMPILATIONDB_COMSTR", "Building compilation database $TARGET"
+ "COMPILATIONDB_COMSTR",
+ "Building compilation database $TARGET",
)
components_by_suffix = itertools.chain(
@@ -181,18 +186,19 @@ def generate(env, **kwargs):
# Assumes a dictionary emitter
emitter = builder.emitter[suffix]
- builder.emitter[suffix] = SCons.Builder.ListEmitter(
- [emitter, makeEmitCompilationDbEntry(command),]
- )
+ builder.emitter[suffix] = SCons.Builder.ListEmitter([
+ emitter,
+ makeEmitCompilationDbEntry(command),
+ ])
env["BUILDERS"]["__COMPILATIONDB_Entry"] = SCons.Builder.Builder(
- action=SCons.Action.Action(CompilationDbEntryAction, None),
- )
+ action=SCons.Action.Action(CompilationDbEntryAction, None), )
env["BUILDERS"]["__COMPILATIONDB_Database"] = SCons.Builder.Builder(
action=SCons.Action.Action(WriteCompilationDb, "$COMPILATIONDB_COMSTR"),
target_scanner=SCons.Scanner.Scanner(
- function=ScanCompilationDb, node_class=None
+ function=ScanCompilationDb,
+ node_class=None,
),
)
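
WriteCompilationDb ends by dumping the collected entries with exactly the json.dump arguments shown above. A runnable sketch with one illustrative compile_commands.json record:

import json

entries = [{
    "directory": "/home/user/mongo/build",
    "command": "g++ -o foo.o -c foo.cpp",
    "file": "foo.cpp",
}]

with open("compile_commands.json", "w") as target_file:
    json.dump(
        entries,
        target_file,
        sort_keys=True,
        indent=4,
        separators=(",", ": "),
    )
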
diff --git a/site_scons/site_tools/distsrc.py b/site_scons/site_tools/distsrc.py
index 95200775bfd..83f47f2ab3f 100644
--- a/site_scons/site_tools/distsrc.py
+++ b/site_scons/site_tools/distsrc.py
@@ -61,7 +61,10 @@ class DistSrcArchive:
)
elif filename.endswith("zip"):
return DistSrcZipArchive(
- "zip", zipfile.ZipFile(filename, "a"), filename, "a",
+ "zip",
+ zipfile.ZipFile(filename, "a"),
+ filename,
+ "a",
)
def close(self):
@@ -89,13 +92,13 @@ class DistSrcTarArchive(DistSrcArchive):
)
def append_file_contents(
- self,
- filename,
- file_contents,
- mtime=None,
- mode=0o644,
- uname="root",
- gname="root",
+ self,
+ filename,
+ file_contents,
+ mtime=None,
+ mode=0o644,
+ uname="root",
+ gname="root",
):
if mtime is None:
mtime = time.time()
@@ -109,7 +112,9 @@ class DistSrcTarArchive(DistSrcArchive):
if self.archive_mode == "r":
self.archive_file.close()
self.archive_file = tarfile.open(
- self.archive_name, "a", format=tarfile.PAX_FORMAT,
+ self.archive_name,
+ "a",
+ format=tarfile.PAX_FORMAT,
)
self.archive_mode = "a"
self.archive_file.addfile(file_metadata, fileobj=file_buf)
@@ -141,13 +146,13 @@ class DistSrcZipArchive(DistSrcArchive):
)
def append_file_contents(
- self,
- filename,
- file_contents,
- mtime=None,
- mode=0o644,
- uname="root",
- gname="root",
+ self,
+ filename,
+ file_contents,
+ mtime=None,
+ mode=0o644,
+ uname="root",
+ gname="root",
):
if mtime is None:
mtime = time.time()
@@ -187,15 +192,14 @@ def distsrc_action_generator(source, target, env, for_signature):
print("Invalid file format for distsrc. Must be tar or zip file")
env.Exit(1)
- git_cmd = (
- '"%s" archive --format %s --output %s --prefix ${MONGO_DIST_SRC_PREFIX} HEAD'
- % (git_path, target_ext, target[0])
- )
+ git_cmd = ('"%s" archive --format %s --output %s --prefix ${MONGO_DIST_SRC_PREFIX} HEAD' %
+ (git_path, target_ext, target[0]))
return [
SCons.Action.Action(git_cmd, "Running git archive for $TARGET"),
SCons.Action.Action(
- run_distsrc_callbacks, "Running distsrc callbacks for $TARGET"
+ run_distsrc_callbacks,
+ "Running distsrc callbacks for $TARGET",
),
]
@@ -206,9 +210,7 @@ def add_callback(env, fn):
def generate(env, **kwargs):
env.AddMethod(add_callback, "AddDistSrcCallback")
- env["BUILDERS"]["__DISTSRC"] = SCons.Builder.Builder(
- generator=distsrc_action_generator,
- )
+ env["BUILDERS"]["__DISTSRC"] = SCons.Builder.Builder(generator=distsrc_action_generator, )
def DistSrc(env, target, **kwargs):
result = env.__DISTSRC(target=target, source=[], **kwargs)
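
DistSrcTarArchive.append_file_contents, whose signature was reflowed above, builds a TarInfo by hand and appends in-memory bytes to the PAX-format archive. A sketch under the same parameters, with the body condensed from the hunks shown:

import io
import tarfile
import time

def append_file_contents(archive_name, filename, file_contents,
                         mtime=None, mode=0o644, uname="root", gname="root"):
    if mtime is None:
        mtime = time.time()
    data = file_contents.encode("utf-8")
    file_metadata = tarfile.TarInfo(name=filename)
    file_metadata.mtime = mtime
    file_metadata.mode = mode
    file_metadata.uname = uname
    file_metadata.gname = gname
    file_metadata.size = len(data)
    # Reopen in append mode with PAX format, as the hunk above does when the
    # archive had been opened read-only.
    with tarfile.open(archive_name, "a", format=tarfile.PAX_FORMAT) as archive:
        archive.addfile(file_metadata, fileobj=io.BytesIO(data))
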
diff --git a/site_scons/site_tools/forceincludes.py b/site_scons/site_tools/forceincludes.py
index 6d535bf0ba0..7807ca19f7e 100644
--- a/site_scons/site_tools/forceincludes.py
+++ b/site_scons/site_tools/forceincludes.py
@@ -22,6 +22,7 @@
import SCons
+
def _add_scanner(builder):
# We are taking over the target scanner here. If we want to not do
# that we need to invent a ListScanner concept to inject. What if
@@ -35,7 +36,9 @@ def _add_scanner(builder):
# If all nodes could not be resolved, there are missing headers.
if not all(fis):
- missing_headers = [header for node, header in zip(fis, env.get('FORCEINCLUDES')) if not node]
+ missing_headers = [
+ header for node, header in zip(fis, env.get('FORCEINCLUDES')) if not node
+ ]
errstring = f"Could not find force include header(s): {missing_headers} in any path in CPPPATH:\n"
for cpppath in env.get('CPPPATH', []):
errstring += f"\t{env.Dir(cpppath).path}\n"
@@ -60,6 +63,7 @@ def _add_scanner(builder):
argument=builder.source_scanner,
)
+
def generate(env, **kwargs):
if not 'FORCEINCLUDEPREFIX' in env:
if 'msvc' in env.get('TOOLS', []):
@@ -82,11 +86,11 @@ def generate(env, **kwargs):
# would enable discovery.
CCFLAGS=[
'$_FORCEINCLUDES',
- ]
- )
+ ])
for object_builder in SCons.Tool.createObjBuilders(env):
_add_scanner(object_builder)
+
def exists(env):
return True
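
The missing-header diagnostic reformatted above pairs each resolved node with the requested header name and reports the ones whose lookup failed. Sketch with stand-in values:

fis = ["node_a.h", None, "node_c.h"]   # resolved nodes; None means not found
forceincludes = ["a.h", "b.h", "c.h"]  # illustrative FORCEINCLUDES entries

if not all(fis):
    missing_headers = [
        header for node, header in zip(fis, forceincludes) if not node
    ]
    print(f"Could not find force include header(s): {missing_headers} "
          f"in any path in CPPPATH")
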
diff --git a/site_scons/site_tools/git_decider.py b/site_scons/site_tools/git_decider.py
index 0cb219edc5e..b092b743236 100644
--- a/site_scons/site_tools/git_decider.py
+++ b/site_scons/site_tools/git_decider.py
@@ -20,6 +20,7 @@
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
+
def generate(env, **kwargs):
# Grab the existing decider functions out of the environment
diff --git a/site_scons/site_tools/gziptool.py b/site_scons/site_tools/gziptool.py
index 6d6b0099f97..8f136642dd4 100644
--- a/site_scons/site_tools/gziptool.py
+++ b/site_scons/site_tools/gziptool.py
@@ -34,10 +34,13 @@ def GZipAction(target, source, env, **kw):
def generate(env, **kwargs):
env["BUILDERS"]["__GZIPTOOL"] = SCons.Builder.Builder(
- action=SCons.Action.Action(GZipAction, "$GZIPTOOL_COMSTR")
- )
+ action=SCons.Action.Action(
+ GZipAction,
+ "$GZIPTOOL_COMSTR",
+ ))
env["GZIPTOOL_COMSTR"] = kwargs.get(
- "GZIPTOOL_COMSTR", "Compressing $TARGET with gzip"
+ "GZIPTOOL_COMSTR",
+ "Compressing $TARGET with gzip",
)
def GZipTool(env, target, source, **kwargs):
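
GZipAction's body falls outside this hunk; a plausible minimal version, stated purely as an assumption, streams the source through gzip into the target:

import gzip
import shutil

def gzip_action(src_path, dst_path):
    # Assumed behavior: byte-for-byte compression of source into target.
    with open(src_path, "rb") as fin, gzip.open(dst_path, "wb") as fout:
        shutil.copyfileobj(fin, fout)
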
diff --git a/site_scons/site_tools/icecream.py b/site_scons/site_tools/icecream.py
index c95a542f008..f7ce0ecd23c 100644
--- a/site_scons/site_tools/icecream.py
+++ b/site_scons/site_tools/icecream.py
@@ -87,9 +87,8 @@ def generate(env):
# icecc lower then 1.1 supports addfile remapping accidentally
# and above it adds an empty cpuinfo so handle cpuinfo issues for icecream
# below version 1.1
- if (env['ICECREAM_VERSION'] <= parse_version('1.1')
- and env.ToolchainIs("clang")
- and os.path.exists('/proc/cpuinfo')):
+ if (env['ICECREAM_VERSION'] <= parse_version('1.1') and env.ToolchainIs("clang")
+ and os.path.exists('/proc/cpuinfo')):
env.AppendUnique(ICECC_CREATE_ENV_ADDFILES=[('/proc/cpuinfo', '/dev/null')])
# Absoluteify, so we can derive ICERUN
@@ -115,9 +114,10 @@ def generate(env):
env["CXX"] = env.WhereIs("$CXX")
# Set up defaults for configuration options
- env['ICECREAM_TARGET_DIR'] = env.Dir(
- env.get('ICECREAM_TARGET_DIR', '#./.icecream')
- )
+ env['ICECREAM_TARGET_DIR'] = env.Dir(env.get(
+ 'ICECREAM_TARGET_DIR',
+ '#./.icecream',
+ ), )
verbose = env.get('ICECREAM_VERBOSE', False)
env['ICECC_DEBUG'] = env.get('ICECC_DEBUG', False)
@@ -125,9 +125,7 @@ def generate(env):
# environment doesn't need to see or know about. Make a custom env
# that we use consistently from here to where we end up setting
# ICECREAM_RUN_ICECC in the user env.
- setupEnv = env.Clone(
- NINJA_SKIP=True
- )
+ setupEnv = env.Clone(NINJA_SKIP=True)
if 'ICECC_VERSION' in setupEnv and bool(setupEnv['ICECC_VERSION']):
@@ -161,7 +159,8 @@ def generate(env):
source=[setupEnv.Value(quoted)],
action=SCons.Action.Action(
f"{cmdstr} -o $TARGET $ICECC_VERSION_URL",
- "Downloading compiler package from $ICECC_VERSION_URL" if not verbose else str(),
+ "Downloading compiler package from $ICECC_VERSION_URL"
+ if not verbose else str(),
),
)[0]
@@ -171,8 +170,8 @@ def generate(env):
if not icecc_version_file.exists():
raise Exception(
- 'The ICECC_VERSION variable set set to {}, but this file does not exist'.format(icecc_version_file)
- )
+ 'The ICECC_VERSION variable set set to {}, but this file does not exist'.format(
+ icecc_version_file, ))
# This is what we are going to call the file names as known to SCons on disk
setupEnv["ICECC_VERSION_ID"] = "user_provided." + icecc_version_file.name
@@ -180,27 +179,27 @@ def generate(env):
else:
setupEnv["ICECC_COMPILER_TYPE"] = setupEnv.get(
- "ICECC_COMPILER_TYPE", os.path.basename(setupEnv.WhereIs("${CC}"))
+ "ICECC_COMPILER_TYPE",
+ os.path.basename(setupEnv.WhereIs("${CC}")),
)
# This is what we are going to call the file names as known to SCons on disk. We do the
# subst early so that we can call `replace` on the result.
setupEnv["ICECC_VERSION_ID"] = setupEnv.subst(
- "icecc-create-env.${CC}${CXX}.tar.gz").replace("/", "_"
- )
+ "icecc-create-env.${CC}${CXX}.tar.gz").replace("/", "_")
setupEnv["ICECC_VERSION"] = icecc_version_file = setupEnv.Command(
target="$ICECREAM_TARGET_DIR/$ICECC_VERSION_ID",
source=[
"$ICECC_CREATE_ENV",
"$CC",
- "$CXX"
+ "$CXX",
],
action=SCons.Action.Action(
icecc_create_env,
"Generating icecream compiler package: $TARGET" if not verbose else str(),
generator=True,
- )
+ ),
)[0]
# At this point, all paths above have produced a file of some sort. We now move on
@@ -234,38 +233,37 @@ def generate(env):
# file as found on the users filesystem or from
# icecc-create-env. We put the absolute path to that filename into
# a file that we can read from.
- icecc_version_info = setupEnv.File(setupEnv.Command(
- target=[
- '${ICECREAM_TARGET_BASE}.sha256',
- '${ICECREAM_TARGET_BASE}.sha256.path',
- ],
- source=icecc_version_file,
- action=SCons.Action.ListAction(
- [
-
- # icecc-create-env run twice with the same input will
- # create files with identical contents, and identical
- # filenames, but with different hashes because it
- # includes timestamps. So we compute a new hash based
- # on the actual stream contents of the file by
- # untarring it into shasum.
- SCons.Action.Action(
- "tar xfO ${SOURCES[0]} | shasum -b -a 256 - | awk '{ print $1 }' > ${TARGETS[0]}",
- "Calculating sha256 sum of ${SOURCES[0]}" if not verbose else str(),
- ),
-
- SCons.Action.Action(
- "ln -f ${SOURCES[0]} ${TARGETS[0].dir}/icecream_py_sha256_$$(cat ${TARGETS[0]}).tar.gz",
- "Linking ${SOURCES[0]} to its sha256 sum name" if not verbose else str(),
- ),
-
- SCons.Action.Action(
- "echo ${TARGETS[0].dir.abspath}/icecream_py_sha256_$$(cat ${TARGETS[0]}).tar.gz > ${TARGETS[1]}",
- "Storing sha256 sum name for ${SOURCES[0]} to ${TARGETS[1]}" if not verbose else str(),
- )
+ icecc_version_info = setupEnv.File(
+ setupEnv.Command(
+ target=[
+ '${ICECREAM_TARGET_BASE}.sha256',
+ '${ICECREAM_TARGET_BASE}.sha256.path',
],
- )
- ))
+ source=icecc_version_file,
+ action=SCons.Action.ListAction(
+ [
+
+ # icecc-create-env run twice with the same input will
+ # create files with identical contents, and identical
+ # filenames, but with different hashes because it
+ # includes timestamps. So we compute a new hash based
+ # on the actual stream contents of the file by
+ # untarring it into shasum.
+ SCons.Action.Action(
+ "tar xfO ${SOURCES[0]} | shasum -b -a 256 - | awk '{ print $1 }' > ${TARGETS[0]}",
+ "Calculating sha256 sum of ${SOURCES[0]}" if not verbose else str(),
+ ),
+ SCons.Action.Action(
+ "ln -f ${SOURCES[0]} ${TARGETS[0].dir}/icecream_py_sha256_$$(cat ${TARGETS[0]}).tar.gz",
+ "Linking ${SOURCES[0]} to its sha256 sum name" if not verbose else str(),
+ ),
+ SCons.Action.Action(
+ "echo ${TARGETS[0].dir.abspath}/icecream_py_sha256_$$(cat ${TARGETS[0]}).tar.gz > ${TARGETS[1]}",
+ "Storing sha256 sum name for ${SOURCES[0]} to ${TARGETS[1]}"
+ if not verbose else str(),
+ ),
+ ], ),
+ ), )
# We can't allow these to interact with the cache because the
# second action produces a file unknown to SCons. If caching were
@@ -280,13 +278,11 @@ def generate(env):
# wrapper script.
icecc_version_string_value = setupEnv.Command(
target=setupEnv.Value(None),
- source=[
- icecc_version_info[1]
- ],
+ source=[icecc_version_info[1]],
action=SCons.Action.Action(
lambda env, target, source: target[0].write(source[0].get_text_contents()),
"Reading compiler package sha256 sum path from $SOURCE" if not verbose else str(),
- )
+ ),
)[0]
def icecc_version_string_generator(source, target, env, for_signature):
@@ -319,9 +315,9 @@ def generate(env):
'',
],
SUBST_DICT={
- '@icecc@' : '$ICECC',
- '@icecc_version@' : '$ICECC_VERSION',
- '@icecc_version_arch@' : icecc_version_arch_string,
+ '@icecc@': '$ICECC',
+ '@icecc_version@': '$ICECC_VERSION',
+ '@icecc_version_arch@': icecc_version_arch_string,
},
# Don't change around the suffixes
@@ -333,7 +329,7 @@ def generate(env):
# so that it knows to invoke SCons to produce it as part of
# TEMPLATE expansion. Since we have set NINJA_SKIP=True for
# setupEnv, we need to reverse that here.
- NINJA_SKIP=False
+ NINJA_SKIP=False,
)
setupEnv.AddPostAction(
@@ -405,8 +401,7 @@ def generate(env):
continue
base = emitterdict[suffix]
emitterdict[suffix] = SCons.Builder.ListEmitter(
- [base, icecc_toolchain_dependency_emitter]
- )
+ [base, icecc_toolchain_dependency_emitter], )
# Check whether ccache is requested and is a valid tool.
if "CCACHE" in env:
@@ -479,10 +474,10 @@ def generate(env):
shell_env = existing_gen(env, target, source)
else:
shell_env = env['ENV'].copy()
- shell_env['CCACHE_PREFIX'] = env.File(env.subst("$ICECC_GENERATOR", target=target, source=source)).abspath
+ shell_env['CCACHE_PREFIX'] = env.File(
+ env.subst("$ICECC_GENERATOR", target=target, source=source)).abspath
return shell_env
-
env['SHELL_ENV_GENERATOR'] = icecc_ccache_prefix_gen
else:
@@ -508,9 +503,10 @@ def generate(env):
# jobs, figure out what sort they are and extend this part of the
# setup.
def icerun_generator(target, source, env, for_signature):
- if "conftest" not in str(target[0]):
- return '$ICERUN'
- return ''
+ if "conftest" not in str(target[0]):
+ return '$ICERUN'
+ return ''
+
env['ICERUN_GENERATOR'] = icerun_generator
icerun_commands = [
@@ -575,7 +571,9 @@ def exists(env):
else:
icecc_create_env_bin = env.File("ICECC").File("icecc-create-env")
if not icecc_create_env_bin:
- print(f"Error: the icecc-create-env utility does not exist at {icecc_create_env_bin} as expected")
+ print(
+ f"Error: the icecc-create-env utility does not exist at {icecc_create_env_bin} as expected"
+ )
for line in pipe.stdout:
line = line.decode("utf-8")
@@ -594,6 +592,8 @@ def exists(env):
if validated:
env['ICECREAM_VERSION'] = icecc_version
else:
- print(f"Error: failed to verify icecream version >= {_icecream_version_min}, found {icecc_version}")
+ print(
+ f"Error: failed to verify icecream version >= {_icecream_version_min}, found {icecc_version}"
+ )
return validated
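
The icerun_generator hunk is one of the few behavior-adjacent edits here, and it only fixes indentation: the generator expands to the $ICERUN wrapper for real targets and to nothing for configure checks, whose target names contain "conftest". Runnable sketch:

def icerun_generator(target, source, env, for_signature):
    if "conftest" not in str(target[0]):
        return '$ICERUN'
    return ''

print(icerun_generator(["build/opt/mongo/mongod"], [], None, False))   # $ICERUN
print(icerun_generator([".sconf_temp/conftest_0.o"], [], None, False)
      or "(empty)")
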
diff --git a/site_scons/site_tools/idl_tool.py b/site_scons/site_tools/idl_tool.py
index 04b0db0cd62..dd3b5b65ee3 100755
--- a/site_scons/site_tools/idl_tool.py
+++ b/site_scons/site_tools/idl_tool.py
@@ -19,7 +19,6 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-
"""IDL Compiler Scons Tool."""
import os.path
@@ -39,9 +38,7 @@ def idlc_emitter(target, source, env):
first_source = str(source[0])
if not first_source.endswith(".idl"):
- raise ValueError(
- "Bad idl file name '%s', it must end with '.idl' " % (first_source)
- )
+ raise ValueError("Bad idl file name '%s', it must end with '.idl' " % (first_source))
base_file_name, _ = SCons.Util.splitext(str(target[0]))
target_source = env.File(base_file_name + "_gen.cpp")
@@ -79,16 +76,12 @@ def idl_scanner(node, env, path):
try:
with open(str(node), encoding="utf-8") as file_stream:
- parsed_doc = idlc.parser.parse(
- file_stream, str(node), resolver
- )
+ parsed_doc = idlc.parser.parse(file_stream, str(node), resolver)
except OSError:
return nodes_deps_list
if not parsed_doc.errors and parsed_doc.spec.imports is not None:
- nodes_deps_list.extend(
- [env.File(d) for d in sorted(parsed_doc.spec.imports.dependencies)]
- )
+ nodes_deps_list.extend([env.File(d) for d in sorted(parsed_doc.spec.imports.dependencies)])
setattr(node.attributes, "IDL_NODE_DEPS", nodes_deps_list)
return nodes_deps_list
@@ -122,20 +115,20 @@ def generate(env):
env["IDLC"] = "$PYTHON buildscripts/idl/idlc.py"
base_dir = env.Dir("$BUILD_DIR").path
env["IDLCFLAGS"] = [
- "--include", "src",
- "--base_dir", base_dir,
- "--target_arch", "$TARGET_ARCH",
+ "--include",
+ "src",
+ "--base_dir",
+ base_dir,
+ "--target_arch",
+ "$TARGET_ARCH",
]
env["IDLCCOM"] = "$IDLC $IDLCFLAGS --header ${TARGETS[1]} --output ${TARGETS[0]} $SOURCES"
env["IDLCCOMSTR"] = ("Generating ${TARGETS[0]}"
- if not env.get("VERBOSE", "").lower() in ['true', '1']
- else None)
+ if not env.get("VERBOSE", "").lower() in ['true', '1'] else None)
env["IDLCSUFFIX"] = ".idl"
global IDL_GLOBAL_DEPS
- IDL_GLOBAL_DEPS = env.Glob("#buildscripts/idl/*.py") + env.Glob(
- "#buildscripts/idl/idl/*.py"
- )
+ IDL_GLOBAL_DEPS = env.Glob("#buildscripts/idl/*.py") + env.Glob("#buildscripts/idl/idl/*.py")
env["IDL_HAS_INLINE_DEPENDENCIES"] = True
diff --git a/site_scons/site_tools/incremental_link.py b/site_scons/site_tools/incremental_link.py
index ebcf3a87dcc..9a9cf3748c4 100644
--- a/site_scons/site_tools/incremental_link.py
+++ b/site_scons/site_tools/incremental_link.py
@@ -32,9 +32,10 @@ def generate(env):
builders = env["BUILDERS"]
for builder in ("Program", "SharedLibrary", "LoadableModule"):
emitter = builders[builder].emitter
- builders[builder].emitter = SCons.Builder.ListEmitter(
- [emitter, _tag_as_precious,]
- )
+ builders[builder].emitter = SCons.Builder.ListEmitter([
+ emitter,
+ _tag_as_precious,
+ ])
def exists(env):
@@ -46,12 +47,8 @@ def exists(env):
# On posix platforms, excluding darwin, we may have enabled
# incremental linking. Check for the relevant flags.
- if (
- env.TargetOSIs("posix")
- and not env.TargetOSIs("darwin")
- and "-fuse-ld=gold" in env["LINKFLAGS"]
- and "-Wl,--incremental" in env["LINKFLAGS"]
- ):
+ if (env.TargetOSIs("posix") and not env.TargetOSIs("darwin")
+ and "-fuse-ld=gold" in env["LINKFLAGS"] and "-Wl,--incremental" in env["LINKFLAGS"]):
return True
return False
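
Note: the generate() hunk shows the standard way to stack a new emitter on top of whatever emitter a builder already has: wrap both in a ListEmitter. A sketch, under the assumption that _tag_as_precious marks link outputs Precious and NoCache, which is what in-place incremental linking requires:

    import SCons.Builder

    def _tag_as_precious(target, source, env):
        # Incremental links mutate outputs in place, so keep them between
        # builds and keep them out of the derived-file cache.
        env.Precious(target)
        env.NoCache(target)
        return target, source

    def chain_emitter(env, builder_name):
        builder = env["BUILDERS"][builder_name]
        builder.emitter = SCons.Builder.ListEmitter([
            builder.emitter,
            _tag_as_precious,
        ])
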
diff --git a/site_scons/site_tools/jstoh.py b/site_scons/site_tools/jstoh.py
index 912c495891f..adcb69ed2c5 100755
--- a/site_scons/site_tools/jstoh.py
+++ b/site_scons/site_tools/jstoh.py
@@ -52,10 +52,8 @@ def jsToHeader(target, source):
h.append("0};")
# symbols aren't exported w/o this
h.append("extern const JSFile %s;" % objname)
- h.append(
- 'const JSFile %s = { "%s", StringData(%s, sizeof(%s) - 1) };'
- % (objname, filename.replace("\\", "/"), stringname, stringname)
- )
+ h.append('const JSFile %s = { "%s", StringData(%s, sizeof(%s) - 1) };' %
+ (objname, filename.replace("\\", "/"), stringname, stringname))
h.append("} // namespace JSFiles")
h.append("} // namespace mongo")
diff --git a/site_scons/site_tools/mongo_benchmark.py b/site_scons/site_tools/mongo_benchmark.py
index 5fe35b038d3..e52e03a6809 100644
--- a/site_scons/site_tools/mongo_benchmark.py
+++ b/site_scons/site_tools/mongo_benchmark.py
@@ -19,7 +19,6 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-
"""
Pseudo-builders for building and registering benchmarks.
"""
@@ -27,6 +26,7 @@ from SCons.Script import Action
from site_scons.mongo import insort_wrapper
+
def exists(env):
return True
@@ -54,9 +54,8 @@ def build_benchmark(env, target, source, **kwargs):
benchmark_test_components = {"tests"}
if "AIB_COMPONENTS_EXTRA" in kwargs:
- benchmark_test_components = set(kwargs["AIB_COMPONENTS_EXTRA"]).union(
- benchmark_test_components
- )
+ benchmark_test_components = set(
+ kwargs["AIB_COMPONENTS_EXTRA"]).union(benchmark_test_components)
kwargs["AIB_COMPONENTS_EXTRA"] = list(benchmark_test_components)
diff --git a/site_scons/site_tools/mongo_integrationtest.py b/site_scons/site_tools/mongo_integrationtest.py
index cbaadeb610d..af400ab805e 100644
--- a/site_scons/site_tools/mongo_integrationtest.py
+++ b/site_scons/site_tools/mongo_integrationtest.py
@@ -19,7 +19,6 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-
"""
Pseudo-builders for building and registering integration tests.
"""
@@ -27,6 +26,7 @@ from SCons.Script import Action
from site_scons.mongo import insort_wrapper
+
def exists(env):
return True
@@ -48,9 +48,8 @@ def build_cpp_integration_test(env, target, source, **kwargs):
integration_test_components = {"tests"}
if "AIB_COMPONENTS_EXTRA" in kwargs:
- kwargs["AIB_COMPONENTS_EXTRA"] = set(kwargs["AIB_COMPONENTS_EXTRA"]).union(
- integration_test_components
- )
+ kwargs["AIB_COMPONENTS_EXTRA"] = set(
+ kwargs["AIB_COMPONENTS_EXTRA"]).union(integration_test_components)
else:
kwargs["AIB_COMPONENTS_EXTRA"] = list(integration_test_components)
diff --git a/site_scons/site_tools/mongo_libfuzzer.py b/site_scons/site_tools/mongo_libfuzzer.py
index bcbc0412688..90a0db807c7 100644
--- a/site_scons/site_tools/mongo_libfuzzer.py
+++ b/site_scons/site_tools/mongo_libfuzzer.py
@@ -19,7 +19,6 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-
"""Pseudo-builders for building and registering libfuzzer tests.
"""
from SCons.Script import Action
@@ -48,17 +47,14 @@ def build_cpp_libfuzzer_test(env, target, source, **kwargs):
myenv.Prepend(LINKFLAGS=[sanitizer_option])
libfuzzer_test_components = {"tests", "fuzzertests"}
- if "AIB_COMPONENT" in kwargs and not kwargs["AIB_COMPONENTS"].endswith(
- "-fuzzertest"
- ):
+ if "AIB_COMPONENT" in kwargs and not kwargs["AIB_COMPONENTS"].endswith("-fuzzertest"):
kwargs["AIB_COMPONENT"] += "-fuzzertest"
if "AIB_COMPONENTS_EXTRA" in kwargs:
- libfuzzer_test_components = set(kwargs["AIB_COMPONENTS_EXTRA"]).union(
- libfuzzer_test_components
- )
+ libfuzzer_test_components = set(
+ kwargs["AIB_COMPONENTS_EXTRA"]).union(libfuzzer_test_components)
- kwargs["AIB_COMPONENTS_EXTRA"] = list(libfuzzer_test_components)
+ kwargs["AIB_COMPONENTS_EXTRA"] = list(libfuzzer_test_components)
    # Fuzzer tests are inherently undecidable (see
# mongo_test_execution.py for details on undecidability).
diff --git a/site_scons/site_tools/mongo_test_execution.py b/site_scons/site_tools/mongo_test_execution.py
index 2527ba63a12..5c233ef97d0 100644
--- a/site_scons/site_tools/mongo_test_execution.py
+++ b/site_scons/site_tools/mongo_test_execution.py
@@ -29,6 +29,7 @@ import auto_install_binaries
_proof_scanner_cache_key = "proof_scanner_cache"
_associated_proof = "associated_proof_key"
+
def proof_generator_command_scanner_func(node, env, path):
results = getattr(node.attributes, _proof_scanner_cache_key, None)
if results is not None:
@@ -37,20 +38,22 @@ def proof_generator_command_scanner_func(node, env, path):
setattr(node.attributes, _proof_scanner_cache_key, results)
return results
+
proof_generator_command_scanner = SCons.Scanner.Scanner(
function=proof_generator_command_scanner_func,
path_function=None,
- recursive=True
+ recursive=True,
)
+
def auto_prove_task(env, component, role):
entry = auto_install_binaries.get_alias_map_entry(env, component, role)
return [
- getattr(f.attributes, _associated_proof)
- for f in entry.files
+ getattr(f.attributes, _associated_proof) for f in entry.files
if hasattr(f.attributes, _associated_proof)
]
+
def generate_test_execution_aliases(env, test):
installed = [test]
if env.get("AUTO_INSTALL_ENABLED", False) and env.GetAutoInstalledFiles(test):
@@ -90,7 +93,8 @@ def generate_test_execution_aliases(env, test):
verbose_source_command = test_env.Command(
target=f"#+{target_name}-{source_name}",
source=installed[0],
- action="$( $ICERUN $) ${SOURCES[0]} -fileNameFilter $TEST_SOURCE_FILE_NAME $UNITTEST_FLAGS",
+ action=
+ "$( $ICERUN $) ${SOURCES[0]} -fileNameFilter $TEST_SOURCE_FILE_NAME $UNITTEST_FLAGS",
TEST_SOURCE_FILE_NAME=source_name,
NINJA_POOL="console",
)
@@ -102,7 +106,10 @@ def generate_test_execution_aliases(env, test):
alias = env.Alias(f'+{source_name}', verbose_source_command)
if len(alias[0].children()) > 1:
- raise SCons.Errors.BuildError(alias[0].children()[0], f"Multiple unit test programs contain a source file named '{source_name}' which would result in an ambiguous test execution alias. Unit test source filenames are required to be globally unique.")
+ raise SCons.Errors.BuildError(
+ alias[0].children()[0],
+ f"Multiple unit test programs contain a source file named '{source_name}' which would result in an ambiguous test execution alias. Unit test source filenames are required to be globally unique."
+ )
proof_generator_command = test_env.Command(
target=[
@@ -110,11 +117,8 @@ def generate_test_execution_aliases(env, test):
'${SOURCE}.status',
],
source=installed[0],
- action=SCons.Action.Action(
- "$PROOF_GENERATOR_COMMAND",
- "$PROOF_GENERATOR_COMSTR"
- ),
- source_scanner=proof_generator_command_scanner
+ action=SCons.Action.Action("$PROOF_GENERATOR_COMMAND", "$PROOF_GENERATOR_COMSTR"),
+ source_scanner=proof_generator_command_scanner,
)
# We assume tests are provable by default, but some tests may not
@@ -128,10 +132,7 @@ def generate_test_execution_aliases(env, test):
proof_analyzer_command = test_env.Command(
target='${SOURCES[1].base}.proof',
source=proof_generator_command,
- action=SCons.Action.Action(
- "$PROOF_ANALYZER_COMMAND",
- "$PROOF_ANALYZER_COMSTR"
- )
+ action=SCons.Action.Action("$PROOF_ANALYZER_COMMAND", "$PROOF_ANALYZER_COMSTR"),
)
proof_analyzer_alias = env.Alias(
@@ -143,6 +144,7 @@ def generate_test_execution_aliases(env, test):
# TODO: Should we enable proof at the file level?
+
def exists(env):
return True
@@ -153,14 +155,13 @@ def generate(env):
env.AddMethod(generate_test_execution_aliases, "GenerateTestExecutionAliases")
env["TEST_EXECUTION_SUFFIX_DENYLIST"] = env.get(
- "TEST_EXECUTION_SUFFIX_DENYLIST", [".in"]
+ "TEST_EXECUTION_SUFFIX_DENYLIST",
+ [".in"],
)
- env.AppendUnique(
- AIB_TASKS={
- "prove": (auto_prove_task, False),
- }
- )
+ env.AppendUnique(AIB_TASKS={
+ "prove": (auto_prove_task, False),
+ })
# TODO: Should we have some sort of prefix_xdir for the output location for these? Something like
# $PREFIX_VARCACHE and which in our build is pre-populated to $PREFIX/var/cache/mongo or similar?
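
Note: the prove task works by stashing each test's proof alias on the installed file's node attributes and harvesting those attributes later per component and role. The stash-and-harvest pattern in isolation (a sketch, not the tool's full wiring):

    _ASSOCIATED_PROOF = "associated_proof_key"

    def associate_proof(installed_file, proof_alias):
        # Stash the proof alias on the file so tasks can find it later.
        setattr(installed_file.attributes, _ASSOCIATED_PROOF, proof_alias)

    def collect_proofs(files):
        return [
            getattr(f.attributes, _ASSOCIATED_PROOF)
            for f in files
            if hasattr(f.attributes, _ASSOCIATED_PROOF)
        ]
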
diff --git a/site_scons/site_tools/mongo_test_list.py b/site_scons/site_tools/mongo_test_list.py
index 1b02c52eb8e..a000c85cca2 100644
--- a/site_scons/site_tools/mongo_test_list.py
+++ b/site_scons/site_tools/mongo_test_list.py
@@ -19,7 +19,6 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-
"""Pseudo-builders for building test lists for Resmoke"""
import SCons
@@ -63,9 +62,9 @@ def test_list_builder_action(env, target, source):
TEST_LIST_BUILDER = SCons.Builder.Builder(
action=SCons.Action.FunctionAction(
- test_list_builder_action, {"cmdstr": "Generating $TARGETS"},
- )
-)
+ test_list_builder_action,
+ {"cmdstr": "Generating $TARGETS"},
+ ))
def exists(env):
diff --git a/site_scons/site_tools/mongo_unittest.py b/site_scons/site_tools/mongo_unittest.py
index 33373282606..f06a64e191d 100644
--- a/site_scons/site_tools/mongo_unittest.py
+++ b/site_scons/site_tools/mongo_unittest.py
@@ -19,12 +19,12 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-
"""Pseudo-builders for building and registering unit tests."""
from SCons.Script import Action
from site_scons.mongo import insort_wrapper
+
def exists(env):
return True
@@ -53,9 +53,8 @@ def build_cpp_unit_test(env, target, source, **kwargs):
unit_test_components = {"tests"}
if "AIB_COMPONENTS_EXTRA" in kwargs:
- kwargs["AIB_COMPONENTS_EXTRA"] = set(kwargs["AIB_COMPONENTS_EXTRA"]).union(
- unit_test_components
- )
+ kwargs["AIB_COMPONENTS_EXTRA"] = set(
+ kwargs["AIB_COMPONENTS_EXTRA"]).union(unit_test_components)
else:
kwargs["AIB_COMPONENTS_EXTRA"] = list(unit_test_components)
diff --git a/site_scons/site_tools/ninja.py b/site_scons/site_tools/ninja.py
index 1c76bd92478..df42f03f884 100644
--- a/site_scons/site_tools/ninja.py
+++ b/site_scons/site_tools/ninja.py
@@ -19,7 +19,6 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-
"""Generate build.ninja files from SCons aliases."""
import sys
@@ -79,9 +78,10 @@ def _mkdir_action_function(env, node):
# to an invalid ninja file.
"variables": {
# On Windows mkdir "-p" is always on
- "cmd": "mkdir {args}".format(
- args = ' '.join(get_outputs(node)) + " & exit /b 0" if env["PLATFORM"] == "win32" else "-p " + ' '.join(get_outputs(node)),
- ),
+ "cmd":
+ "mkdir {args}".format(
+ args=' '.join(get_outputs(node)) + " & exit /b 0"
+ if env["PLATFORM"] == "win32" else "-p " + ' '.join(get_outputs(node)), ),
},
}
@@ -130,9 +130,7 @@ def alias_to_ninja_build(node):
return {
"outputs": get_outputs(node),
"rule": "phony",
- "implicit": [
- get_path(src_file(n)) for n in node.children() if is_valid_dependent_node(n)
- ],
+ "implicit": [get_path(src_file(n)) for n in node.children() if is_valid_dependent_node(n)],
}
@@ -140,18 +138,22 @@ def get_order_only(node):
"""Return a list of order only dependencies for node."""
if node.prerequisites is None:
return []
- return [get_path(src_file(prereq)) for prereq in node.prerequisites if is_valid_dependent_node(prereq)]
+ return [
+ get_path(src_file(prereq)) for prereq in node.prerequisites
+ if is_valid_dependent_node(prereq)
+ ]
def get_dependencies(node, skip_sources=False):
"""Return a list of dependencies for node."""
if skip_sources:
return [
- get_path(src_file(child))
- for child in node.children()
+ get_path(src_file(child)) for child in node.children()
if child not in node.sources and is_valid_dependent_node(child)
]
- return [get_path(src_file(child)) for child in node.children() if is_valid_dependent_node(child)]
+ return [
+ get_path(src_file(child)) for child in node.children() if is_valid_dependent_node(child)
+ ]
def get_inputs(node, skip_unknown_types=False):
@@ -180,10 +182,12 @@ def get_inputs(node, skip_unknown_types=False):
else:
if skip_unknown_types:
continue
- raise Exception("Can't process {} node '{}' as an input for '{}'".format(
- type(input_node),
- str(input_node),
- str(node)))
+ raise Exception(
+ "Can't process {} node '{}' as an input for '{}'".format(
+ type(input_node),
+ str(input_node),
+ str(node),
+ ), )
# convert node items into raw paths/aliases for ninja
return [get_path(src_file(o)) for o in ninja_nodes]
@@ -204,6 +208,7 @@ def get_outputs(node):
return outputs
+
def generate_depfile(env, node, dependencies):
"""
Ninja tool function for writing a depfile. The depfile should include
@@ -239,6 +244,7 @@ def generate_depfile(env, node, dependencies):
with open(depfile, 'w') as f:
f.write(depfile_contents)
+
class SConsToNinjaTranslator:
"""Translates SCons Actions into Ninja build objects."""
@@ -330,25 +336,19 @@ class SConsToNinjaTranslator:
if handler is not None:
return handler(node.env if node.env else self.env, node)
- raise Exception(
- "Found unhandled function action {}, "
- " generating scons command to build\n"
- "Note: this is less efficient than Ninja,"
- " you can write your own ninja build generator for"
- " this function using NinjaRegisterFunctionHandler".format(name)
- )
+ raise Exception("Found unhandled function action {}, "
+ " generating scons command to build\n"
+ "Note: this is less efficient than Ninja,"
+ " you can write your own ninja build generator for"
+ " this function using NinjaRegisterFunctionHandler".format(name))
# pylint: disable=too-many-branches
def handle_list_action(self, node, action):
"""TODO write this comment"""
results = [
- self.action_to_ninja_build(node, action=act)
- for act in action.list
- if act is not None
- ]
- results = [
- result for result in results if result is not None and result["outputs"]
+ self.action_to_ninja_build(node, action=act) for act in action.list if act is not None
]
+ results = [result for result in results if result is not None and result["outputs"]]
if not results:
return None
@@ -450,22 +450,28 @@ class NinjaState:
scons_escape = env.get("ESCAPE", lambda x: x)
self.variables = {
- "COPY": "cmd.exe /c 1>NUL copy" if sys.platform == "win32" else "cp",
- "NOOP": "cmd.exe /c 1>NUL echo 0" if sys.platform == "win32" else "echo 0 >/dev/null",
- "SCONS_INVOCATION": "{} {} __NINJA_NO=1 $out".format(
- sys.executable,
- " ".join(
- [ninja_syntax.escape(scons_escape(arg)) for arg in sys.argv if arg not in COMMAND_LINE_TARGETS]
+ "COPY":
+ "cmd.exe /c 1>NUL copy" if sys.platform == "win32" else "cp",
+ "NOOP":
+ "cmd.exe /c 1>NUL echo 0" if sys.platform == "win32" else "echo 0 >/dev/null",
+ "SCONS_INVOCATION":
+ "{} {} __NINJA_NO=1 $out".format(
+ sys.executable,
+ " ".join([
+ ninja_syntax.escape(scons_escape(arg)) for arg in sys.argv
+ if arg not in COMMAND_LINE_TARGETS
+ ]),
),
- ),
- "SCONS_INVOCATION_W_TARGETS": "{} {}".format(
- sys.executable, " ".join([ninja_syntax.escape(scons_escape(arg)) for arg in sys.argv])
- ),
+ "SCONS_INVOCATION_W_TARGETS":
+ "{} {}".format(
+ sys.executable,
+ " ".join([ninja_syntax.escape(scons_escape(arg)) for arg in sys.argv])),
# This must be set to a global default per:
# https://ninja-build.org/manual.html
#
# (The deps section)
- "msvc_deps_prefix": "Note: including file:",
+ "msvc_deps_prefix":
+ "Note: including file:",
}
self.rules = {
@@ -505,20 +511,21 @@ class NinjaState:
# to do the same. See related for more info:
# https://jira.mongodb.org/browse/SERVER-49457
"AR": {
- "command": "{}$env$AR @$out.rsp".format(
- '' if sys.platform == "win32" else "rm -f $out && "
- ),
- "description": "Archiving $out",
- "rspfile": "$out.rsp",
- "rspfile_content": "$rspc",
- "pool": "local_pool",
+ "command":
+ "{}$env$AR @$out.rsp".format('' if sys.platform == "win32" else "rm -f $out && "
+ ),
+ "description":
+ "Archiving $out",
+ "rspfile":
+ "$out.rsp",
+ "rspfile_content":
+ "$rspc",
+ "pool":
+ "local_pool",
},
"SYMLINK": {
"command": (
- "cmd /c mklink $out $in"
- if sys.platform == "win32"
- else "ln -s $in $out"
- ),
+ "cmd /c mklink $out $in" if sys.platform == "win32" else "ln -s $in $out"),
"description": "Symlink $in -> $out",
},
"NOOP": {
@@ -678,20 +685,17 @@ class NinjaState:
if generated_sources_alias and generated_sources_build:
generated_source_files = sorted(
- [] if not generated_sources_build else generated_sources_build['implicit']
- )
+ [] if not generated_sources_build else generated_sources_build['implicit'])
+
def check_generated_source_deps(build):
- return (
- build != generated_sources_build
- and set(build["outputs"]).isdisjoint(generated_source_files)
- )
+ return (build != generated_sources_build
+ and set(build["outputs"]).isdisjoint(generated_source_files))
else:
generated_sources_build = None
generated_source_files = sorted({
output
# First find builds which have header files in their outputs.
- for build in self.builds.values()
- if self.has_generated_sources(build["outputs"])
+ for build in self.builds.values() if self.has_generated_sources(build["outputs"])
for output in build["outputs"]
# Collect only the header files from the builds with them
# in their output. We do this because is_generated_source
@@ -706,14 +710,13 @@ class NinjaState:
ninja.build(
outputs=generated_sources_alias,
rule="phony",
- implicit=generated_source_files
+ implicit=generated_source_files,
)
+
def check_generated_source_deps(build):
- return (
- not build["rule"] == "INSTALL"
- and set(build["outputs"]).isdisjoint(generated_source_files)
- and set(build.get("implicit", [])).isdisjoint(generated_source_files)
- )
+ return (not build["rule"] == "INSTALL"
+ and set(build["outputs"]).isdisjoint(generated_source_files)
+ and set(build.get("implicit", [])).isdisjoint(generated_source_files))
template_builders = []
@@ -730,10 +733,7 @@ class NinjaState:
# sources and none of the direct implicit dependencies are
# generated sources or else we will create a dependency
# cycle.
- if (
- generated_source_files
- and check_generated_source_deps(build)
- ):
+ if (generated_source_files and check_generated_source_deps(build)):
# Make all non-generated source targets depend on
# _generated_sources. We use order_only for generated
@@ -787,7 +787,9 @@ class NinjaState:
if remaining_outputs:
ninja.build(
- outputs=sorted(remaining_outputs), rule="phony", implicit=first_output,
+ outputs=sorted(remaining_outputs),
+ rule="phony",
+ implicit=first_output,
)
build["outputs"] = first_output
@@ -799,7 +801,8 @@ class NinjaState:
# be repurposed for anything, as long as you have a way to regenerate the depfile.
# More specific info can be found here: https://ninja-build.org/manual.html#_depfile
if rule is not None and rule.get('depfile') and build.get('deps_files'):
- path = build['outputs'] if SCons.Util.is_List(build['outputs']) else [build['outputs']]
+ path = build['outputs'] if SCons.Util.is_List(
+ build['outputs']) else [build['outputs']]
generate_depfile(self.env, path[0], build.pop('deps_files', []))
if "inputs" in build:
@@ -842,7 +845,8 @@ class NinjaState:
# list of build generation about. However, because the generate rule
# is hardcoded here, we need to do this generate_depfile call manually.
ninja_file_path = self.env.File(ninja_file).path
- ninja_in_file_path = os.path.join(get_path(self.env['NINJA_BUILDDIR']), os.path.basename(ninja_file)) + ".in"
+ ninja_in_file_path = os.path.join(
+ get_path(self.env['NINJA_BUILDDIR']), os.path.basename(ninja_file)) + ".in"
generate_depfile(
self.env,
ninja_in_file_path,
@@ -876,23 +880,23 @@ class NinjaState:
pool="console",
implicit=[ninja_file],
variables={
- "cmd": "ninja -f {} -t compdb {}CC CXX > compile_commands.json".format(
- ninja_file, '-x ' if self.env.get('NINJA_COMPDB_EXPAND') else ''
- )
+ "cmd":
+ "ninja -f {} -t compdb {}CC CXX > compile_commands.json".format(
+ ninja_file, '-x ' if self.env.get('NINJA_COMPDB_EXPAND') else '')
},
order_only=[generated_sources_alias],
)
ninja.build(
- "compiledb", rule="phony", implicit=["compile_commands.json"],
+ "compiledb",
+ rule="phony",
+ implicit=["compile_commands.json"],
)
# Look in SCons's list of DEFAULT_TARGETS, find the ones that
# we generated a ninja build rule for.
scons_default_targets = [
- get_path(tgt)
- for tgt in SCons.Script.DEFAULT_TARGETS
- if get_path(tgt) in self.built
+ get_path(tgt) for tgt in SCons.Script.DEFAULT_TARGETS if get_path(tgt) in self.built
]
# If we found an overlap between SCons's list of default
@@ -972,8 +976,7 @@ def get_command_env(env, target, source):
ENV = env.get('SHELL_ENV_GENERATOR', get_default_ENV)(env, target, source)
scons_specified_env = {
key: value
- for key, value in ENV.items()
- if key not in os.environ or os.environ.get(key, None) != value
+ for key, value in ENV.items() if key not in os.environ or os.environ.get(key, None) != value
}
windows = env["PLATFORM"] == "win32"
@@ -1002,7 +1005,8 @@ def get_command_env(env, target, source):
# doesn't make builds on paths with spaces (Ninja and SCons issues)
# nor expanding response file paths with spaces (Ninja issue) work.
value = value.replace(r' ', r'$ ')
- command_env += "export {}='{}';".format(key, env.subst(value, target=target, source=source))
+ command_env += "export {}='{}';".format(key,
+ env.subst(value, target=target, source=source))
env["NINJA_ENV_VAR_CACHE"] = command_env
return command_env
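
Note: get_command_env emits export statements only for variables whose values differ from the ambient environment, and escapes spaces as "$ " since that is Ninja's escape for a literal space. The same logic in isolation (a sketch):

    import os

    def command_env_prefix(shell_env):
        # Export only the variables that differ from os.environ, escaping
        # spaces so Ninja unescapes them back to literal spaces.
        parts = []
        for key, value in shell_env.items():
            if os.environ.get(key) == value:
                continue
            parts.append("export {}='{}';".format(
                key, str(value).replace(" ", "$ ")))
        return "".join(parts)
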
@@ -1030,15 +1034,11 @@ def gen_get_response_file_command(env, rule, tool, tool_is_dynamic=False, custom
cmd_list, _, _ = action.process(targets, sources, env, executor=executor)
cmd_list = [str(c).replace("$", "$$") for c in cmd_list[0]]
else:
- command = generate_command(
- env, node, action, targets, sources, executor=executor
- )
+ command = generate_command(env, node, action, targets, sources, executor=executor)
cmd_list = shlex.split(command)
if tool_is_dynamic:
- tool_command = env.subst(
- tool, target=targets, source=sources, executor=executor
- )
+ tool_command = env.subst(tool, target=targets, source=sources, executor=executor)
else:
tool_command = tool
@@ -1046,11 +1046,8 @@ def gen_get_response_file_command(env, rule, tool, tool_is_dynamic=False, custom
# Add 1 so we always keep the actual tool inside of cmd
tool_idx = cmd_list.index(tool_command) + 1
except ValueError:
- raise Exception(
- "Could not find tool {} in {} generated from {}".format(
- tool, cmd_list, get_comstr(env, action, targets, sources)
- )
- )
+ raise Exception("Could not find tool {} in {} generated from {}".format(
+ tool, cmd_list, get_comstr(env, action, targets, sources)))
cmd, rsp_content = cmd_list[:tool_idx], cmd_list[tool_idx:]
rsp_content = " ".join(rsp_content)
@@ -1062,7 +1059,10 @@ def gen_get_response_file_command(env, rule, tool, tool_is_dynamic=False, custom
for key, value in custom_env.items():
variables["env"] += env.subst(
- f"export {key}={value};", target=targets, source=sources, executor=executor
+ f"export {key}={value};",
+ target=targets,
+ source=sources,
+ executor=executor,
) + " "
return rule, variables, [tool_command]
@@ -1114,7 +1114,7 @@ def get_generic_shell_command(env, node, action, targets, sources, executor=None
    # generally this function will not be used solely and is more like a template to generate
    # the basics for a custom provider which may have more specific options for a provider
# function for a custom NinjaRuleMapping.
- []
+ [],
)
@@ -1151,7 +1151,14 @@ def get_command(env, node, action): # pylint: disable=too-many-branches
return None
provider = __NINJA_RULE_MAPPING.get(comstr, get_generic_shell_command)
- rule, variables, provider_deps = provider(sub_env, node, action, tlist, slist, executor=executor)
+ rule, variables, provider_deps = provider(
+ sub_env,
+ node,
+ action,
+ tlist,
+ slist,
+ executor=executor,
+ )
# Get the dependencies for all targets
implicit = list({dep for tgt in tlist for dep in get_dependencies(tgt)})
@@ -1174,7 +1181,8 @@ def get_command(env, node, action): # pylint: disable=too-many-branches
    # in some cases the tool could be in the local directory and be supplied without the ext
    # such as on Windows, so append the executable suffix and check.
prog_suffix = sub_env.get('PROGSUFFIX', '')
- provider_dep_ext = provider_dep if provider_dep.endswith(prog_suffix) else provider_dep + prog_suffix
+ provider_dep_ext = provider_dep if provider_dep.endswith(
+ prog_suffix) else provider_dep + prog_suffix
if os.path.exists(provider_dep_ext):
implicit.append(provider_dep_ext)
continue
@@ -1182,7 +1190,8 @@ def get_command(env, node, action): # pylint: disable=too-many-branches
# Many commands will assume the binary is in the path, so
# we accept this as a possible input from a given command.
- provider_dep_abspath = sub_env.WhereIs(provider_dep) or sub_env.WhereIs(provider_dep, path=os.environ["PATH"])
+ provider_dep_abspath = sub_env.WhereIs(provider_dep) or sub_env.WhereIs(
+ provider_dep, path=os.environ["PATH"])
if provider_dep_abspath:
implicit.append(provider_dep_abspath)
continue
@@ -1262,7 +1271,8 @@ def register_custom_rule_mapping(env, pre_subst_string, rule):
__NINJA_RULE_MAPPING[pre_subst_string] = rule
-def register_custom_rule(env, rule, command, description="", deps=None, pool=None, use_depfile=False, use_response_file=False, response_file_content="$rspc"):
+def register_custom_rule(env, rule, command, description="", deps=None, pool=None,
+ use_depfile=False, use_response_file=False, response_file_content="$rspc"):
"""Allows specification of Ninja rules from inside SCons files."""
rule_obj = {
"command": command,
@@ -1289,10 +1299,12 @@ def register_custom_pool(env, pool, size):
"""Allows the creation of custom Ninja pools"""
env[NINJA_POOLS][pool] = size
+
def set_build_node_callback(env, node, callback):
if 'conftest' not in str(node):
setattr(node.attributes, "ninja_build_callback", callback)
+
def ninja_csig(original):
"""Return a dummy csig"""
@@ -1316,6 +1328,7 @@ def ninja_contents(original):
return wrapper
+
def CheckNinjaCompdbExpand(env, context):
""" Configure check testing if ninja's compdb can expand response files"""
@@ -1333,11 +1346,13 @@ def CheckNinjaCompdbExpand(env, context):
cmd = echo
pool = console
rspc = "test"
- """))
+ """),
+ )
result = '@fake_output.txt.rsp' not in output
context.Result(result)
return result
+
def ninja_stat(_self, path):
"""
Eternally memoized stat call.
@@ -1464,9 +1479,13 @@ def generate(env):
# exists upstream: https://github.com/SCons/scons/issues/3625
def ninja_generate_deps(env):
return sorted([env.File("#SConstruct").path] + glob("**/SConscript", recursive=True))
+
env['_NINJA_REGENERATE_DEPS_FUNC'] = ninja_generate_deps
- env['NINJA_REGENERATE_DEPS'] = env.get('NINJA_REGENERATE_DEPS', '${_NINJA_REGENERATE_DEPS_FUNC(__env__)}')
+ env['NINJA_REGENERATE_DEPS'] = env.get(
+ 'NINJA_REGENERATE_DEPS',
+ '${_NINJA_REGENERATE_DEPS_FUNC(__env__)}',
+ )
# This adds the required flags such that the generated compile
# commands will create depfiles as appropriate in the Ninja file.
@@ -1515,12 +1534,8 @@ def generate(env):
from SCons.Tool.mslink import compositeLinkAction
if env["LINKCOM"] == compositeLinkAction:
- env[
- "LINKCOM"
- ] = '${TEMPFILE("$LINK $LINKFLAGS /OUT:$TARGET.windows $_LIBDIRFLAGS $_LIBFLAGS $_PDB $SOURCES.windows", "$LINKCOMSTR")}'
- env[
- "SHLINKCOM"
- ] = '${TEMPFILE("$SHLINK $SHLINKFLAGS $_SHLINK_TARGETS $_LIBDIRFLAGS $_LIBFLAGS $_PDB $_SHLINK_SOURCES", "$SHLINKCOMSTR")}'
+ env["LINKCOM"] = '${TEMPFILE("$LINK $LINKFLAGS /OUT:$TARGET.windows $_LIBDIRFLAGS $_LIBFLAGS $_PDB $SOURCES.windows", "$LINKCOMSTR")}'
+ env["SHLINKCOM"] = '${TEMPFILE("$SHLINK $SHLINKFLAGS $_SHLINK_TARGETS $_LIBDIRFLAGS $_LIBFLAGS $_PDB $_SHLINK_SOURCES", "$SHLINKCOMSTR")}'
# Normally in SCons actions for the Program and *Library builders
# will return "${*COM}" as their pre-subst'd command line. However
@@ -1612,12 +1627,8 @@ def generate(env):
# slows down the build significantly and we don't need contents or
# content signatures calculated when generating a ninja file since
# we're not doing any SCons caching or building.
- SCons.Executor.Executor.get_contents = ninja_contents(
- SCons.Executor.Executor.get_contents
- )
- SCons.Node.Alias.Alias.get_contents = ninja_contents(
- SCons.Node.Alias.Alias.get_contents
- )
+ SCons.Executor.Executor.get_contents = ninja_contents(SCons.Executor.Executor.get_contents)
+ SCons.Node.Alias.Alias.get_contents = ninja_contents(SCons.Node.Alias.Alias.get_contents)
SCons.Node.FS.File.get_contents = ninja_contents(SCons.Node.FS.File.get_contents)
SCons.Node.FS.File.get_csig = ninja_csig(SCons.Node.FS.File.get_csig)
SCons.Node.FS.Dir.get_csig = ninja_csig(SCons.Node.FS.Dir.get_csig)
@@ -1689,9 +1700,10 @@ def generate(env):
try:
emitter = builder.emitter
if emitter is not None:
- builder.emitter = SCons.Builder.ListEmitter(
- [emitter, ninja_file_depends_on_all]
- )
+ builder.emitter = SCons.Builder.ListEmitter([
+ emitter,
+ ninja_file_depends_on_all,
+ ], )
else:
builder.emitter = ninja_file_depends_on_all
# Users can inject whatever they want into the BUILDERS
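
Note: further up, generate() monkey-patches get_contents/get_csig with ninja_contents/ninja_csig so that generating build.ninja never pays for content signatures; only conftest nodes keep the real implementation. The wrapper shape, sketched (the dummy value here is illustrative):

    def ninja_csig(original):
        # Replace content-signature computation with a cheap constant,
        # except for configure checks, which still need real signatures.
        def wrapper(self):
            if "conftest" in str(self):
                return original(self)
            return "dummy_ninja_csig"
        return wrapper

    # e.g. SCons.Node.FS.File.get_csig = ninja_csig(SCons.Node.FS.File.get_csig)
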
diff --git a/site_scons/site_tools/separate_debug.py b/site_scons/site_tools/separate_debug.py
index 677ef75723e..08c78f4ef32 100644
--- a/site_scons/site_tools/separate_debug.py
+++ b/site_scons/site_tools/separate_debug.py
@@ -34,15 +34,15 @@ def _update_builder(env, builder):
if origin is not None:
origin_results = old_scanner(origin, env, path)
for origin_result in origin_results:
- origin_result_debug_files = getattr(
- origin_result.attributes, "separate_debug_files", None
- )
+ origin_result_debug_files = getattr(origin_result.attributes,
+ "separate_debug_files", None)
if origin_result_debug_files is not None:
results.extend(origin_result_debug_files)
return results
builder.target_scanner = SCons.Scanner.Scanner(
- function=new_scanner, path_function=old_path_function,
+ function=new_scanner,
+ path_function=old_path_function,
)
base_action = builder.action
@@ -57,31 +57,27 @@ def _update_builder(env, builder):
# setup from the etc/scons/xcode_*.vars files, which would be a
# win as well.
if env.TargetOSIs("darwin"):
- base_action.list.extend(
- [
- SCons.Action.Action(
- "$DSYMUTIL -num-threads 1 $TARGET -o ${TARGET}.dSYM",
- "$DSYMUTILCOMSTR"
- ),
- SCons.Action.Action(
- "$STRIP -S ${TARGET}",
- "$DEBUGSTRIPCOMSTR"
- ),
- ]
- )
+ base_action.list.extend([
+ SCons.Action.Action(
+ "$DSYMUTIL -num-threads 1 $TARGET -o ${TARGET}.dSYM",
+ "$DSYMUTILCOMSTR",
+ ),
+ SCons.Action.Action(
+ "$STRIP -S ${TARGET}",
+ "$DEBUGSTRIPCOMSTR",
+ ),
+ ])
elif env.TargetOSIs("posix"):
- base_action.list.extend(
- [
- SCons.Action.Action(
- "$OBJCOPY --only-keep-debug $TARGET ${TARGET}.debug",
- "$OBJCOPY_ONLY_KEEP_DEBUG_COMSTR"
- ),
- SCons.Action.Action(
- "$OBJCOPY --strip-debug --add-gnu-debuglink ${TARGET}.debug ${TARGET}",
- "$DEBUGSTRIPCOMSTR"
- ),
- ]
- )
+ base_action.list.extend([
+ SCons.Action.Action(
+ "$OBJCOPY --only-keep-debug $TARGET ${TARGET}.debug",
+ "$OBJCOPY_ONLY_KEEP_DEBUG_COMSTR",
+ ),
+ SCons.Action.Action(
+ "$OBJCOPY --strip-debug --add-gnu-debuglink ${TARGET}.debug ${TARGET}",
+ "$DEBUGSTRIPCOMSTR",
+ ),
+ ])
else:
pass
@@ -109,13 +105,15 @@ def _update_builder(env, builder):
plist_file = env.File("Contents/Info.plist", directory=dsym_dir)
setattr(plist_file.attributes, "aib_effective_suffix", ".dSYM")
- setattr(plist_file.attributes, "aib_additional_directory", "{}/Contents".format(dsym_dir_name))
+ setattr(plist_file.attributes, "aib_additional_directory",
+ "{}/Contents".format(dsym_dir_name))
dwarf_dir = env.Dir("Contents/Resources/DWARF", directory=dsym_dir)
dwarf_file = env.File(target0.name, directory=dwarf_dir)
setattr(dwarf_file.attributes, "aib_effective_suffix", ".dSYM")
- setattr(dwarf_file.attributes, "aib_additional_directory", "{}/Contents/Resources/DWARF".format(dsym_dir_name))
+ setattr(dwarf_file.attributes, "aib_additional_directory",
+ "{}/Contents/Resources/DWARF".format(dsym_dir_name))
debug_files.extend([plist_file, dwarf_file])
@@ -174,8 +172,10 @@ def generate(env):
if not env.Verbose():
env.Append(
- OBJCOPY_ONLY_KEEP_DEBUG_COMSTR="Generating debug info for $TARGET into ${TARGET}.dSYM",
- DEBUGSTRIPCOMSTR="Stripping debug info from ${TARGET} and adding .gnu.debuglink to ${TARGET}.debug",
+ OBJCOPY_ONLY_KEEP_DEBUG_COMSTR=
+ "Generating debug info for $TARGET into ${TARGET}.dSYM",
+ DEBUGSTRIPCOMSTR=
+ "Stripping debug info from ${TARGET} and adding .gnu.debuglink to ${TARGET}.debug",
)
for builder in ["Program", "SharedLibrary", "LoadableModule"]:
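
Note: on ELF targets the action list above is the classic two-step objcopy split: copy only the debug sections into a side file, then strip the binary and embed a .gnu.debuglink back to it. The equivalent as plain subprocess calls (a sketch, assuming objcopy is on PATH):

    import subprocess

    def split_debug(binary, objcopy="objcopy"):
        # Step 1: copy only the debug sections into a sibling .debug file.
        # Step 2: strip the binary and record a .gnu.debuglink back to it.
        debug = binary + ".debug"
        subprocess.run([objcopy, "--only-keep-debug", binary, debug], check=True)
        subprocess.run([objcopy, "--strip-debug",
                        "--add-gnu-debuglink=" + debug, binary], check=True)
        return debug
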
diff --git a/site_scons/site_tools/split_dwarf.py b/site_scons/site_tools/split_dwarf.py
index 710d828945a..72316dfb968 100644
--- a/site_scons/site_tools/split_dwarf.py
+++ b/site_scons/site_tools/split_dwarf.py
@@ -66,7 +66,10 @@ def generate(env):
if not suffix in suffixes:
continue
base = emitterdict[suffix]
- emitterdict[suffix] = SCons.Builder.ListEmitter([base, _dwo_emitter,])
+ emitterdict[suffix] = SCons.Builder.ListEmitter([
+ base,
+ _dwo_emitter,
+ ])
def exists(env):
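
Note: _dwo_emitter (not shown in this hunk) presumably declares a .dwo sibling for each object file so SCons can track and clean the split-DWARF side outputs. A sketch of an emitter with that shape (the real one may also consult flags):

    import SCons.Util

    def _dwo_emitter(target, source, env):
        # Declare a .dwo sibling target for every object file.
        extra = []
        for t in target:
            base, _ = SCons.Util.splitext(str(t))
            extra.append(env.File(base + ".dwo"))
        return target + extra, source
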
diff --git a/site_scons/site_tools/tapilink.py b/site_scons/site_tools/tapilink.py
index 0521767fc06..d2fc4b8c340 100644
--- a/site_scons/site_tools/tapilink.py
+++ b/site_scons/site_tools/tapilink.py
@@ -26,6 +26,7 @@ import subprocess
# TODO: DRY this with abilink.py by moving duplicated code out to a common
# support module.
+
def _detect(env):
try:
tapi = env["TAPI"]
@@ -70,9 +71,11 @@ def _add_scanner(builder):
return (getattr(env.Entry(o).attributes, "tbd", o) for o in old_scanner(node, env, path))
builder.target_scanner = SCons.Scanner.Scanner(
- function=new_scanner, path_function=path_function
+ function=new_scanner,
+ path_function=path_function,
)
+
def _add_action(builder):
actions = builder.action
@@ -83,12 +86,11 @@ def _add_action(builder):
# invoking TAPI proves to be expensive, we could address this by
# instead post-processing the "real" .tbd file to strip out the
# UUID, and then potentially even feed it into a hash algorithm.
- builder.action = actions + SCons.Action.Action(
- [
- "$TAPI stubify -o ${TARGET.base}.tbd ${TARGET}",
- "$TAPI stubify --no-uuids -o ${TARGET.base}.tbd.no_uuid ${TARGET}"
- ]
- )
+ builder.action = actions + SCons.Action.Action([
+ "$TAPI stubify -o ${TARGET.base}.tbd ${TARGET}",
+ "$TAPI stubify --no-uuids -o ${TARGET.base}.tbd.no_uuid ${TARGET}",
+ ])
+
def exists(env):
result = _detect(env) != None
diff --git a/site_scons/site_tools/thin_archive.py b/site_scons/site_tools/thin_archive.py
index 5700996a054..7d34a6bfd37 100644
--- a/site_scons/site_tools/thin_archive.py
+++ b/site_scons/site_tools/thin_archive.py
@@ -92,7 +92,8 @@ def _add_scanner(builder):
return new_results
builder.target_scanner = SCons.Scanner.Scanner(
- function=new_scanner, path_function=path_function
+ function=new_scanner,
+ path_function=path_function,
)
@@ -101,8 +102,7 @@ def generate(env):
return
env["ARFLAGS"] = SCons.Util.CLVar(
- [arflag if arflag != "rc" else "rcsTD" for arflag in env["ARFLAGS"]]
- )
+ [arflag if arflag != "rc" else "rcsTD" for arflag in env["ARFLAGS"]])
# Disable running ranlib, since we added 's' above
env["RANLIBCOM"] = ""
diff --git a/site_scons/site_tools/validate_cache_dir.py b/site_scons/site_tools/validate_cache_dir.py
index b5faee9b3e0..3bd07462ade 100644
--- a/site_scons/site_tools/validate_cache_dir.py
+++ b/site_scons/site_tools/validate_cache_dir.py
@@ -29,19 +29,21 @@ import shutil
import tempfile
import traceback
-
import SCons
cache_debug_suffix = " (target: %s, cachefile: %s) "
+
class InvalidChecksum(SCons.Errors.BuildError):
def __init__(self, src, dst, reason, cache_csig='', computed_csig=''):
self.message = f"ERROR: md5 checksum {reason} for {src} ({dst})"
self.cache_csig = cache_csig
self.computed_csig = computed_csig
+
def __str__(self):
return self.message
+
class CacheTransferFailed(SCons.Errors.BuildError):
def __init__(self, src, dst, reason):
self.message = f"ERROR: cachedir transfer {reason} while transfering {src} to {dst}"
@@ -49,6 +51,7 @@ class CacheTransferFailed(SCons.Errors.BuildError):
def __str__(self):
return self.message
+
class UnsupportedError(SCons.Errors.BuildError):
def __init__(self, class_name, feature):
self.message = f"{class_name} does not support {feature}"
@@ -56,8 +59,8 @@ class UnsupportedError(SCons.Errors.BuildError):
def __str__(self):
return self.message
-class CacheDirValidate(SCons.CacheDir.CacheDir):
+class CacheDirValidate(SCons.CacheDir.CacheDir):
def __init__(self, path):
self.json_log = None
super().__init__(path)
@@ -70,7 +73,8 @@ class CacheDirValidate(SCons.CacheDir.CacheDir):
@staticmethod
def get_file_contents_path(default_cachefile_path):
- return pathlib.Path(default_cachefile_path) / pathlib.Path(default_cachefile_path).name.split('.')[0]
+ return pathlib.Path(default_cachefile_path) / pathlib.Path(
+ default_cachefile_path).name.split('.')[0]
@staticmethod
def get_bad_cachefile_path(cksum_cachefile_dir):
@@ -96,17 +100,20 @@ class CacheDirValidate(SCons.CacheDir.CacheDir):
src_file = cls.get_file_contents_path(src)
# using os.path.exists here because: https://bugs.python.org/issue35306
if os.path.exists(str(cls.get_bad_cachefile_path(src))):
- raise InvalidChecksum(cls.get_hash_path(src_file), dst, f"cachefile marked as bad checksum")
+ raise InvalidChecksum(
+ cls.get_hash_path(src_file), dst, f"cachefile marked as bad checksum")
csig = None
try:
with open(cls.get_hash_path(src_file), 'rb') as f_out:
csig = f_out.read().decode().strip()
except OSError as ex:
- raise InvalidChecksum(cls.get_hash_path(src_file), dst, f"failed to read hash file: {ex}") from ex
+ raise InvalidChecksum(
+ cls.get_hash_path(src_file), dst, f"failed to read hash file: {ex}") from ex
else:
if not csig:
- raise InvalidChecksum(cls.get_hash_path(src_file), dst, f"no content_hash data found")
+ raise InvalidChecksum(
+ cls.get_hash_path(src_file), dst, f"no content_hash data found")
with tempfile.TemporaryDirectory() as tmpdirname:
dst_tmp = pathlib.Path(tmpdirname) / os.path.basename(dst)
@@ -118,11 +125,12 @@ class CacheDirValidate(SCons.CacheDir.CacheDir):
shutil.move(dst_tmp, dst)
new_csig = SCons.Util.MD5filesignature(dst,
- chunksize=SCons.Node.FS.File.md5_chunksize*1024)
+ chunksize=SCons.Node.FS.File.md5_chunksize * 1024)
if csig != new_csig:
raise InvalidChecksum(
- cls.get_hash_path(src_file), dst, f"checksums don't match {csig} != {new_csig}", cache_csig=csig, computed_csig=new_csig)
+ cls.get_hash_path(src_file), dst, f"checksums don't match {csig} != {new_csig}",
+ cache_csig=csig, computed_csig=new_csig)
@classmethod
def copy_to_cache(cls, env, src, dst):
@@ -145,9 +153,8 @@ class CacheDirValidate(SCons.CacheDir.CacheDir):
raise CacheTransferFailed(src, dst_file, f"failed to create hash file: {ex}") from ex
def log_json_cachedebug(self, node, pushing=False):
- if (pushing
- and (node.nocache or SCons.CacheDir.cache_readonly or 'conftest' in str(node))):
- return
+ if (pushing and (node.nocache or SCons.CacheDir.cache_readonly or 'conftest' in str(node))):
+ return
cachefile = self.get_file_contents_path(self.cachepath(node)[1])
if node.fs.exists(cachefile):
@@ -213,8 +220,8 @@ class CacheDirValidate(SCons.CacheDir.CacheDir):
self.debugFP.write(self._format_exception_msg())
def _format_exception_msg(self):
- return ('An exception was detected while using the cache:\n' +
- ' ' + "\n ".join("".join(traceback.format_exc()).split("\n"))) + '\n'
+ return ('An exception was detected while using the cache:\n' + ' ' + "\n ".join(
+ "".join(traceback.format_exc()).split("\n"))) + '\n'
def _log(self, log_msg, json_info, realnode, cachefile):
self.CacheDebug(log_msg + cache_debug_suffix, realnode, cachefile)
@@ -241,12 +248,16 @@ class CacheDirValidate(SCons.CacheDir.CacheDir):
return
msg = f"Removed bad cachefile {cksum_dir} found in cache."
- self._log(msg, {
- 'type': 'invalid_checksum',
- 'cache_csig': cache_csig,
- 'computed_csig': computed_csig
- }, node, cksum_dir)
-
+ self._log(
+ msg,
+ {
+ 'type': 'invalid_checksum',
+ 'cache_csig': cache_csig,
+ 'computed_csig': computed_csig,
+ },
+ node,
+ cksum_dir,
+ )
def get_cachedir_csig(self, node):
cachedir, cachefile = self.cachepath(node)
@@ -263,9 +274,11 @@ class CacheDirValidate(SCons.CacheDir.CacheDir):
return dir, path
return dir, str(self.get_cachedir_path(path))
+
def exists(env):
return True
+
def generate(env):
if not env.get('CACHEDIR_CLASS'):
env['CACHEDIR_CLASS'] = CacheDirValidate
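
Note: retrieval copies through a temporary directory and then verifies the file's MD5 against the csig stored next to the cached contents. The verification step in isolation (a sketch; the chunk size and hash-file layout here are illustrative):

    import hashlib

    def md5_file(path, chunksize=65536):
        # Hash the file in chunks so large artifacts don't load into memory.
        digest = hashlib.md5()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(chunksize), b""):
                digest.update(chunk)
        return digest.hexdigest()

    def checksum_matches(path, hash_path):
        with open(hash_path, "rb") as f:
            expected = f.read().decode().strip()
        return bool(expected) and expected == md5_file(path)
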
diff --git a/site_scons/site_tools/vcredist.py b/site_scons/site_tools/vcredist.py
index 5c8effaadc4..0f86629b281 100644
--- a/site_scons/site_tools/vcredist.py
+++ b/site_scons/site_tools/vcredist.py
@@ -137,8 +137,7 @@ def generate(env):
vs_version = int(msvc_major) + int(msvc_minor)
vs_version_next = vs_version + 1
vs_version_range = "[{vs_version}.0, {vs_version_next}.0)".format(
- vs_version=vs_version, vs_version_next=vs_version_next
- )
+ vs_version=vs_version, vs_version_next=vs_version_next)
if not programfilesx86:
programfilesx86 = _get_programfiles()
@@ -146,25 +145,19 @@ def generate(env):
return
# Use vswhere (it has a fixed stable path) to query where Visual Studio is installed.
- env["MSVS"]["VSINSTALLDIR"] = (
- subprocess.check_output(
- [
- os.path.join(
- programfilesx86,
- "Microsoft Visual Studio",
- "Installer",
- "vswhere.exe",
- ),
- "-version",
- vs_version_range,
- "-property",
- "installationPath",
- "-nologo",
- ]
- )
- .decode("utf-8")
- .strip()
- )
+ env["MSVS"]["VSINSTALLDIR"] = (subprocess.check_output([
+ os.path.join(
+ programfilesx86,
+ "Microsoft Visual Studio",
+ "Installer",
+ "vswhere.exe",
+ ),
+ "-version",
+ vs_version_range,
+ "-property",
+ "installationPath",
+ "-nologo",
+ ]).decode("utf-8").strip())
vsinstall_dir = env["MSVS"]["VSINSTALLDIR"]
@@ -179,19 +172,15 @@ def generate(env):
    # TODO: This x64 needs to be abstracted away. Is it the host
# arch, or the target arch? My guess is host.
vsruntime_key_name = "SOFTWARE\\Microsoft\\VisualStudio\\{msvc_major}.0\\VC\\Runtimes\\x64".format(
- msvc_major=msvc_major
- )
+ msvc_major=msvc_major)
vsruntime_key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, vsruntime_key_name)
- vslib_version, vslib_version_type = winreg.QueryValueEx(
- vsruntime_key, "Version"
- )
+ vslib_version, vslib_version_type = winreg.QueryValueEx(vsruntime_key, "Version")
except WindowsError:
return
# Fallback to directory search if we don't find the expected version
- redist_path = os.path.join(
- redist_root, re.match("v(\d+\.\d+\.\d+)\.\d+", vslib_version).group(1)
- )
+ redist_path = os.path.join(redist_root,
+ re.match("v(\d+\.\d+\.\d+)\.\d+", vslib_version).group(1))
if not os.path.isdir(redist_path):
redist_path = None
dirs = os.listdir(redist_root)
@@ -228,9 +217,7 @@ def generate(env):
if not expansion:
return
- vcredist_candidates = [
- c.format(expansion) for c in vcredist_search_template_sequence
- ]
+ vcredist_candidates = [c.format(expansion) for c in vcredist_search_template_sequence]
for candidate in vcredist_candidates:
candidate = os.path.join(redist_path, candidate)
if os.path.isfile(candidate):
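
Note: vswhere.exe lives at a fixed path under Program Files (x86) and answers property queries for a Visual Studio version range, which is exactly what the check_output call above does. Stand-alone (a sketch, Windows only):

    import os
    import subprocess

    def find_vs_install_dir(programfilesx86, version_range):
        vswhere = os.path.join(programfilesx86, "Microsoft Visual Studio",
                               "Installer", "vswhere.exe")
        out = subprocess.check_output([
            vswhere, "-version", version_range,
            "-property", "installationPath", "-nologo",
        ])
        return out.decode("utf-8").strip()

    # e.g. find_vs_install_dir(r"C:\Program Files (x86)", "[16.0, 17.0)")
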
diff --git a/site_scons/site_tools/xcode.py b/site_scons/site_tools/xcode.py
index d40528d3a54..3db0aca4719 100644
--- a/site_scons/site_tools/xcode.py
+++ b/site_scons/site_tools/xcode.py
@@ -34,5 +34,4 @@ def generate(env):
if "DEVELOPER_DIR" in os.environ:
env["ENV"]["DEVELOPER_DIR"] = os.environ["DEVELOPER_DIR"]
print(
- "NOTE: Xcode detected; propagating DEVELOPER_DIR from shell environment to subcommands"
- )
+ "NOTE: Xcode detected; propagating DEVELOPER_DIR from shell environment to subcommands")