author     Tausif Rahman <tausif.rahman@mongodb.com>  2022-05-24 22:55:22 +0000
committer  Evergreen Agent <no-reply@evergreen.mongodb.com>  2022-05-25 17:40:16 +0000
commit     9ea1d6f0419938770eea612479c75838f75752b7 (patch)
tree       26b57fa102878dba3251b40f7ddf9c50ab8b275a /site_scons/site_tools
parent     3ad805fea14b2f1d5c5a367f6ec05ece93502f32 (diff)
download   mongo-9ea1d6f0419938770eea612479c75838f75752b7.tar.gz
SERVER-66490 Apply pylinters to build system code
Diffstat (limited to 'site_scons/site_tools')
-rw-r--r--  site_scons/site_tools/abilink.py                |   6
-rw-r--r--  site_scons/site_tools/auto_archive.py           |  24
-rw-r--r--  site_scons/site_tools/auto_install_binaries.py  | 120
-rw-r--r--  site_scons/site_tools/ccache.py                 |  11
-rw-r--r--  site_scons/site_tools/compilation_db.py         |  22
-rw-r--r--  site_scons/site_tools/distsrc.py                |  50
-rw-r--r--  site_scons/site_tools/forceincludes.py          |  10
-rw-r--r--  site_scons/site_tools/git_decider.py            |   1
-rw-r--r--  site_scons/site_tools/gziptool.py               |   9
-rw-r--r--  site_scons/site_tools/icecream.py               | 130
-rwxr-xr-x  site_scons/site_tools/idl_tool.py               |  29
-rw-r--r--  site_scons/site_tools/incremental_link.py       |  15
-rwxr-xr-x  site_scons/site_tools/jstoh.py                  |   6
-rw-r--r--  site_scons/site_tools/mongo_benchmark.py        |   7
-rw-r--r--  site_scons/site_tools/mongo_integrationtest.py  |   7
-rw-r--r--  site_scons/site_tools/mongo_libfuzzer.py        |  12
-rw-r--r--  site_scons/site_tools/mongo_test_execution.py   |  41
-rw-r--r--  site_scons/site_tools/mongo_test_list.py        |   7
-rw-r--r--  site_scons/site_tools/mongo_unittest.py         |   7
-rw-r--r--  site_scons/site_tools/ninja.py                  | 242
-rw-r--r--  site_scons/site_tools/separate_debug.py         |  64
-rw-r--r--  site_scons/site_tools/split_dwarf.py            |   5
-rw-r--r--  site_scons/site_tools/tapilink.py               |  16
-rw-r--r--  site_scons/site_tools/thin_archive.py           |   6
-rw-r--r--  site_scons/site_tools/validate_cache_dir.py     |  51
-rw-r--r--  site_scons/site_tools/vcredist.py               |  51
-rw-r--r--  site_scons/site_tools/xcode.py                  |   3
27 files changed, 461 insertions, 491 deletions
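
Most of the hunks that follow are mechanical yapf/pylint reformatting rather than behavior changes: argument lists are folded one per line with trailing commas, wrapped string literals are re-joined, and two blank lines are enforced between top-level definitions. The dominant pattern, as a before/after sketch taken from the abilink.py hunk below:

    # Before: arguments packed onto one line, closing paren on its own line.
    builder.target_scanner = SCons.Scanner.Scanner(
        function=new_scanner, path_function=path_function
    )

    # After: one argument per line with a trailing comma, so adding an
    # argument later touches exactly one line of any future diff.
    builder.target_scanner = SCons.Scanner.Scanner(
        function=new_scanner,
        path_function=path_function,
    )
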
diff --git a/site_scons/site_tools/abilink.py b/site_scons/site_tools/abilink.py
index 3670ec24166..f57f63a711d 100644
--- a/site_scons/site_tools/abilink.py
+++ b/site_scons/site_tools/abilink.py
@@ -71,15 +71,15 @@ def _add_scanner(builder):
return new_results
builder.target_scanner = SCons.Scanner.Scanner(
- function=new_scanner, path_function=path_function
+ function=new_scanner,
+ path_function=path_function,
)
def _add_action(builder):
actions = builder.action
builder.action = actions + SCons.Action.Action(
- "$ABIDW --no-show-locs $TARGET | md5sum > ${TARGET}.abidw"
- )
+ "$ABIDW --no-show-locs $TARGET | md5sum > ${TARGET}.abidw")
def exists(env):
diff --git a/site_scons/site_tools/auto_archive.py b/site_scons/site_tools/auto_archive.py
index b3c9ddd99a4..91cd0c282c7 100644
--- a/site_scons/site_tools/auto_archive.py
+++ b/site_scons/site_tools/auto_archive.py
@@ -76,9 +76,7 @@ def add_package_name_alias(env, component, role, name):
"""Add a package name mapping for the combination of component and role."""
# Verify we didn't get a None or empty string for any argument
if not name:
- raise Exception(
- "when setting a package name alias must provide a name parameter"
- )
+ raise Exception("when setting a package name alias must provide a name parameter")
if not component:
raise Exception("No component provided for package name alias")
if not role:
@@ -90,7 +88,8 @@ def get_package_name(env, component, role):
"""Return the package file name for the component and role combination."""
basename = env[PACKAGE_ALIAS_MAP].get(
# TODO: silent roles shouldn't be included here
- (component, role), "{component}-{role}".format(component=component, role=role)
+ (component, role),
+ "{component}-{role}".format(component=component, role=role),
)
return basename
@@ -234,11 +233,7 @@ def archive_builder(source, target, env, for_signature):
# Collect all the installed files for our entry. This is doing a pure DAG
# walk idea of what should be. So we filter out any that are not in the
# installed set.
- transitive_files = [
- f for f in
- collect_transitive_files(env, entry)
- if f in installed
- ]
+ transitive_files = [f for f in collect_transitive_files(env, entry) if f in installed]
if not transitive_files:
return []
@@ -258,7 +253,7 @@ def archive_builder(source, target, env, for_signature):
return "{prefix} {files}".format(
prefix=command_prefix,
- files=" ".join(relative_files)
+ files=" ".join(relative_files),
)
@@ -274,11 +269,11 @@ def generate(env):
action=SCons.Action.CommandGeneratorAction(
archive_builder,
{"cmdstr": "Building package ${TARGETS[0]} from ${SOURCES[1:]}"},
- )
- )
+ ))
env.Append(BUILDERS={"AutoArchive": bld})
env["AUTO_ARCHIVE_TARBALL_SUFFIX"] = env.get(
- "AUTO_ARCHIVE_TARBALL_SUFFIX", "tar.gz"
+ "AUTO_ARCHIVE_TARBALL_SUFFIX",
+ "tar.gz",
)
env["AUTO_ARCHIVE_ZIP_SUFFIX"] = env.get("AUTO_ARCHIVE_ZIP_SUFFIX", "zip")
env[PACKAGE_ALIAS_MAP] = {}
@@ -297,5 +292,4 @@ def generate(env):
"tar": (auto_archive_gen(env, make_archive_script, "tar"), False),
"zip": (auto_archive_gen(env, make_archive_script, "zip"), False),
"archive": (auto_archive_gen(env, make_archive_script, "auto"), False),
- }
- )
+ })
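
The package-name lookup reformatted above falls back to a "component-role" name when no alias is registered; condensed from the hunk (PACKAGE_ALIAS_MAP is the constant this file defines):

    def get_package_name(env, component, role):
        # Fall back to "component-role" when no alias was registered
        # via add_package_name_alias.
        return env[PACKAGE_ALIAS_MAP].get(
            (component, role),
            "{component}-{role}".format(component=component, role=role),
        )

    # With no alias registered:
    #   get_package_name(env, "mongod", "runtime") == "mongod-runtime"
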
diff --git a/site_scons/site_tools/auto_install_binaries.py b/site_scons/site_tools/auto_install_binaries.py
index c6429ad396f..55488465d0e 100644
--- a/site_scons/site_tools/auto_install_binaries.py
+++ b/site_scons/site_tools/auto_install_binaries.py
@@ -42,8 +42,10 @@ ROLE_DECLARATIONS = "AIB_ROLE_DECLARATIONS"
SUFFIX_MAP = "AIB_SUFFIX_MAP"
TASKS = "AIB_TASKS"
-
-SuffixMap = namedtuple("SuffixMap", ["directory", "default_role"],)
+SuffixMap = namedtuple(
+ "SuffixMap",
+ ["directory", "default_role"],
+)
class RoleInfo:
@@ -98,24 +100,17 @@ def declare_roles(env, roles, base_role=None, meta_role=None):
for role in roles:
for d in role.dependencies:
if d not in role_names:
- raise Exception(
- "Role dependency '{}' does not name a declared role".format(d)
- )
+ raise Exception("Role dependency '{}' does not name a declared role".format(d))
if isinstance(base_role, str):
if base_role not in role_names:
raise Exception(
- "A base_role argument was provided but it does not name a declared role"
- )
+ "A base_role argument was provided but it does not name a declared role")
elif isinstance(base_role, DeclaredRole):
if base_role not in roles:
- raise Exception(
- "A base_role argument was provided but it is not a declared role"
- )
+ raise Exception("A base_role argument was provided but it is not a declared role")
elif base_role is not None:
- raise Exception(
- "The base_role argument must be a string name of a role or a role object"
- )
+ raise Exception("The base_role argument must be a string name of a role or a role object")
else:
# Set it to something falsey
base_role = str()
@@ -123,17 +118,12 @@ def declare_roles(env, roles, base_role=None, meta_role=None):
if isinstance(meta_role, str):
if meta_role not in role_names:
raise Exception(
- "A meta_role argument was provided but it does not name a declared role"
- )
+ "A meta_role argument was provided but it does not name a declared role")
elif isinstance(meta_role, DeclaredRole):
if meta_role not in roles:
- raise Exception(
- "A meta_role argument was provided but it is not a declared role"
- )
+ raise Exception("A meta_role argument was provided but it is not a declared role")
elif meta_role is not None:
- raise Exception(
- "The meta_role argument must be a string name of a role or a role object"
- )
+ raise Exception("The meta_role argument must be a string name of a role or a role object")
else:
# Set it to something falsy
meta_role = str()
@@ -199,12 +189,7 @@ def get_alias_map_entry(env, component, role):
r_entry.dependencies.add(base_c_entry)
meta_role = env.get(META_ROLE)
- if (
- meta_role
- and role != meta_role
- and meta_component
- and component != meta_component
- ):
+ if (meta_role and role != meta_role and meta_component and component != meta_component):
meta_r_entry = get_alias_map_entry(env, component, meta_role)
meta_c_r_entry = get_alias_map_entry(env, meta_component, meta_role)
meta_c_r_entry.dependencies.add(meta_r_entry)
@@ -259,23 +244,15 @@ def scan_for_transitive_install(node, env, _path):
if component_base_entry.files:
results.update(component_base_entry.files)
- if (
- base_role
- and base_component
- and component != base_component
- and role != base_role
- ):
+ if (base_role and base_component and component != base_component and role != base_role):
base_base_entry = alias_map[base_component][base_role]
if base_base_entry.files:
results.update(base_base_entry.files)
- installed_children = set(
- grandchild
- for child in node.children()
- for direct_children in child.children()
- for grandchild in direct_children.get_executor().get_all_targets()
- if direct_children.get_executor() and grandchild.has_builder()
- )
+ installed_children = set(grandchild for child in node.children()
+ for direct_children in child.children()
+ for grandchild in direct_children.get_executor().get_all_targets()
+ if direct_children.get_executor() and grandchild.has_builder())
for child in installed_children:
auto_installed_files = get_auto_installed_files(env, child)
@@ -324,11 +301,8 @@ def tag_components(env, target, **kwargs):
raise Exception("AIB_COMPONENT must be a string and contain no whitespace.")
if component is None:
- raise Exception(
- "AIB_COMPONENT must be provided; untagged targets: {}".format(
- [t.path for t in target]
- )
- )
+ raise Exception("AIB_COMPONENT must be provided; untagged targets: {}".format(
+ [t.path for t in target]))
if role is None:
raise Exception("AIB_ROLE was not provided.")
@@ -344,11 +318,8 @@ def tag_components(env, target, **kwargs):
# component or base component. These cause dependency cycles because
# get_alias_map_entry will do that wiring for us then we will try to
# map them back on themselves in our loop.
- if (
- component != env.get(BASE_COMPONENT)
- and role != env.get(META_ROLE)
- and component != env.get(META_COMPONENT)
- ):
+ if (component != env.get(BASE_COMPONENT) and role != env.get(META_ROLE)
+ and component != env.get(META_COMPONENT)):
for component in kwargs.get(REVERSE_COMPONENT_DEPENDENCIES, []):
component_dep = get_alias_map_entry(env, component, role)
component_dep.dependencies.add(entry)
@@ -386,9 +357,7 @@ def auto_install_pseudobuilder(env, target, source, **kwargs):
auto_install_mapping = env[SUFFIX_MAP].get(suffix)
if not auto_install_mapping:
- raise Exception(
- "No target provided and no auto install mapping found for:", str(s)
- )
+ raise Exception("No target provided and no auto install mapping found for:", str(s))
target_for_source = auto_install_mapping.directory
@@ -449,14 +418,10 @@ def finalize_install_dependencies(env):
alias_name = generate_alias_name(env, component, role, task)
alias = env.Alias(alias_name, func(env, component, role))
if generate_dependent_aliases:
- dependent_aliases = env.Flatten(
- [
- env.Alias(
- generate_alias_name(env, d.component, d.role, task)
- )
- for d in info.dependencies
- ]
- )
+ dependent_aliases = env.Flatten([
+ env.Alias(generate_alias_name(env, d.component, d.role, task))
+ for d in info.dependencies
+ ])
env.Alias(alias, dependent_aliases)
@@ -499,11 +464,8 @@ def add_suffix_mapping(env, suffix, role=None):
"""Map suffix to role"""
if isinstance(suffix, str):
if role not in env[ROLE_DECLARATIONS]:
- raise Exception(
- "target {} is not a known role available roles are {}".format(
- role, env[ROLE_DECLARATIONS].keys()
- )
- )
+ raise Exception("target {} is not a known role available roles are {}".format(
+ role, env[ROLE_DECLARATIONS].keys()))
env[SUFFIX_MAP][env.subst(suffix)] = role
if not isinstance(suffix, dict):
@@ -512,11 +474,8 @@ def add_suffix_mapping(env, suffix, role=None):
for _, mapping in suffix.items():
role = mapping.default_role
if role not in env[ROLE_DECLARATIONS]:
- raise Exception(
- "target {} is not a known role. Available roles are {}".format(
- target, env[ROLE_DECLARATIONS].keys()
- )
- )
+ raise Exception("target {} is not a known role. Available roles are {}".format(
+ target, env[ROLE_DECLARATIONS].keys()))
env[SUFFIX_MAP].update({env.subst(key): value for key, value in suffix.items()})
@@ -536,6 +495,7 @@ def list_components(env, **kwargs):
for key in env[ALIAS_MAP]:
print("\t", key)
+
def list_hierarchical_aib_recursive(mapping, counter=0):
if counter == 0:
print(" " * counter, mapping.id)
@@ -582,7 +542,9 @@ def list_targets():
# dedup and sort targets
targets = sorted(list(set(targets)))
- print("The following are AIB targets. Note that runtime role is implied if not specified. For example, install-mongod")
+ print(
+ "The following are AIB targets. Note that runtime role is implied if not specified. For example, install-mongod"
+ )
tasks_str = ','.join(tasks)
print(f"TASK={{{tasks_str}}}")
roles_str = ','.join(roles)
@@ -618,14 +580,13 @@ def generate(env): # pylint: disable=too-many-statements
env[SUFFIX_MAP] = {}
env[ALIAS_MAP] = defaultdict(dict)
- env.AppendUnique(
- AIB_TASKS={
- "install": auto_install_task,
- }
- )
+ env.AppendUnique(AIB_TASKS={
+ "install": auto_install_task,
+ })
env.AddMethod(
- scan_for_transitive_install_pseudobuilder, "GetTransitivelyInstalledFiles"
+ scan_for_transitive_install_pseudobuilder,
+ "GetTransitivelyInstalledFiles",
)
env.AddMethod(get_role_declaration, "GetRoleDeclaration")
env.AddMethod(get_auto_installed_files, "GetAutoInstalledFiles")
@@ -664,5 +625,6 @@ def generate(env): # pylint: disable=too-many-statements
assert base_install_builder.target_scanner is None
base_install_builder.target_scanner = SCons.Scanner.Scanner(
- function=scan_for_transitive_install, path_function=None
+ function=scan_for_transitive_install,
+ path_function=None,
)
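
For orientation: the SuffixMap wiring above amounts to a lookup table from target suffix to install directory and default role, consulted by auto_install_pseudobuilder when no explicit target is given. A standalone sketch (the directory and role values here are illustrative, not MongoDB's real mappings):

    from collections import namedtuple

    SuffixMap = namedtuple("SuffixMap", ["directory", "default_role"])

    # Illustrative entries; the real map is built through add_suffix_mapping,
    # which validates each role against ROLE_DECLARATIONS.
    suffix_map = {
        ".so": SuffixMap(directory="lib", default_role="runtime"),
        ".a": SuffixMap(directory="lib", default_role="dev"),
    }

    def install_directory_for(suffix):
        mapping = suffix_map.get(suffix)
        if not mapping:
            raise Exception("No target provided and no auto install mapping found for:", suffix)
        return mapping.directory
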
diff --git a/site_scons/site_tools/ccache.py b/site_scons/site_tools/ccache.py
index 2a4b89015d5..dc7ca4cd1e3 100644
--- a/site_scons/site_tools/ccache.py
+++ b/site_scons/site_tools/ccache.py
@@ -75,7 +75,9 @@ def exists(env):
if validated:
env['CCACHE_VERSION'] = ccache_version
else:
- print(f"Error: failed to verify ccache version >= {_ccache_version_min}, found {ccache_version}")
+ print(
+ f"Error: failed to verify ccache version >= {_ccache_version_min}, found {ccache_version}"
+ )
return validated
@@ -147,10 +149,8 @@ def generate(env):
# compiler parameter and differences in the file need to be accounted for in the
# hash result to prevent erroneous cache hits.
if "CCACHE_EXTRAFILES" in env and env["CCACHE_EXTRAFILES"]:
- env["ENV"]["CCACHE_EXTRAFILES"] = ":".join([
- denyfile.path
- for denyfile in env["CCACHE_EXTRAFILES"]
- ])
+ env["ENV"]["CCACHE_EXTRAFILES"] = ":".join(
+ [denyfile.path for denyfile in env["CCACHE_EXTRAFILES"]])
# Make a generator to expand to CCACHE in the case where we are
# not a conftest. We don't want to use ccache for configure tests
@@ -165,6 +165,7 @@ def generate(env):
if "conftest" not in str(target[0]):
return '$CCACHE'
return ''
+
env['CCACHE_GENERATOR'] = ccache_generator
# Add ccache to the relevant command lines. Wrap the reference to
diff --git a/site_scons/site_tools/compilation_db.py b/site_scons/site_tools/compilation_db.py
index 833be4a7c22..7e26b91d258 100644
--- a/site_scons/site_tools/compilation_db.py
+++ b/site_scons/site_tools/compilation_db.py
@@ -142,7 +142,11 @@ def WriteCompilationDb(target, source, env):
with open(str(target[0]), "w") as target_file:
json.dump(
- entries, target_file, sort_keys=True, indent=4, separators=(",", ": ")
+ entries,
+ target_file,
+ sort_keys=True,
+ indent=4,
+ separators=(",", ": "),
)
@@ -155,7 +159,8 @@ def generate(env, **kwargs):
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
env["COMPILATIONDB_COMSTR"] = kwargs.get(
- "COMPILATIONDB_COMSTR", "Building compilation database $TARGET"
+ "COMPILATIONDB_COMSTR",
+ "Building compilation database $TARGET",
)
components_by_suffix = itertools.chain(
@@ -181,18 +186,19 @@ def generate(env, **kwargs):
# Assumes a dictionary emitter
emitter = builder.emitter[suffix]
- builder.emitter[suffix] = SCons.Builder.ListEmitter(
- [emitter, makeEmitCompilationDbEntry(command),]
- )
+ builder.emitter[suffix] = SCons.Builder.ListEmitter([
+ emitter,
+ makeEmitCompilationDbEntry(command),
+ ])
env["BUILDERS"]["__COMPILATIONDB_Entry"] = SCons.Builder.Builder(
- action=SCons.Action.Action(CompilationDbEntryAction, None),
- )
+ action=SCons.Action.Action(CompilationDbEntryAction, None), )
env["BUILDERS"]["__COMPILATIONDB_Database"] = SCons.Builder.Builder(
action=SCons.Action.Action(WriteCompilationDb, "$COMPILATIONDB_COMSTR"),
target_scanner=SCons.Scanner.Scanner(
- function=ScanCompilationDb, node_class=None
+ function=ScanCompilationDb,
+ node_class=None,
),
)
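
WriteCompilationDb above serializes its entries in the standard compile_commands.json shape consumed by clangd and similar tooling; with the sort_keys/indent/separators arguments shown, the output looks roughly like this (paths made up for illustration):

    import json

    entries = [{
        "directory": "/home/user/mongo",               # where the command runs
        "command": "g++ -o src/foo.o -c src/foo.cpp",  # exact compile line
        "file": "src/foo.cpp",                         # the translation unit
        "output": "src/foo.o",
    }]

    with open("compile_commands.json", "w") as target_file:
        json.dump(entries, target_file, sort_keys=True, indent=4, separators=(",", ": "))
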
diff --git a/site_scons/site_tools/distsrc.py b/site_scons/site_tools/distsrc.py
index 95200775bfd..83f47f2ab3f 100644
--- a/site_scons/site_tools/distsrc.py
+++ b/site_scons/site_tools/distsrc.py
@@ -61,7 +61,10 @@ class DistSrcArchive:
)
elif filename.endswith("zip"):
return DistSrcZipArchive(
- "zip", zipfile.ZipFile(filename, "a"), filename, "a",
+ "zip",
+ zipfile.ZipFile(filename, "a"),
+ filename,
+ "a",
)
def close(self):
@@ -89,13 +92,13 @@ class DistSrcTarArchive(DistSrcArchive):
)
def append_file_contents(
- self,
- filename,
- file_contents,
- mtime=None,
- mode=0o644,
- uname="root",
- gname="root",
+ self,
+ filename,
+ file_contents,
+ mtime=None,
+ mode=0o644,
+ uname="root",
+ gname="root",
):
if mtime is None:
mtime = time.time()
@@ -109,7 +112,9 @@ class DistSrcTarArchive(DistSrcArchive):
if self.archive_mode == "r":
self.archive_file.close()
self.archive_file = tarfile.open(
- self.archive_name, "a", format=tarfile.PAX_FORMAT,
+ self.archive_name,
+ "a",
+ format=tarfile.PAX_FORMAT,
)
self.archive_mode = "a"
self.archive_file.addfile(file_metadata, fileobj=file_buf)
@@ -141,13 +146,13 @@ class DistSrcZipArchive(DistSrcArchive):
)
def append_file_contents(
- self,
- filename,
- file_contents,
- mtime=None,
- mode=0o644,
- uname="root",
- gname="root",
+ self,
+ filename,
+ file_contents,
+ mtime=None,
+ mode=0o644,
+ uname="root",
+ gname="root",
):
if mtime is None:
mtime = time.time()
@@ -187,15 +192,14 @@ def distsrc_action_generator(source, target, env, for_signature):
print("Invalid file format for distsrc. Must be tar or zip file")
env.Exit(1)
- git_cmd = (
- '"%s" archive --format %s --output %s --prefix ${MONGO_DIST_SRC_PREFIX} HEAD'
- % (git_path, target_ext, target[0])
- )
+ git_cmd = ('"%s" archive --format %s --output %s --prefix ${MONGO_DIST_SRC_PREFIX} HEAD' %
+ (git_path, target_ext, target[0]))
return [
SCons.Action.Action(git_cmd, "Running git archive for $TARGET"),
SCons.Action.Action(
- run_distsrc_callbacks, "Running distsrc callbacks for $TARGET"
+ run_distsrc_callbacks,
+ "Running distsrc callbacks for $TARGET",
),
]
@@ -206,9 +210,7 @@ def add_callback(env, fn):
def generate(env, **kwargs):
env.AddMethod(add_callback, "AddDistSrcCallback")
- env["BUILDERS"]["__DISTSRC"] = SCons.Builder.Builder(
- generator=distsrc_action_generator,
- )
+ env["BUILDERS"]["__DISTSRC"] = SCons.Builder.Builder(generator=distsrc_action_generator, )
def DistSrc(env, target, **kwargs):
result = env.__DISTSRC(target=target, source=[], **kwargs)
diff --git a/site_scons/site_tools/forceincludes.py b/site_scons/site_tools/forceincludes.py
index 6d535bf0ba0..7807ca19f7e 100644
--- a/site_scons/site_tools/forceincludes.py
+++ b/site_scons/site_tools/forceincludes.py
@@ -22,6 +22,7 @@
import SCons
+
def _add_scanner(builder):
# We are taking over the target scanner here. If we want to not do
# that we need to invent a ListScanner concept to inject. What if
@@ -35,7 +36,9 @@ def _add_scanner(builder):
# If all nodes could not be resolved, there are missing headers.
if not all(fis):
- missing_headers = [header for node, header in zip(fis, env.get('FORCEINCLUDES')) if not node]
+ missing_headers = [
+ header for node, header in zip(fis, env.get('FORCEINCLUDES')) if not node
+ ]
errstring = f"Could not find force include header(s): {missing_headers} in any path in CPPPATH:\n"
for cpppath in env.get('CPPPATH', []):
errstring += f"\t{env.Dir(cpppath).path}\n"
@@ -60,6 +63,7 @@ def _add_scanner(builder):
argument=builder.source_scanner,
)
+
def generate(env, **kwargs):
if not 'FORCEINCLUDEPREFIX' in env:
if 'msvc' in env.get('TOOLS', []):
@@ -82,11 +86,11 @@ def generate(env, **kwargs):
# would enable discovery.
CCFLAGS=[
'$_FORCEINCLUDES',
- ]
- )
+ ])
for object_builder in SCons.Tool.createObjBuilders(env):
_add_scanner(object_builder)
+
def exists(env):
return True
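
The FORCEINCLUDEPREFIX default that generate() picks is toolchain-specific because compilers spell forced includes differently: MSVC uses /FI, while GCC and Clang use -include. A hedged sketch of that selection (the assigned values are assumptions here, since the hunk above truncates them):

    def _default_forceinclude_prefix(env):
        # MSVC: cl.exe /FIheader.h ; GCC/Clang: gcc -include header.h
        if 'msvc' in env.get('TOOLS', []):
            return '/FI'
        return '-include '
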
diff --git a/site_scons/site_tools/git_decider.py b/site_scons/site_tools/git_decider.py
index 0cb219edc5e..b092b743236 100644
--- a/site_scons/site_tools/git_decider.py
+++ b/site_scons/site_tools/git_decider.py
@@ -20,6 +20,7 @@
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
+
def generate(env, **kwargs):
# Grab the existing decider functions out of the environment
diff --git a/site_scons/site_tools/gziptool.py b/site_scons/site_tools/gziptool.py
index 6d6b0099f97..8f136642dd4 100644
--- a/site_scons/site_tools/gziptool.py
+++ b/site_scons/site_tools/gziptool.py
@@ -34,10 +34,13 @@ def GZipAction(target, source, env, **kw):
def generate(env, **kwargs):
env["BUILDERS"]["__GZIPTOOL"] = SCons.Builder.Builder(
- action=SCons.Action.Action(GZipAction, "$GZIPTOOL_COMSTR")
- )
+ action=SCons.Action.Action(
+ GZipAction,
+ "$GZIPTOOL_COMSTR",
+ ))
env["GZIPTOOL_COMSTR"] = kwargs.get(
- "GZIPTOOL_COMSTR", "Compressing $TARGET with gzip"
+ "GZIPTOOL_COMSTR",
+ "Compressing $TARGET with gzip",
)
def GZipTool(env, target, source, **kwargs):
diff --git a/site_scons/site_tools/icecream.py b/site_scons/site_tools/icecream.py
index c95a542f008..f7ce0ecd23c 100644
--- a/site_scons/site_tools/icecream.py
+++ b/site_scons/site_tools/icecream.py
@@ -87,9 +87,8 @@ def generate(env):
# icecc lower then 1.1 supports addfile remapping accidentally
# and above it adds an empty cpuinfo so handle cpuinfo issues for icecream
# below version 1.1
- if (env['ICECREAM_VERSION'] <= parse_version('1.1')
- and env.ToolchainIs("clang")
- and os.path.exists('/proc/cpuinfo')):
+ if (env['ICECREAM_VERSION'] <= parse_version('1.1') and env.ToolchainIs("clang")
+ and os.path.exists('/proc/cpuinfo')):
env.AppendUnique(ICECC_CREATE_ENV_ADDFILES=[('/proc/cpuinfo', '/dev/null')])
# Absoluteify, so we can derive ICERUN
@@ -115,9 +114,10 @@ def generate(env):
env["CXX"] = env.WhereIs("$CXX")
# Set up defaults for configuration options
- env['ICECREAM_TARGET_DIR'] = env.Dir(
- env.get('ICECREAM_TARGET_DIR', '#./.icecream')
- )
+ env['ICECREAM_TARGET_DIR'] = env.Dir(env.get(
+ 'ICECREAM_TARGET_DIR',
+ '#./.icecream',
+ ), )
verbose = env.get('ICECREAM_VERBOSE', False)
env['ICECC_DEBUG'] = env.get('ICECC_DEBUG', False)
@@ -125,9 +125,7 @@ def generate(env):
# environment doesn't need to see or know about. Make a custom env
# that we use consistently from here to where we end up setting
# ICECREAM_RUN_ICECC in the user env.
- setupEnv = env.Clone(
- NINJA_SKIP=True
- )
+ setupEnv = env.Clone(NINJA_SKIP=True)
if 'ICECC_VERSION' in setupEnv and bool(setupEnv['ICECC_VERSION']):
@@ -161,7 +159,8 @@ def generate(env):
source=[setupEnv.Value(quoted)],
action=SCons.Action.Action(
f"{cmdstr} -o $TARGET $ICECC_VERSION_URL",
- "Downloading compiler package from $ICECC_VERSION_URL" if not verbose else str(),
+ "Downloading compiler package from $ICECC_VERSION_URL"
+ if not verbose else str(),
),
)[0]
@@ -171,8 +170,8 @@ def generate(env):
if not icecc_version_file.exists():
raise Exception(
- 'The ICECC_VERSION variable set set to {}, but this file does not exist'.format(icecc_version_file)
- )
+ 'The ICECC_VERSION variable set set to {}, but this file does not exist'.format(
+ icecc_version_file, ))
# This is what we are going to call the file names as known to SCons on disk
setupEnv["ICECC_VERSION_ID"] = "user_provided." + icecc_version_file.name
@@ -180,27 +179,27 @@ def generate(env):
else:
setupEnv["ICECC_COMPILER_TYPE"] = setupEnv.get(
- "ICECC_COMPILER_TYPE", os.path.basename(setupEnv.WhereIs("${CC}"))
+ "ICECC_COMPILER_TYPE",
+ os.path.basename(setupEnv.WhereIs("${CC}")),
)
# This is what we are going to call the file names as known to SCons on disk. We do the
# subst early so that we can call `replace` on the result.
setupEnv["ICECC_VERSION_ID"] = setupEnv.subst(
- "icecc-create-env.${CC}${CXX}.tar.gz").replace("/", "_"
- )
+ "icecc-create-env.${CC}${CXX}.tar.gz").replace("/", "_")
setupEnv["ICECC_VERSION"] = icecc_version_file = setupEnv.Command(
target="$ICECREAM_TARGET_DIR/$ICECC_VERSION_ID",
source=[
"$ICECC_CREATE_ENV",
"$CC",
- "$CXX"
+ "$CXX",
],
action=SCons.Action.Action(
icecc_create_env,
"Generating icecream compiler package: $TARGET" if not verbose else str(),
generator=True,
- )
+ ),
)[0]
# At this point, all paths above have produced a file of some sort. We now move on
@@ -234,38 +233,37 @@ def generate(env):
# file as found on the users filesystem or from
# icecc-create-env. We put the absolute path to that filename into
# a file that we can read from.
- icecc_version_info = setupEnv.File(setupEnv.Command(
- target=[
- '${ICECREAM_TARGET_BASE}.sha256',
- '${ICECREAM_TARGET_BASE}.sha256.path',
- ],
- source=icecc_version_file,
- action=SCons.Action.ListAction(
- [
-
- # icecc-create-env run twice with the same input will
- # create files with identical contents, and identical
- # filenames, but with different hashes because it
- # includes timestamps. So we compute a new hash based
- # on the actual stream contents of the file by
- # untarring it into shasum.
- SCons.Action.Action(
- "tar xfO ${SOURCES[0]} | shasum -b -a 256 - | awk '{ print $1 }' > ${TARGETS[0]}",
- "Calculating sha256 sum of ${SOURCES[0]}" if not verbose else str(),
- ),
-
- SCons.Action.Action(
- "ln -f ${SOURCES[0]} ${TARGETS[0].dir}/icecream_py_sha256_$$(cat ${TARGETS[0]}).tar.gz",
- "Linking ${SOURCES[0]} to its sha256 sum name" if not verbose else str(),
- ),
-
- SCons.Action.Action(
- "echo ${TARGETS[0].dir.abspath}/icecream_py_sha256_$$(cat ${TARGETS[0]}).tar.gz > ${TARGETS[1]}",
- "Storing sha256 sum name for ${SOURCES[0]} to ${TARGETS[1]}" if not verbose else str(),
- )
+ icecc_version_info = setupEnv.File(
+ setupEnv.Command(
+ target=[
+ '${ICECREAM_TARGET_BASE}.sha256',
+ '${ICECREAM_TARGET_BASE}.sha256.path',
],
- )
- ))
+ source=icecc_version_file,
+ action=SCons.Action.ListAction(
+ [
+
+ # icecc-create-env run twice with the same input will
+ # create files with identical contents, and identical
+ # filenames, but with different hashes because it
+ # includes timestamps. So we compute a new hash based
+ # on the actual stream contents of the file by
+ # untarring it into shasum.
+ SCons.Action.Action(
+ "tar xfO ${SOURCES[0]} | shasum -b -a 256 - | awk '{ print $1 }' > ${TARGETS[0]}",
+ "Calculating sha256 sum of ${SOURCES[0]}" if not verbose else str(),
+ ),
+ SCons.Action.Action(
+ "ln -f ${SOURCES[0]} ${TARGETS[0].dir}/icecream_py_sha256_$$(cat ${TARGETS[0]}).tar.gz",
+ "Linking ${SOURCES[0]} to its sha256 sum name" if not verbose else str(),
+ ),
+ SCons.Action.Action(
+ "echo ${TARGETS[0].dir.abspath}/icecream_py_sha256_$$(cat ${TARGETS[0]}).tar.gz > ${TARGETS[1]}",
+ "Storing sha256 sum name for ${SOURCES[0]} to ${TARGETS[1]}"
+ if not verbose else str(),
+ ),
+ ], ),
+ ), )
# We can't allow these to interact with the cache because the
# second action produces a file unknown to SCons. If caching were
@@ -280,13 +278,11 @@ def generate(env):
# wrapper script.
icecc_version_string_value = setupEnv.Command(
target=setupEnv.Value(None),
- source=[
- icecc_version_info[1]
- ],
+ source=[icecc_version_info[1]],
action=SCons.Action.Action(
lambda env, target, source: target[0].write(source[0].get_text_contents()),
"Reading compiler package sha256 sum path from $SOURCE" if not verbose else str(),
- )
+ ),
)[0]
def icecc_version_string_generator(source, target, env, for_signature):
@@ -319,9 +315,9 @@ def generate(env):
'',
],
SUBST_DICT={
- '@icecc@' : '$ICECC',
- '@icecc_version@' : '$ICECC_VERSION',
- '@icecc_version_arch@' : icecc_version_arch_string,
+ '@icecc@': '$ICECC',
+ '@icecc_version@': '$ICECC_VERSION',
+ '@icecc_version_arch@': icecc_version_arch_string,
},
# Don't change around the suffixes
@@ -333,7 +329,7 @@ def generate(env):
# so that it knows to invoke SCons to produce it as part of
# TEMPLATE expansion. Since we have set NINJA_SKIP=True for
# setupEnv, we need to reverse that here.
- NINJA_SKIP=False
+ NINJA_SKIP=False,
)
setupEnv.AddPostAction(
@@ -405,8 +401,7 @@ def generate(env):
continue
base = emitterdict[suffix]
emitterdict[suffix] = SCons.Builder.ListEmitter(
- [base, icecc_toolchain_dependency_emitter]
- )
+ [base, icecc_toolchain_dependency_emitter], )
# Check whether ccache is requested and is a valid tool.
if "CCACHE" in env:
@@ -479,10 +474,10 @@ def generate(env):
shell_env = existing_gen(env, target, source)
else:
shell_env = env['ENV'].copy()
- shell_env['CCACHE_PREFIX'] = env.File(env.subst("$ICECC_GENERATOR", target=target, source=source)).abspath
+ shell_env['CCACHE_PREFIX'] = env.File(
+ env.subst("$ICECC_GENERATOR", target=target, source=source)).abspath
return shell_env
-
env['SHELL_ENV_GENERATOR'] = icecc_ccache_prefix_gen
else:
@@ -508,9 +503,10 @@ def generate(env):
# jobs, figure out what sort they are and extend this part of the
# setup.
def icerun_generator(target, source, env, for_signature):
- if "conftest" not in str(target[0]):
- return '$ICERUN'
- return ''
+ if "conftest" not in str(target[0]):
+ return '$ICERUN'
+ return ''
+
env['ICERUN_GENERATOR'] = icerun_generator
icerun_commands = [
@@ -575,7 +571,9 @@ def exists(env):
else:
icecc_create_env_bin = env.File("ICECC").File("icecc-create-env")
if not icecc_create_env_bin:
- print(f"Error: the icecc-create-env utility does not exist at {icecc_create_env_bin} as expected")
+ print(
+ f"Error: the icecc-create-env utility does not exist at {icecc_create_env_bin} as expected"
+ )
for line in pipe.stdout:
line = line.decode("utf-8")
@@ -594,6 +592,8 @@ def exists(env):
if validated:
env['ICECREAM_VERSION'] = icecc_version
else:
- print(f"Error: failed to verify icecream version >= {_icecream_version_min}, found {icecc_version}")
+ print(
+ f"Error: failed to verify icecream version >= {_icecream_version_min}, found {icecc_version}"
+ )
return validated
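
The three-step ListAction above exists because icecc-create-env embeds timestamps, so byte-identical inputs yield archives with different hashes; hashing the uncompressed tar stream sidesteps that. The same idea in pure Python (a sketch of the concept, not the shell pipeline the tool actually runs):

    import hashlib
    import tarfile

    def content_sha256(package_path):
        # Hash only file contents, ignoring archive metadata such as
        # timestamps: the Python analogue of `tar xfO ... | shasum -a 256 -`.
        digest = hashlib.sha256()
        with tarfile.open(package_path, "r:gz") as tar:
            for member in tar:
                if member.isfile():
                    digest.update(tar.extractfile(member).read())
        return digest.hexdigest()
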
diff --git a/site_scons/site_tools/idl_tool.py b/site_scons/site_tools/idl_tool.py
index 04b0db0cd62..dd3b5b65ee3 100755
--- a/site_scons/site_tools/idl_tool.py
+++ b/site_scons/site_tools/idl_tool.py
@@ -19,7 +19,6 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-
"""IDL Compiler Scons Tool."""
import os.path
@@ -39,9 +38,7 @@ def idlc_emitter(target, source, env):
first_source = str(source[0])
if not first_source.endswith(".idl"):
- raise ValueError(
- "Bad idl file name '%s', it must end with '.idl' " % (first_source)
- )
+ raise ValueError("Bad idl file name '%s', it must end with '.idl' " % (first_source))
base_file_name, _ = SCons.Util.splitext(str(target[0]))
target_source = env.File(base_file_name + "_gen.cpp")
@@ -79,16 +76,12 @@ def idl_scanner(node, env, path):
try:
with open(str(node), encoding="utf-8") as file_stream:
- parsed_doc = idlc.parser.parse(
- file_stream, str(node), resolver
- )
+ parsed_doc = idlc.parser.parse(file_stream, str(node), resolver)
except OSError:
return nodes_deps_list
if not parsed_doc.errors and parsed_doc.spec.imports is not None:
- nodes_deps_list.extend(
- [env.File(d) for d in sorted(parsed_doc.spec.imports.dependencies)]
- )
+ nodes_deps_list.extend([env.File(d) for d in sorted(parsed_doc.spec.imports.dependencies)])
setattr(node.attributes, "IDL_NODE_DEPS", nodes_deps_list)
return nodes_deps_list
@@ -122,20 +115,20 @@ def generate(env):
env["IDLC"] = "$PYTHON buildscripts/idl/idlc.py"
base_dir = env.Dir("$BUILD_DIR").path
env["IDLCFLAGS"] = [
- "--include", "src",
- "--base_dir", base_dir,
- "--target_arch", "$TARGET_ARCH",
+ "--include",
+ "src",
+ "--base_dir",
+ base_dir,
+ "--target_arch",
+ "$TARGET_ARCH",
]
env["IDLCCOM"] = "$IDLC $IDLCFLAGS --header ${TARGETS[1]} --output ${TARGETS[0]} $SOURCES"
env["IDLCCOMSTR"] = ("Generating ${TARGETS[0]}"
- if not env.get("VERBOSE", "").lower() in ['true', '1']
- else None)
+ if not env.get("VERBOSE", "").lower() in ['true', '1'] else None)
env["IDLCSUFFIX"] = ".idl"
global IDL_GLOBAL_DEPS
- IDL_GLOBAL_DEPS = env.Glob("#buildscripts/idl/*.py") + env.Glob(
- "#buildscripts/idl/idl/*.py"
- )
+ IDL_GLOBAL_DEPS = env.Glob("#buildscripts/idl/*.py") + env.Glob("#buildscripts/idl/idl/*.py")
env["IDL_HAS_INLINE_DEPENDENCIES"] = True
diff --git a/site_scons/site_tools/incremental_link.py b/site_scons/site_tools/incremental_link.py
index ebcf3a87dcc..9a9cf3748c4 100644
--- a/site_scons/site_tools/incremental_link.py
+++ b/site_scons/site_tools/incremental_link.py
@@ -32,9 +32,10 @@ def generate(env):
builders = env["BUILDERS"]
for builder in ("Program", "SharedLibrary", "LoadableModule"):
emitter = builders[builder].emitter
- builders[builder].emitter = SCons.Builder.ListEmitter(
- [emitter, _tag_as_precious,]
- )
+ builders[builder].emitter = SCons.Builder.ListEmitter([
+ emitter,
+ _tag_as_precious,
+ ])
def exists(env):
@@ -46,12 +47,8 @@ def exists(env):
# On posix platforms, excluding darwin, we may have enabled
# incremental linking. Check for the relevant flags.
- if (
- env.TargetOSIs("posix")
- and not env.TargetOSIs("darwin")
- and "-fuse-ld=gold" in env["LINKFLAGS"]
- and "-Wl,--incremental" in env["LINKFLAGS"]
- ):
+ if (env.TargetOSIs("posix") and not env.TargetOSIs("darwin")
+ and "-fuse-ld=gold" in env["LINKFLAGS"] and "-Wl,--incremental" in env["LINKFLAGS"]):
return True
return False
diff --git a/site_scons/site_tools/jstoh.py b/site_scons/site_tools/jstoh.py
index 912c495891f..adcb69ed2c5 100755
--- a/site_scons/site_tools/jstoh.py
+++ b/site_scons/site_tools/jstoh.py
@@ -52,10 +52,8 @@ def jsToHeader(target, source):
h.append("0};")
# symbols aren't exported w/o this
h.append("extern const JSFile %s;" % objname)
- h.append(
- 'const JSFile %s = { "%s", StringData(%s, sizeof(%s) - 1) };'
- % (objname, filename.replace("\\", "/"), stringname, stringname)
- )
+ h.append('const JSFile %s = { "%s", StringData(%s, sizeof(%s) - 1) };' %
+ (objname, filename.replace("\\", "/"), stringname, stringname))
h.append("} // namespace JSFiles")
h.append("} // namespace mongo")
diff --git a/site_scons/site_tools/mongo_benchmark.py b/site_scons/site_tools/mongo_benchmark.py
index 5fe35b038d3..e52e03a6809 100644
--- a/site_scons/site_tools/mongo_benchmark.py
+++ b/site_scons/site_tools/mongo_benchmark.py
@@ -19,7 +19,6 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-
"""
Pseudo-builders for building and registering benchmarks.
"""
@@ -27,6 +26,7 @@ from SCons.Script import Action
from site_scons.mongo import insort_wrapper
+
def exists(env):
return True
@@ -54,9 +54,8 @@ def build_benchmark(env, target, source, **kwargs):
benchmark_test_components = {"tests"}
if "AIB_COMPONENTS_EXTRA" in kwargs:
- benchmark_test_components = set(kwargs["AIB_COMPONENTS_EXTRA"]).union(
- benchmark_test_components
- )
+ benchmark_test_components = set(
+ kwargs["AIB_COMPONENTS_EXTRA"]).union(benchmark_test_components)
kwargs["AIB_COMPONENTS_EXTRA"] = list(benchmark_test_components)
diff --git a/site_scons/site_tools/mongo_integrationtest.py b/site_scons/site_tools/mongo_integrationtest.py
index cbaadeb610d..af400ab805e 100644
--- a/site_scons/site_tools/mongo_integrationtest.py
+++ b/site_scons/site_tools/mongo_integrationtest.py
@@ -19,7 +19,6 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-
"""
Pseudo-builders for building and registering integration tests.
"""
@@ -27,6 +26,7 @@ from SCons.Script import Action
from site_scons.mongo import insort_wrapper
+
def exists(env):
return True
@@ -48,9 +48,8 @@ def build_cpp_integration_test(env, target, source, **kwargs):
integration_test_components = {"tests"}
if "AIB_COMPONENTS_EXTRA" in kwargs:
- kwargs["AIB_COMPONENTS_EXTRA"] = set(kwargs["AIB_COMPONENTS_EXTRA"]).union(
- integration_test_components
- )
+ kwargs["AIB_COMPONENTS_EXTRA"] = set(
+ kwargs["AIB_COMPONENTS_EXTRA"]).union(integration_test_components)
else:
kwargs["AIB_COMPONENTS_EXTRA"] = list(integration_test_components)
diff --git a/site_scons/site_tools/mongo_libfuzzer.py b/site_scons/site_tools/mongo_libfuzzer.py
index bcbc0412688..90a0db807c7 100644
--- a/site_scons/site_tools/mongo_libfuzzer.py
+++ b/site_scons/site_tools/mongo_libfuzzer.py
@@ -19,7 +19,6 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-
"""Pseudo-builders for building and registering libfuzzer tests.
"""
from SCons.Script import Action
@@ -48,17 +47,14 @@ def build_cpp_libfuzzer_test(env, target, source, **kwargs):
myenv.Prepend(LINKFLAGS=[sanitizer_option])
libfuzzer_test_components = {"tests", "fuzzertests"}
- if "AIB_COMPONENT" in kwargs and not kwargs["AIB_COMPONENTS"].endswith(
- "-fuzzertest"
- ):
+ if "AIB_COMPONENT" in kwargs and not kwargs["AIB_COMPONENTS"].endswith("-fuzzertest"):
kwargs["AIB_COMPONENT"] += "-fuzzertest"
if "AIB_COMPONENTS_EXTRA" in kwargs:
- libfuzzer_test_components = set(kwargs["AIB_COMPONENTS_EXTRA"]).union(
- libfuzzer_test_components
- )
+ libfuzzer_test_components = set(
+ kwargs["AIB_COMPONENTS_EXTRA"]).union(libfuzzer_test_components)
- kwargs["AIB_COMPONENTS_EXTRA"] = list(libfuzzer_test_components)
+ kwargs["AIB_COMPONENTS_EXTRA"] = list(libfuzzer_test_components)
# Fuzzer tests are inherenently undecidable (see
# mongo_test_execution.py for details on undecidability).
diff --git a/site_scons/site_tools/mongo_test_execution.py b/site_scons/site_tools/mongo_test_execution.py
index 2527ba63a12..5c233ef97d0 100644
--- a/site_scons/site_tools/mongo_test_execution.py
+++ b/site_scons/site_tools/mongo_test_execution.py
@@ -29,6 +29,7 @@ import auto_install_binaries
_proof_scanner_cache_key = "proof_scanner_cache"
_associated_proof = "associated_proof_key"
+
def proof_generator_command_scanner_func(node, env, path):
results = getattr(node.attributes, _proof_scanner_cache_key, None)
if results is not None:
@@ -37,20 +38,22 @@ def proof_generator_command_scanner_func(node, env, path):
setattr(node.attributes, _proof_scanner_cache_key, results)
return results
+
proof_generator_command_scanner = SCons.Scanner.Scanner(
function=proof_generator_command_scanner_func,
path_function=None,
- recursive=True
+ recursive=True,
)
+
def auto_prove_task(env, component, role):
entry = auto_install_binaries.get_alias_map_entry(env, component, role)
return [
- getattr(f.attributes, _associated_proof)
- for f in entry.files
+ getattr(f.attributes, _associated_proof) for f in entry.files
if hasattr(f.attributes, _associated_proof)
]
+
def generate_test_execution_aliases(env, test):
installed = [test]
if env.get("AUTO_INSTALL_ENABLED", False) and env.GetAutoInstalledFiles(test):
@@ -90,7 +93,8 @@ def generate_test_execution_aliases(env, test):
verbose_source_command = test_env.Command(
target=f"#+{target_name}-{source_name}",
source=installed[0],
- action="$( $ICERUN $) ${SOURCES[0]} -fileNameFilter $TEST_SOURCE_FILE_NAME $UNITTEST_FLAGS",
+ action=
+ "$( $ICERUN $) ${SOURCES[0]} -fileNameFilter $TEST_SOURCE_FILE_NAME $UNITTEST_FLAGS",
TEST_SOURCE_FILE_NAME=source_name,
NINJA_POOL="console",
)
@@ -102,7 +106,10 @@ def generate_test_execution_aliases(env, test):
alias = env.Alias(f'+{source_name}', verbose_source_command)
if len(alias[0].children()) > 1:
- raise SCons.Errors.BuildError(alias[0].children()[0], f"Multiple unit test programs contain a source file named '{source_name}' which would result in an ambiguous test execution alias. Unit test source filenames are required to be globally unique.")
+ raise SCons.Errors.BuildError(
+ alias[0].children()[0],
+ f"Multiple unit test programs contain a source file named '{source_name}' which would result in an ambiguous test execution alias. Unit test source filenames are required to be globally unique."
+ )
proof_generator_command = test_env.Command(
target=[
@@ -110,11 +117,8 @@ def generate_test_execution_aliases(env, test):
'${SOURCE}.status',
],
source=installed[0],
- action=SCons.Action.Action(
- "$PROOF_GENERATOR_COMMAND",
- "$PROOF_GENERATOR_COMSTR"
- ),
- source_scanner=proof_generator_command_scanner
+ action=SCons.Action.Action("$PROOF_GENERATOR_COMMAND", "$PROOF_GENERATOR_COMSTR"),
+ source_scanner=proof_generator_command_scanner,
)
# We assume tests are provable by default, but some tests may not
@@ -128,10 +132,7 @@ def generate_test_execution_aliases(env, test):
proof_analyzer_command = test_env.Command(
target='${SOURCES[1].base}.proof',
source=proof_generator_command,
- action=SCons.Action.Action(
- "$PROOF_ANALYZER_COMMAND",
- "$PROOF_ANALYZER_COMSTR"
- )
+ action=SCons.Action.Action("$PROOF_ANALYZER_COMMAND", "$PROOF_ANALYZER_COMSTR"),
)
proof_analyzer_alias = env.Alias(
@@ -143,6 +144,7 @@ def generate_test_execution_aliases(env, test):
# TODO: Should we enable proof at the file level?
+
def exists(env):
return True
@@ -153,14 +155,13 @@ def generate(env):
env.AddMethod(generate_test_execution_aliases, "GenerateTestExecutionAliases")
env["TEST_EXECUTION_SUFFIX_DENYLIST"] = env.get(
- "TEST_EXECUTION_SUFFIX_DENYLIST", [".in"]
+ "TEST_EXECUTION_SUFFIX_DENYLIST",
+ [".in"],
)
- env.AppendUnique(
- AIB_TASKS={
- "prove": (auto_prove_task, False),
- }
- )
+ env.AppendUnique(AIB_TASKS={
+ "prove": (auto_prove_task, False),
+ })
# TODO: Should we have some sort of prefix_xdir for the output location for these? Something like
# $PREFIX_VARCACHE and which in our build is pre-populated to $PREFIX/var/cache/mongo or similar?
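
The proof scanner above caches its scan result on the node itself, a standard SCons trick to keep repeated scans of the same node cheap; in isolation (compute_proof_targets is a hypothetical stand-in for the real scan):

    _proof_scanner_cache_key = "proof_scanner_cache"

    def proof_generator_command_scanner_func(node, env, path):
        # Reuse the result if this node was already scanned.
        results = getattr(node.attributes, _proof_scanner_cache_key, None)
        if results is not None:
            return results
        results = compute_proof_targets(node, env)  # hypothetical helper
        setattr(node.attributes, _proof_scanner_cache_key, results)
        return results
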
diff --git a/site_scons/site_tools/mongo_test_list.py b/site_scons/site_tools/mongo_test_list.py
index 1b02c52eb8e..a000c85cca2 100644
--- a/site_scons/site_tools/mongo_test_list.py
+++ b/site_scons/site_tools/mongo_test_list.py
@@ -19,7 +19,6 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-
"""Pseudo-builders for building test lists for Resmoke"""
import SCons
@@ -63,9 +62,9 @@ def test_list_builder_action(env, target, source):
TEST_LIST_BUILDER = SCons.Builder.Builder(
action=SCons.Action.FunctionAction(
- test_list_builder_action, {"cmdstr": "Generating $TARGETS"},
- )
-)
+ test_list_builder_action,
+ {"cmdstr": "Generating $TARGETS"},
+ ))
def exists(env):
diff --git a/site_scons/site_tools/mongo_unittest.py b/site_scons/site_tools/mongo_unittest.py
index 33373282606..f06a64e191d 100644
--- a/site_scons/site_tools/mongo_unittest.py
+++ b/site_scons/site_tools/mongo_unittest.py
@@ -19,12 +19,12 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-
"""Pseudo-builders for building and registering unit tests."""
from SCons.Script import Action
from site_scons.mongo import insort_wrapper
+
def exists(env):
return True
@@ -53,9 +53,8 @@ def build_cpp_unit_test(env, target, source, **kwargs):
unit_test_components = {"tests"}
if "AIB_COMPONENTS_EXTRA" in kwargs:
- kwargs["AIB_COMPONENTS_EXTRA"] = set(kwargs["AIB_COMPONENTS_EXTRA"]).union(
- unit_test_components
- )
+ kwargs["AIB_COMPONENTS_EXTRA"] = set(
+ kwargs["AIB_COMPONENTS_EXTRA"]).union(unit_test_components)
else:
kwargs["AIB_COMPONENTS_EXTRA"] = list(unit_test_components)
diff --git a/site_scons/site_tools/ninja.py b/site_scons/site_tools/ninja.py
index 1c76bd92478..df42f03f884 100644
--- a/site_scons/site_tools/ninja.py
+++ b/site_scons/site_tools/ninja.py
@@ -19,7 +19,6 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-
"""Generate build.ninja files from SCons aliases."""
import sys
@@ -79,9 +78,10 @@ def _mkdir_action_function(env, node):
# to an invalid ninja file.
"variables": {
# On Windows mkdir "-p" is always on
- "cmd": "mkdir {args}".format(
- args = ' '.join(get_outputs(node)) + " & exit /b 0" if env["PLATFORM"] == "win32" else "-p " + ' '.join(get_outputs(node)),
- ),
+ "cmd":
+ "mkdir {args}".format(
+ args=' '.join(get_outputs(node)) + " & exit /b 0"
+ if env["PLATFORM"] == "win32" else "-p " + ' '.join(get_outputs(node)), ),
},
}
@@ -130,9 +130,7 @@ def alias_to_ninja_build(node):
return {
"outputs": get_outputs(node),
"rule": "phony",
- "implicit": [
- get_path(src_file(n)) for n in node.children() if is_valid_dependent_node(n)
- ],
+ "implicit": [get_path(src_file(n)) for n in node.children() if is_valid_dependent_node(n)],
}
@@ -140,18 +138,22 @@ def get_order_only(node):
"""Return a list of order only dependencies for node."""
if node.prerequisites is None:
return []
- return [get_path(src_file(prereq)) for prereq in node.prerequisites if is_valid_dependent_node(prereq)]
+ return [
+ get_path(src_file(prereq)) for prereq in node.prerequisites
+ if is_valid_dependent_node(prereq)
+ ]
def get_dependencies(node, skip_sources=False):
"""Return a list of dependencies for node."""
if skip_sources:
return [
- get_path(src_file(child))
- for child in node.children()
+ get_path(src_file(child)) for child in node.children()
if child not in node.sources and is_valid_dependent_node(child)
]
- return [get_path(src_file(child)) for child in node.children() if is_valid_dependent_node(child)]
+ return [
+ get_path(src_file(child)) for child in node.children() if is_valid_dependent_node(child)
+ ]
def get_inputs(node, skip_unknown_types=False):
@@ -180,10 +182,12 @@ def get_inputs(node, skip_unknown_types=False):
else:
if skip_unknown_types:
continue
- raise Exception("Can't process {} node '{}' as an input for '{}'".format(
- type(input_node),
- str(input_node),
- str(node)))
+ raise Exception(
+ "Can't process {} node '{}' as an input for '{}'".format(
+ type(input_node),
+ str(input_node),
+ str(node),
+ ), )
# convert node items into raw paths/aliases for ninja
return [get_path(src_file(o)) for o in ninja_nodes]
@@ -204,6 +208,7 @@ def get_outputs(node):
return outputs
+
def generate_depfile(env, node, dependencies):
"""
Ninja tool function for writing a depfile. The depfile should include
@@ -239,6 +244,7 @@ def generate_depfile(env, node, dependencies):
with open(depfile, 'w') as f:
f.write(depfile_contents)
+
class SConsToNinjaTranslator:
"""Translates SCons Actions into Ninja build objects."""
@@ -330,25 +336,19 @@ class SConsToNinjaTranslator:
if handler is not None:
return handler(node.env if node.env else self.env, node)
- raise Exception(
- "Found unhandled function action {}, "
- " generating scons command to build\n"
- "Note: this is less efficient than Ninja,"
- " you can write your own ninja build generator for"
- " this function using NinjaRegisterFunctionHandler".format(name)
- )
+ raise Exception("Found unhandled function action {}, "
+ " generating scons command to build\n"
+ "Note: this is less efficient than Ninja,"
+ " you can write your own ninja build generator for"
+ " this function using NinjaRegisterFunctionHandler".format(name))
# pylint: disable=too-many-branches
def handle_list_action(self, node, action):
"""TODO write this comment"""
results = [
- self.action_to_ninja_build(node, action=act)
- for act in action.list
- if act is not None
- ]
- results = [
- result for result in results if result is not None and result["outputs"]
+ self.action_to_ninja_build(node, action=act) for act in action.list if act is not None
]
+ results = [result for result in results if result is not None and result["outputs"]]
if not results:
return None
@@ -450,22 +450,28 @@ class NinjaState:
scons_escape = env.get("ESCAPE", lambda x: x)
self.variables = {
- "COPY": "cmd.exe /c 1>NUL copy" if sys.platform == "win32" else "cp",
- "NOOP": "cmd.exe /c 1>NUL echo 0" if sys.platform == "win32" else "echo 0 >/dev/null",
- "SCONS_INVOCATION": "{} {} __NINJA_NO=1 $out".format(
- sys.executable,
- " ".join(
- [ninja_syntax.escape(scons_escape(arg)) for arg in sys.argv if arg not in COMMAND_LINE_TARGETS]
+ "COPY":
+ "cmd.exe /c 1>NUL copy" if sys.platform == "win32" else "cp",
+ "NOOP":
+ "cmd.exe /c 1>NUL echo 0" if sys.platform == "win32" else "echo 0 >/dev/null",
+ "SCONS_INVOCATION":
+ "{} {} __NINJA_NO=1 $out".format(
+ sys.executable,
+ " ".join([
+ ninja_syntax.escape(scons_escape(arg)) for arg in sys.argv
+ if arg not in COMMAND_LINE_TARGETS
+ ]),
),
- ),
- "SCONS_INVOCATION_W_TARGETS": "{} {}".format(
- sys.executable, " ".join([ninja_syntax.escape(scons_escape(arg)) for arg in sys.argv])
- ),
+ "SCONS_INVOCATION_W_TARGETS":
+ "{} {}".format(
+ sys.executable,
+ " ".join([ninja_syntax.escape(scons_escape(arg)) for arg in sys.argv])),
# This must be set to a global default per:
# https://ninja-build.org/manual.html
#
# (The deps section)
- "msvc_deps_prefix": "Note: including file:",
+ "msvc_deps_prefix":
+ "Note: including file:",
}
self.rules = {
@@ -505,20 +511,21 @@ class NinjaState:
# to do the same. See related for more info:
# https://jira.mongodb.org/browse/SERVER-49457
"AR": {
- "command": "{}$env$AR @$out.rsp".format(
- '' if sys.platform == "win32" else "rm -f $out && "
- ),
- "description": "Archiving $out",
- "rspfile": "$out.rsp",
- "rspfile_content": "$rspc",
- "pool": "local_pool",
+ "command":
+ "{}$env$AR @$out.rsp".format('' if sys.platform == "win32" else "rm -f $out && "
+ ),
+ "description":
+ "Archiving $out",
+ "rspfile":
+ "$out.rsp",
+ "rspfile_content":
+ "$rspc",
+ "pool":
+ "local_pool",
},
"SYMLINK": {
"command": (
- "cmd /c mklink $out $in"
- if sys.platform == "win32"
- else "ln -s $in $out"
- ),
+ "cmd /c mklink $out $in" if sys.platform == "win32" else "ln -s $in $out"),
"description": "Symlink $in -> $out",
},
"NOOP": {
@@ -678,20 +685,17 @@ class NinjaState:
if generated_sources_alias and generated_sources_build:
generated_source_files = sorted(
- [] if not generated_sources_build else generated_sources_build['implicit']
- )
+ [] if not generated_sources_build else generated_sources_build['implicit'])
+
def check_generated_source_deps(build):
- return (
- build != generated_sources_build
- and set(build["outputs"]).isdisjoint(generated_source_files)
- )
+ return (build != generated_sources_build
+ and set(build["outputs"]).isdisjoint(generated_source_files))
else:
generated_sources_build = None
generated_source_files = sorted({
output
# First find builds which have header files in their outputs.
- for build in self.builds.values()
- if self.has_generated_sources(build["outputs"])
+ for build in self.builds.values() if self.has_generated_sources(build["outputs"])
for output in build["outputs"]
# Collect only the header files from the builds with them
# in their output. We do this because is_generated_source
@@ -706,14 +710,13 @@ class NinjaState:
ninja.build(
outputs=generated_sources_alias,
rule="phony",
- implicit=generated_source_files
+ implicit=generated_source_files,
)
+
def check_generated_source_deps(build):
- return (
- not build["rule"] == "INSTALL"
- and set(build["outputs"]).isdisjoint(generated_source_files)
- and set(build.get("implicit", [])).isdisjoint(generated_source_files)
- )
+ return (not build["rule"] == "INSTALL"
+ and set(build["outputs"]).isdisjoint(generated_source_files)
+ and set(build.get("implicit", [])).isdisjoint(generated_source_files))
template_builders = []
@@ -730,10 +733,7 @@ class NinjaState:
# sources and none of the direct implicit dependencies are
# generated sources or else we will create a dependency
# cycle.
- if (
- generated_source_files
- and check_generated_source_deps(build)
- ):
+ if (generated_source_files and check_generated_source_deps(build)):
# Make all non-generated source targets depend on
# _generated_sources. We use order_only for generated
@@ -787,7 +787,9 @@ class NinjaState:
if remaining_outputs:
ninja.build(
- outputs=sorted(remaining_outputs), rule="phony", implicit=first_output,
+ outputs=sorted(remaining_outputs),
+ rule="phony",
+ implicit=first_output,
)
build["outputs"] = first_output
@@ -799,7 +801,8 @@ class NinjaState:
# be repurposed for anything, as long as you have a way to regenerate the depfile.
# More specific info can be found here: https://ninja-build.org/manual.html#_depfile
if rule is not None and rule.get('depfile') and build.get('deps_files'):
- path = build['outputs'] if SCons.Util.is_List(build['outputs']) else [build['outputs']]
+ path = build['outputs'] if SCons.Util.is_List(
+ build['outputs']) else [build['outputs']]
generate_depfile(self.env, path[0], build.pop('deps_files', []))
if "inputs" in build:
@@ -842,7 +845,8 @@ class NinjaState:
# list of build generation about. However, because the generate rule
# is hardcoded here, we need to do this generate_depfile call manually.
ninja_file_path = self.env.File(ninja_file).path
- ninja_in_file_path = os.path.join(get_path(self.env['NINJA_BUILDDIR']), os.path.basename(ninja_file)) + ".in"
+ ninja_in_file_path = os.path.join(
+ get_path(self.env['NINJA_BUILDDIR']), os.path.basename(ninja_file)) + ".in"
generate_depfile(
self.env,
ninja_in_file_path,
@@ -876,23 +880,23 @@ class NinjaState:
pool="console",
implicit=[ninja_file],
variables={
- "cmd": "ninja -f {} -t compdb {}CC CXX > compile_commands.json".format(
- ninja_file, '-x ' if self.env.get('NINJA_COMPDB_EXPAND') else ''
- )
+ "cmd":
+ "ninja -f {} -t compdb {}CC CXX > compile_commands.json".format(
+ ninja_file, '-x ' if self.env.get('NINJA_COMPDB_EXPAND') else '')
},
order_only=[generated_sources_alias],
)
ninja.build(
- "compiledb", rule="phony", implicit=["compile_commands.json"],
+ "compiledb",
+ rule="phony",
+ implicit=["compile_commands.json"],
)
# Look in SCons's list of DEFAULT_TARGETS, find the ones that
# we generated a ninja build rule for.
scons_default_targets = [
- get_path(tgt)
- for tgt in SCons.Script.DEFAULT_TARGETS
- if get_path(tgt) in self.built
+ get_path(tgt) for tgt in SCons.Script.DEFAULT_TARGETS if get_path(tgt) in self.built
]
# If we found an overlap between SCons's list of default
@@ -972,8 +976,7 @@ def get_command_env(env, target, source):
ENV = env.get('SHELL_ENV_GENERATOR', get_default_ENV)(env, target, source)
scons_specified_env = {
key: value
- for key, value in ENV.items()
- if key not in os.environ or os.environ.get(key, None) != value
+ for key, value in ENV.items() if key not in os.environ or os.environ.get(key, None) != value
}
windows = env["PLATFORM"] == "win32"
@@ -1002,7 +1005,8 @@ def get_command_env(env, target, source):
# doesn't make builds on paths with spaces (Ninja and SCons issues)
# nor expanding response file paths with spaces (Ninja issue) work.
value = value.replace(r' ', r'$ ')
- command_env += "export {}='{}';".format(key, env.subst(value, target=target, source=source))
+ command_env += "export {}='{}';".format(key,
+ env.subst(value, target=target, source=source))
env["NINJA_ENV_VAR_CACHE"] = command_env
return command_env
@@ -1030,15 +1034,11 @@ def gen_get_response_file_command(env, rule, tool, tool_is_dynamic=False, custom
cmd_list, _, _ = action.process(targets, sources, env, executor=executor)
cmd_list = [str(c).replace("$", "$$") for c in cmd_list[0]]
else:
- command = generate_command(
- env, node, action, targets, sources, executor=executor
- )
+ command = generate_command(env, node, action, targets, sources, executor=executor)
cmd_list = shlex.split(command)
if tool_is_dynamic:
- tool_command = env.subst(
- tool, target=targets, source=sources, executor=executor
- )
+ tool_command = env.subst(tool, target=targets, source=sources, executor=executor)
else:
tool_command = tool
@@ -1046,11 +1046,8 @@ def gen_get_response_file_command(env, rule, tool, tool_is_dynamic=False, custom
# Add 1 so we always keep the actual tool inside of cmd
tool_idx = cmd_list.index(tool_command) + 1
except ValueError:
- raise Exception(
- "Could not find tool {} in {} generated from {}".format(
- tool, cmd_list, get_comstr(env, action, targets, sources)
- )
- )
+ raise Exception("Could not find tool {} in {} generated from {}".format(
+ tool, cmd_list, get_comstr(env, action, targets, sources)))
cmd, rsp_content = cmd_list[:tool_idx], cmd_list[tool_idx:]
rsp_content = " ".join(rsp_content)
@@ -1062,7 +1059,10 @@ def gen_get_response_file_command(env, rule, tool, tool_is_dynamic=False, custom
for key, value in custom_env.items():
variables["env"] += env.subst(
- f"export {key}={value};", target=targets, source=sources, executor=executor
+ f"export {key}={value};",
+ target=targets,
+ source=sources,
+ executor=executor,
) + " "
return rule, variables, [tool_command]
@@ -1114,7 +1114,7 @@ def get_generic_shell_command(env, node, action, targets, sources, executor=None
# generally this function will not be used on its own and is more like a template to
# generate the basics for a custom provider function, which may have more specific
# options, for a custom NinjaRuleMapping.
- []
+ [],
)
@@ -1151,7 +1151,14 @@ def get_command(env, node, action): # pylint: disable=too-many-branches
return None
provider = __NINJA_RULE_MAPPING.get(comstr, get_generic_shell_command)
- rule, variables, provider_deps = provider(sub_env, node, action, tlist, slist, executor=executor)
+ rule, variables, provider_deps = provider(
+ sub_env,
+ node,
+ action,
+ tlist,
+ slist,
+ executor=executor,
+ )
# Get the dependencies for all targets
implicit = list({dep for tgt in tlist for dep in get_dependencies(tgt)})
@@ -1174,7 +1181,8 @@ def get_command(env, node, action): # pylint: disable=too-many-branches
# in some cases the tool could be in the local directory and be supplied without the
# extension, such as on Windows, so append the executable suffix and check.
prog_suffix = sub_env.get('PROGSUFFIX', '')
- provider_dep_ext = provider_dep if provider_dep.endswith(prog_suffix) else provider_dep + prog_suffix
+ provider_dep_ext = provider_dep if provider_dep.endswith(
+ prog_suffix) else provider_dep + prog_suffix
if os.path.exists(provider_dep_ext):
implicit.append(provider_dep_ext)
continue
@@ -1182,7 +1190,8 @@ def get_command(env, node, action): # pylint: disable=too-many-branches
# Many commands will assume the binary is in the path, so
# we accept this as a possible input from a given command.
- provider_dep_abspath = sub_env.WhereIs(provider_dep) or sub_env.WhereIs(provider_dep, path=os.environ["PATH"])
+ provider_dep_abspath = sub_env.WhereIs(provider_dep) or sub_env.WhereIs(
+ provider_dep, path=os.environ["PATH"])
if provider_dep_abspath:
implicit.append(provider_dep_abspath)
continue
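The wrapped lookups above encode a three-step resolution order for a rule's tool dependency: the literal path, the path with PROGSUFFIX appended (Windows tools are often named without .exe), then a PATH search through env.WhereIs. The same order, sketched with shutil.which standing in for WhereIs:

    import os
    import shutil

    def resolve_tool(name, prog_suffix=""):
        if os.path.exists(name):
            return name
        with_suffix = name if name.endswith(prog_suffix) else name + prog_suffix
        if os.path.exists(with_suffix):
            return with_suffix
        return shutil.which(name)  # None if the tool is nowhere on PATH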
@@ -1262,7 +1271,8 @@ def register_custom_rule_mapping(env, pre_subst_string, rule):
__NINJA_RULE_MAPPING[pre_subst_string] = rule
-def register_custom_rule(env, rule, command, description="", deps=None, pool=None, use_depfile=False, use_response_file=False, response_file_content="$rspc"):
+def register_custom_rule(env, rule, command, description="", deps=None, pool=None,
+ use_depfile=False, use_response_file=False, response_file_content="$rspc"):
"""Allows specification of Ninja rules from inside SCons files."""
rule_obj = {
"command": command,
@@ -1289,10 +1299,12 @@ def register_custom_pool(env, pool, size):
"""Allows the creation of custom Ninja pools"""
env[NINJA_POOLS][pool] = size
+
def set_build_node_callback(env, node, callback):
if 'conftest' not in str(node):
setattr(node.attributes, "ninja_build_callback", callback)
+
def ninja_csig(original):
"""Return a dummy csig"""
@@ -1316,6 +1328,7 @@ def ninja_contents(original):
return wrapper
+
def CheckNinjaCompdbExpand(env, context):
""" Configure check testing if ninja's compdb can expand response files"""
@@ -1333,11 +1346,13 @@ def CheckNinjaCompdbExpand(env, context):
cmd = echo
pool = console
rspc = "test"
- """))
+ """),
+ )
result = '@fake_output.txt.rsp' not in output
context.Result(result)
return result
+
def ninja_stat(_self, path):
"""
Eternally memoized stat call.
@@ -1464,9 +1479,13 @@ def generate(env):
# exists upstream: https://github.com/SCons/scons/issues/3625
def ninja_generate_deps(env):
return sorted([env.File("#SConstruct").path] + glob("**/SConscript", recursive=True))
+
env['_NINJA_REGENERATE_DEPS_FUNC'] = ninja_generate_deps
- env['NINJA_REGENERATE_DEPS'] = env.get('NINJA_REGENERATE_DEPS', '${_NINJA_REGENERATE_DEPS_FUNC(__env__)}')
+ env['NINJA_REGENERATE_DEPS'] = env.get(
+ 'NINJA_REGENERATE_DEPS',
+ '${_NINJA_REGENERATE_DEPS_FUNC(__env__)}',
+ )
# This adds the required flags such that the generated compile
# commands will create depfiles as appropriate in the Ninja file.
@@ -1515,12 +1534,8 @@ def generate(env):
from SCons.Tool.mslink import compositeLinkAction
if env["LINKCOM"] == compositeLinkAction:
- env[
- "LINKCOM"
- ] = '${TEMPFILE("$LINK $LINKFLAGS /OUT:$TARGET.windows $_LIBDIRFLAGS $_LIBFLAGS $_PDB $SOURCES.windows", "$LINKCOMSTR")}'
- env[
- "SHLINKCOM"
- ] = '${TEMPFILE("$SHLINK $SHLINKFLAGS $_SHLINK_TARGETS $_LIBDIRFLAGS $_LIBFLAGS $_PDB $_SHLINK_SOURCES", "$SHLINKCOMSTR")}'
+ env["LINKCOM"] = '${TEMPFILE("$LINK $LINKFLAGS /OUT:$TARGET.windows $_LIBDIRFLAGS $_LIBFLAGS $_PDB $SOURCES.windows", "$LINKCOMSTR")}'
+ env["SHLINKCOM"] = '${TEMPFILE("$SHLINK $SHLINKFLAGS $_SHLINK_TARGETS $_LIBDIRFLAGS $_LIBFLAGS $_PDB $_SHLINK_SOURCES", "$SHLINKCOMSTR")}'
# Normally in SCons actions for the Program and *Library builders
# will return "${*COM}" as their pre-subst'd command line. However
@@ -1612,12 +1627,8 @@ def generate(env):
# slows down the build significantly and we don't need contents or
# content signatures calculated when generating a ninja file since
# we're not doing any SCons caching or building.
- SCons.Executor.Executor.get_contents = ninja_contents(
- SCons.Executor.Executor.get_contents
- )
- SCons.Node.Alias.Alias.get_contents = ninja_contents(
- SCons.Node.Alias.Alias.get_contents
- )
+ SCons.Executor.Executor.get_contents = ninja_contents(SCons.Executor.Executor.get_contents)
+ SCons.Node.Alias.Alias.get_contents = ninja_contents(SCons.Node.Alias.Alias.get_contents)
SCons.Node.FS.File.get_contents = ninja_contents(SCons.Node.FS.File.get_contents)
SCons.Node.FS.File.get_csig = ninja_csig(SCons.Node.FS.File.get_csig)
SCons.Node.FS.Dir.get_csig = ninja_csig(SCons.Node.FS.Dir.get_csig)
@@ -1689,9 +1700,10 @@ def generate(env):
try:
emitter = builder.emitter
if emitter is not None:
- builder.emitter = SCons.Builder.ListEmitter(
- [emitter, ninja_file_depends_on_all]
- )
+ builder.emitter = SCons.Builder.ListEmitter([
+ emitter,
+ ninja_file_depends_on_all,
+ ])
else:
builder.emitter = ninja_file_depends_on_all
# Users can inject whatever they want into the BUILDERS
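One gloss on the emitter change at the end of this file's hunks: SCons.Builder.ListEmitter runs each emitter in order and threads the (target, source) pair through, which is how the ninja file is made to depend on everything the wrapped builders emit. A toy illustration with hypothetical emitters:

    import SCons.Builder

    def add_log(target, source, env):
        return target + [str(target[0]) + ".log"], source

    def add_map(target, source, env):
        return target + [str(target[0]) + ".map"], source

    composed = SCons.Builder.ListEmitter([add_log, add_map])
    # composed(["prog"], ["main.c"], env)
    # -> (["prog", "prog.log", "prog.map"], ["main.c"])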
diff --git a/site_scons/site_tools/separate_debug.py b/site_scons/site_tools/separate_debug.py
index 677ef75723e..08c78f4ef32 100644
--- a/site_scons/site_tools/separate_debug.py
+++ b/site_scons/site_tools/separate_debug.py
@@ -34,15 +34,15 @@ def _update_builder(env, builder):
if origin is not None:
origin_results = old_scanner(origin, env, path)
for origin_result in origin_results:
- origin_result_debug_files = getattr(
- origin_result.attributes, "separate_debug_files", None
- )
+ origin_result_debug_files = getattr(origin_result.attributes,
+ "separate_debug_files", None)
if origin_result_debug_files is not None:
results.extend(origin_result_debug_files)
return results
builder.target_scanner = SCons.Scanner.Scanner(
- function=new_scanner, path_function=old_path_function,
+ function=new_scanner,
+ path_function=old_path_function,
)
base_action = builder.action
@@ -57,31 +57,27 @@ def _update_builder(env, builder):
# setup from the etc/scons/xcode_*.vars files, which would be a
# win as well.
if env.TargetOSIs("darwin"):
- base_action.list.extend(
- [
- SCons.Action.Action(
- "$DSYMUTIL -num-threads 1 $TARGET -o ${TARGET}.dSYM",
- "$DSYMUTILCOMSTR"
- ),
- SCons.Action.Action(
- "$STRIP -S ${TARGET}",
- "$DEBUGSTRIPCOMSTR"
- ),
- ]
- )
+ base_action.list.extend([
+ SCons.Action.Action(
+ "$DSYMUTIL -num-threads 1 $TARGET -o ${TARGET}.dSYM",
+ "$DSYMUTILCOMSTR",
+ ),
+ SCons.Action.Action(
+ "$STRIP -S ${TARGET}",
+ "$DEBUGSTRIPCOMSTR",
+ ),
+ ])
elif env.TargetOSIs("posix"):
- base_action.list.extend(
- [
- SCons.Action.Action(
- "$OBJCOPY --only-keep-debug $TARGET ${TARGET}.debug",
- "$OBJCOPY_ONLY_KEEP_DEBUG_COMSTR"
- ),
- SCons.Action.Action(
- "$OBJCOPY --strip-debug --add-gnu-debuglink ${TARGET}.debug ${TARGET}",
- "$DEBUGSTRIPCOMSTR"
- ),
- ]
- )
+ base_action.list.extend([
+ SCons.Action.Action(
+ "$OBJCOPY --only-keep-debug $TARGET ${TARGET}.debug",
+ "$OBJCOPY_ONLY_KEEP_DEBUG_COMSTR",
+ ),
+ SCons.Action.Action(
+ "$OBJCOPY --strip-debug --add-gnu-debuglink ${TARGET}.debug ${TARGET}",
+ "$DEBUGSTRIPCOMSTR",
+ ),
+ ])
else:
pass
@@ -109,13 +105,15 @@ def _update_builder(env, builder):
plist_file = env.File("Contents/Info.plist", directory=dsym_dir)
setattr(plist_file.attributes, "aib_effective_suffix", ".dSYM")
- setattr(plist_file.attributes, "aib_additional_directory", "{}/Contents".format(dsym_dir_name))
+ setattr(plist_file.attributes, "aib_additional_directory",
+ "{}/Contents".format(dsym_dir_name))
dwarf_dir = env.Dir("Contents/Resources/DWARF", directory=dsym_dir)
dwarf_file = env.File(target0.name, directory=dwarf_dir)
setattr(dwarf_file.attributes, "aib_effective_suffix", ".dSYM")
- setattr(dwarf_file.attributes, "aib_additional_directory", "{}/Contents/Resources/DWARF".format(dsym_dir_name))
+ setattr(dwarf_file.attributes, "aib_additional_directory",
+ "{}/Contents/Resources/DWARF".format(dsym_dir_name))
debug_files.extend([plist_file, dwarf_file])
@@ -174,8 +172,10 @@ def generate(env):
if not env.Verbose():
env.Append(
- OBJCOPY_ONLY_KEEP_DEBUG_COMSTR="Generating debug info for $TARGET into ${TARGET}.dSYM",
- DEBUGSTRIPCOMSTR="Stripping debug info from ${TARGET} and adding .gnu.debuglink to ${TARGET}.debug",
+ OBJCOPY_ONLY_KEEP_DEBUG_COMSTR=
+ "Generating debug info for $TARGET into ${TARGET}.dSYM",
+ DEBUGSTRIPCOMSTR=
+ "Stripping debug info from ${TARGET} and adding .gnu.debuglink to ${TARGET}.debug",
)
for builder in ["Program", "SharedLibrary", "LoadableModule"]:
diff --git a/site_scons/site_tools/split_dwarf.py b/site_scons/site_tools/split_dwarf.py
index 710d828945a..72316dfb968 100644
--- a/site_scons/site_tools/split_dwarf.py
+++ b/site_scons/site_tools/split_dwarf.py
@@ -66,7 +66,10 @@ def generate(env):
if not suffix in suffixes:
continue
base = emitterdict[suffix]
- emitterdict[suffix] = SCons.Builder.ListEmitter([base, _dwo_emitter,])
+ emitterdict[suffix] = SCons.Builder.ListEmitter([
+ base,
+ _dwo_emitter,
+ ])
def exists(env):
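The ListEmitter above chains each suffix's existing emitter with _dwo_emitter, defined earlier in this file, whose job is essentially to advertise the sibling .dwo as an extra target so SCons tracks it. A hedged sketch of that shape:

    import SCons.Util

    def _dwo_emitter(target, source, env):
        new_targets = []
        for t in target:
            base, _ = SCons.Util.splitext(str(t))
            new_targets.append(env.File(base + ".dwo"))
        return target + new_targets, source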
diff --git a/site_scons/site_tools/tapilink.py b/site_scons/site_tools/tapilink.py
index 0521767fc06..d2fc4b8c340 100644
--- a/site_scons/site_tools/tapilink.py
+++ b/site_scons/site_tools/tapilink.py
@@ -26,6 +26,7 @@ import subprocess
# TODO: DRY this with abilink.py by moving duplicated code out to a common
# support module.
+
def _detect(env):
try:
tapi = env["TAPI"]
@@ -70,9 +71,11 @@ def _add_scanner(builder):
return (getattr(env.Entry(o).attributes, "tbd", o) for o in old_scanner(node, env, path))
builder.target_scanner = SCons.Scanner.Scanner(
- function=new_scanner, path_function=path_function
+ function=new_scanner,
+ path_function=path_function,
)
+
def _add_action(builder):
actions = builder.action
@@ -83,12 +86,11 @@ def _add_action(builder):
# invoking TAPI proves to be expensive, we could address this by
# instead post-processing the "real" .tbd file to strip out the
# UUID, and then potentially even feed it into a hash algorithm.
- builder.action = actions + SCons.Action.Action(
- [
- "$TAPI stubify -o ${TARGET.base}.tbd ${TARGET}",
- "$TAPI stubify --no-uuids -o ${TARGET.base}.tbd.no_uuid ${TARGET}"
- ]
- )
+ builder.action = actions + SCons.Action.Action([
+ "$TAPI stubify -o ${TARGET.base}.tbd ${TARGET}",
+ "$TAPI stubify --no-uuids -o ${TARGET.base}.tbd.no_uuid ${TARGET}",
+ ])
+
def exists(env):
result = _detect(env) != None
diff --git a/site_scons/site_tools/thin_archive.py b/site_scons/site_tools/thin_archive.py
index 5700996a054..7d34a6bfd37 100644
--- a/site_scons/site_tools/thin_archive.py
+++ b/site_scons/site_tools/thin_archive.py
@@ -92,7 +92,8 @@ def _add_scanner(builder):
return new_results
builder.target_scanner = SCons.Scanner.Scanner(
- function=new_scanner, path_function=path_function
+ function=new_scanner,
+ path_function=path_function,
)
@@ -101,8 +102,7 @@ def generate(env):
return
env["ARFLAGS"] = SCons.Util.CLVar(
- [arflag if arflag != "rc" else "rcsTD" for arflag in env["ARFLAGS"]]
- )
+ [arflag if arflag != "rc" else "rcsTD" for arflag in env["ARFLAGS"]])
# Disable running ranlib, since we added 's' above
env["RANLIBCOM"] = ""
diff --git a/site_scons/site_tools/validate_cache_dir.py b/site_scons/site_tools/validate_cache_dir.py
index b5faee9b3e0..3bd07462ade 100644
--- a/site_scons/site_tools/validate_cache_dir.py
+++ b/site_scons/site_tools/validate_cache_dir.py
@@ -29,19 +29,21 @@ import shutil
import tempfile
import traceback
-
import SCons
cache_debug_suffix = " (target: %s, cachefile: %s) "
+
class InvalidChecksum(SCons.Errors.BuildError):
def __init__(self, src, dst, reason, cache_csig='', computed_csig=''):
self.message = f"ERROR: md5 checksum {reason} for {src} ({dst})"
self.cache_csig = cache_csig
self.computed_csig = computed_csig
+
def __str__(self):
return self.message
+
class CacheTransferFailed(SCons.Errors.BuildError):
def __init__(self, src, dst, reason):
self.message = f"ERROR: cachedir transfer {reason} while transfering {src} to {dst}"
@@ -49,6 +51,7 @@ class CacheTransferFailed(SCons.Errors.BuildError):
def __str__(self):
return self.message
+
class UnsupportedError(SCons.Errors.BuildError):
def __init__(self, class_name, feature):
self.message = f"{class_name} does not support {feature}"
@@ -56,8 +59,8 @@ class UnsupportedError(SCons.Errors.BuildError):
def __str__(self):
return self.message
-class CacheDirValidate(SCons.CacheDir.CacheDir):
+class CacheDirValidate(SCons.CacheDir.CacheDir):
def __init__(self, path):
self.json_log = None
super().__init__(path)
@@ -70,7 +73,8 @@ class CacheDirValidate(SCons.CacheDir.CacheDir):
@staticmethod
def get_file_contents_path(default_cachefile_path):
- return pathlib.Path(default_cachefile_path) / pathlib.Path(default_cachefile_path).name.split('.')[0]
+ return pathlib.Path(default_cachefile_path) / pathlib.Path(
+ default_cachefile_path).name.split('.')[0]
@staticmethod
def get_bad_cachefile_path(cksum_cachefile_dir):
@@ -96,17 +100,20 @@ class CacheDirValidate(SCons.CacheDir.CacheDir):
src_file = cls.get_file_contents_path(src)
# using os.path.exists here because: https://bugs.python.org/issue35306
if os.path.exists(str(cls.get_bad_cachefile_path(src))):
- raise InvalidChecksum(cls.get_hash_path(src_file), dst, f"cachefile marked as bad checksum")
+ raise InvalidChecksum(
+ cls.get_hash_path(src_file), dst, f"cachefile marked as bad checksum")
csig = None
try:
with open(cls.get_hash_path(src_file), 'rb') as f_out:
csig = f_out.read().decode().strip()
except OSError as ex:
- raise InvalidChecksum(cls.get_hash_path(src_file), dst, f"failed to read hash file: {ex}") from ex
+ raise InvalidChecksum(
+ cls.get_hash_path(src_file), dst, f"failed to read hash file: {ex}") from ex
else:
if not csig:
- raise InvalidChecksum(cls.get_hash_path(src_file), dst, f"no content_hash data found")
+ raise InvalidChecksum(
+ cls.get_hash_path(src_file), dst, f"no content_hash data found")
with tempfile.TemporaryDirectory() as tmpdirname:
dst_tmp = pathlib.Path(tmpdirname) / os.path.basename(dst)
@@ -118,11 +125,12 @@ class CacheDirValidate(SCons.CacheDir.CacheDir):
shutil.move(dst_tmp, dst)
new_csig = SCons.Util.MD5filesignature(dst,
- chunksize=SCons.Node.FS.File.md5_chunksize*1024)
+ chunksize=SCons.Node.FS.File.md5_chunksize * 1024)
if csig != new_csig:
raise InvalidChecksum(
- cls.get_hash_path(src_file), dst, f"checksums don't match {csig} != {new_csig}", cache_csig=csig, computed_csig=new_csig)
+ cls.get_hash_path(src_file), dst, f"checksums don't match {csig} != {new_csig}",
+ cache_csig=csig, computed_csig=new_csig)
@classmethod
def copy_to_cache(cls, env, src, dst):
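Summarizing the fetch path this hunk reformats: read the recorded csig, copy the payload out of the cache, recompute the MD5 with SCons's chunked hasher, and raise on mismatch. A condensed sketch (helper name hypothetical, temp-directory staging and error types simplified):

    import shutil
    import SCons.Node.FS
    import SCons.Util

    def validated_fetch(hash_path, payload, dst):
        with open(hash_path) as f:
            expected = f.read().strip()
        shutil.copy2(payload, dst)
        actual = SCons.Util.MD5filesignature(
            dst, chunksize=SCons.Node.FS.File.md5_chunksize * 1024)
        if expected != actual:
            raise ValueError(f"checksums don't match {expected} != {actual}")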
@@ -145,9 +153,8 @@ class CacheDirValidate(SCons.CacheDir.CacheDir):
raise CacheTransferFailed(src, dst_file, f"failed to create hash file: {ex}") from ex
def log_json_cachedebug(self, node, pushing=False):
- if (pushing
- and (node.nocache or SCons.CacheDir.cache_readonly or 'conftest' in str(node))):
- return
+ if (pushing and (node.nocache or SCons.CacheDir.cache_readonly or 'conftest' in str(node))):
+ return
cachefile = self.get_file_contents_path(self.cachepath(node)[1])
if node.fs.exists(cachefile):
@@ -213,8 +220,8 @@ class CacheDirValidate(SCons.CacheDir.CacheDir):
self.debugFP.write(self._format_exception_msg())
def _format_exception_msg(self):
- return ('An exception was detected while using the cache:\n' +
- ' ' + "\n ".join("".join(traceback.format_exc()).split("\n"))) + '\n'
+ return ('An exception was detected while using the cache:\n' + ' ' + "\n ".join(
+ "".join(traceback.format_exc()).split("\n"))) + '\n'
def _log(self, log_msg, json_info, realnode, cachefile):
self.CacheDebug(log_msg + cache_debug_suffix, realnode, cachefile)
@@ -241,12 +248,16 @@ class CacheDirValidate(SCons.CacheDir.CacheDir):
return
msg = f"Removed bad cachefile {cksum_dir} found in cache."
- self._log(msg, {
- 'type': 'invalid_checksum',
- 'cache_csig': cache_csig,
- 'computed_csig': computed_csig
- }, node, cksum_dir)
-
+ self._log(
+ msg,
+ {
+ 'type': 'invalid_checksum',
+ 'cache_csig': cache_csig,
+ 'computed_csig': computed_csig,
+ },
+ node,
+ cksum_dir,
+ )
def get_cachedir_csig(self, node):
cachedir, cachefile = self.cachepath(node)
@@ -263,9 +274,11 @@ class CacheDirValidate(SCons.CacheDir.CacheDir):
return dir, path
return dir, str(self.get_cachedir_path(path))
+
def exists(env):
return True
+
def generate(env):
if not env.get('CACHEDIR_CLASS'):
env['CACHEDIR_CLASS'] = CacheDirValidate
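For completeness, wiring the class above into a build is short once the tool is loaded, since generate() sets the CACHEDIR_CLASS construction variable that newer SCons releases consult when constructing the cache. A hypothetical SConstruct fragment:

    env = Environment(tools=["default", "validate_cache_dir"])
    env.CacheDir("/path/to/cache")  # transfers now go through CacheDirValidate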
diff --git a/site_scons/site_tools/vcredist.py b/site_scons/site_tools/vcredist.py
index 5c8effaadc4..0f86629b281 100644
--- a/site_scons/site_tools/vcredist.py
+++ b/site_scons/site_tools/vcredist.py
@@ -137,8 +137,7 @@ def generate(env):
vs_version = int(msvc_major) + int(msvc_minor)
vs_version_next = vs_version + 1
vs_version_range = "[{vs_version}.0, {vs_version_next}.0)".format(
- vs_version=vs_version, vs_version_next=vs_version_next
- )
+ vs_version=vs_version, vs_version_next=vs_version_next)
if not programfilesx86:
programfilesx86 = _get_programfiles()
@@ -146,25 +145,19 @@ def generate(env):
return
# Use vswhere (it has a fixed stable path) to query where Visual Studio is installed.
- env["MSVS"]["VSINSTALLDIR"] = (
- subprocess.check_output(
- [
- os.path.join(
- programfilesx86,
- "Microsoft Visual Studio",
- "Installer",
- "vswhere.exe",
- ),
- "-version",
- vs_version_range,
- "-property",
- "installationPath",
- "-nologo",
- ]
- )
- .decode("utf-8")
- .strip()
- )
+ env["MSVS"]["VSINSTALLDIR"] = (subprocess.check_output([
+ os.path.join(
+ programfilesx86,
+ "Microsoft Visual Studio",
+ "Installer",
+ "vswhere.exe",
+ ),
+ "-version",
+ vs_version_range,
+ "-property",
+ "installationPath",
+ "-nologo",
+ ]).decode("utf-8").strip())
vsinstall_dir = env["MSVS"]["VSINSTALLDIR"]
@@ -179,19 +172,15 @@ def generate(env):
# TODO: This x64 needs to be abstracted away. Is it the host
# arch, or the target arch? My guess is host.
vsruntime_key_name = "SOFTWARE\\Microsoft\\VisualStudio\\{msvc_major}.0\\VC\\Runtimes\\x64".format(
- msvc_major=msvc_major
- )
+ msvc_major=msvc_major)
vsruntime_key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, vsruntime_key_name)
- vslib_version, vslib_version_type = winreg.QueryValueEx(
- vsruntime_key, "Version"
- )
+ vslib_version, vslib_version_type = winreg.QueryValueEx(vsruntime_key, "Version")
except WindowsError:
return
# Fallback to directory search if we don't find the expected version
- redist_path = os.path.join(
- redist_root, re.match("v(\d+\.\d+\.\d+)\.\d+", vslib_version).group(1)
- )
+ redist_path = os.path.join(redist_root,
+ re.match("v(\d+\.\d+\.\d+)\.\d+", vslib_version).group(1))
if not os.path.isdir(redist_path):
redist_path = None
dirs = os.listdir(redist_root)
@@ -228,9 +217,7 @@ def generate(env):
if not expansion:
return
- vcredist_candidates = [
- c.format(expansion) for c in vcredist_search_template_sequence
- ]
+ vcredist_candidates = [c.format(expansion) for c in vcredist_search_template_sequence]
for candidate in vcredist_candidates:
candidate = os.path.join(redist_path, candidate)
if os.path.isfile(candidate):
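A standalone sketch of the vswhere query above; the fixed install location under "Program Files (x86)" is the documented contract being relied on, and the flags are exactly those in the reformatted call:

    import os
    import subprocess

    def find_vs(programfilesx86, version_range):
        vswhere = os.path.join(programfilesx86, "Microsoft Visual Studio",
                               "Installer", "vswhere.exe")
        return subprocess.check_output([
            vswhere, "-version", version_range,
            "-property", "installationPath", "-nologo",
        ]).decode("utf-8").strip()

    # find_vs(os.environ["ProgramFiles(x86)"], "[16.0, 17.0)") -> VS 2019 root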
diff --git a/site_scons/site_tools/xcode.py b/site_scons/site_tools/xcode.py
index d40528d3a54..3db0aca4719 100644
--- a/site_scons/site_tools/xcode.py
+++ b/site_scons/site_tools/xcode.py
@@ -34,5 +34,4 @@ def generate(env):
if "DEVELOPER_DIR" in os.environ:
env["ENV"]["DEVELOPER_DIR"] = os.environ["DEVELOPER_DIR"]
print(
- "NOTE: Xcode detected; propagating DEVELOPER_DIR from shell environment to subcommands"
- )
+ "NOTE: Xcode detected; propagating DEVELOPER_DIR from shell environment to subcommands")