author | Mathew Robinson <mathew.robinson@mongodb.com> | 2020-01-07 18:48:42 +0000 |
---|---|---|
committer | evergreen <evergreen@mongodb.com> | 2020-01-07 18:48:42 +0000 |
commit | 16207c4fa75a8045f24126d768b0ba2e1ce32b1f | |
tree | 6df377ef6da81df1637ccaf95f40354f19af53d4 /site_scons | |
parent | 71658c0e2edb8c900ced5098f3f5a146a2d649ea | |
SERVER-44947 Allow test execution selection by test source file name
Diffstat (limited to 'site_scons')
31 files changed, 1088 insertions, 771 deletions
diff --git a/site_scons/libdeps.py b/site_scons/libdeps.py
index fc93311a9f1..e3f05f6aa7b 100644
--- a/site_scons/libdeps.py
+++ b/site_scons/libdeps.py
@@ -57,9 +57,10 @@ import SCons.Errors
 import SCons.Scanner
 import SCons.Util
 
-libdeps_env_var = 'LIBDEPS'
-syslibdeps_env_var = 'SYSLIBDEPS'
-missing_syslibdep = 'MISSING_LIBDEP_'
+libdeps_env_var = "LIBDEPS"
+syslibdeps_env_var = "SYSLIBDEPS"
+missing_syslibdep = "MISSING_LIBDEP_"
+
 
 class dependency(object):
     Public, Private, Interface = list(range(3))
@@ -93,13 +94,15 @@ class DependencyCycleError(SCons.Errors.UserError):
         self.cycle_nodes = [first_node]
 
     def __str__(self):
-        return "Library dependency cycle detected: " + " => ".join(str(n) for n in self.cycle_nodes)
+        return "Library dependency cycle detected: " + " => ".join(
+            str(n) for n in self.cycle_nodes
+        )
 
 
 def __get_sorted_direct_libdeps(node):
     direct_sorted = getattr(node.attributes, "libdeps_direct_sorted", False)
     if not direct_sorted:
-        direct = getattr(node.attributes, 'libdeps_direct', [])
+        direct = getattr(node.attributes, "libdeps_direct", [])
         direct_sorted = sorted(direct, key=lambda t: str(t.target_node))
         setattr(node.attributes, "libdeps_direct_sorted", direct_sorted)
     return direct_sorted
@@ -111,7 +114,7 @@ def __get_libdeps(node):
 
     Computes the dependencies if they're not already cached.
     """
-    cached_var_name = libdeps_env_var + '_cached'
+    cached_var_name = libdeps_env_var + "_cached"
 
     if hasattr(node.attributes, cached_var_name):
         return getattr(node.attributes, cached_var_name)
@@ -120,7 +123,7 @@ def __get_libdeps(node):
     marked = set()
 
     def visit(n):
-        if getattr(n.target_node.attributes, 'libdeps_exploring', False):
+        if getattr(n.target_node.attributes, "libdeps_exploring", False):
             raise DependencyCycleError(n.target_node)
 
         n.target_node.attributes.libdeps_exploring = True
@@ -160,25 +163,33 @@ def __get_syslibdeps(node):
 
     These are the depencencies listed with SYSLIBDEPS, and are linked using -l.
     """
-    cached_var_name = syslibdeps_env_var + '_cached'
+    cached_var_name = syslibdeps_env_var + "_cached"
     if not hasattr(node.attributes, cached_var_name):
         syslibdeps = node.get_env().Flatten(node.get_env().get(syslibdeps_env_var, []))
         for lib in __get_libdeps(node):
-            for syslib in node.get_env().Flatten(lib.get_env().get(syslibdeps_env_var, [])):
+            for syslib in node.get_env().Flatten(
+                lib.get_env().get(syslibdeps_env_var, [])
+            ):
                 if syslib:
                     if type(syslib) is str and syslib.startswith(missing_syslibdep):
-                        print(("Target '%s' depends on the availability of a "
-                               "system provided library for '%s', "
-                               "but no suitable library was found during configuration." %
-                               (str(node), syslib[len(missing_syslibdep):])))
+                        print(
+                            (
+                                "Target '%s' depends on the availability of a "
+                                "system provided library for '%s', "
+                                "but no suitable library was found during configuration."
+                                % (str(node), syslib[len(missing_syslibdep) :])
+                            )
+                        )
                         node.get_env().Exit(1)
                     syslibdeps.append(syslib)
         setattr(node.attributes, cached_var_name, syslibdeps)
     return getattr(node.attributes, cached_var_name)
 
+
 def __missing_syslib(name):
     return missing_syslibdep + name
 
+
 def update_scanner(builder):
     """Update the scanner for "builder" to also scan library dependencies."""
 
@@ -191,14 +202,16 @@ def update_scanner(builder):
             result = old_scanner.function(node, env, path)
             result.extend(__get_libdeps(node))
             return result
+
     else:
         path_function = None
 
         def new_scanner(node, env, path=()):
             return __get_libdeps(node)
 
-    builder.target_scanner = SCons.Scanner.Scanner(function=new_scanner,
-                                                   path_function=path_function)
+    builder.target_scanner = SCons.Scanner.Scanner(
+        function=new_scanner, path_function=path_function
+    )
 
 
 def get_libdeps(source, target, env, for_signature):
@@ -221,8 +234,8 @@ def get_libdeps_objs(source, target, env, for_signature):
 
 def get_syslibdeps(source, target, env, for_signature):
     deps = __get_syslibdeps(target[0])
-    lib_link_prefix = env.subst('$LIBLINKPREFIX')
-    lib_link_suffix = env.subst('$LIBLINKSUFFIX')
+    lib_link_prefix = env.subst("$LIBLINKPREFIX")
+    lib_link_suffix = env.subst("$LIBLINKSUFFIX")
     result = []
     for d in deps:
         # Elements of syslibdeps are either strings (str or unicode), or they're File objects.
@@ -231,22 +244,26 @@ def get_syslibdeps(source, target, env, for_signature):
         # or the compiler-specific equivalent. I.e., 'm' becomes '-lm', but 'File("m.a") is passed
         # through whole cloth.
         if type(d) is str:
-            result.append('%s%s%s' % (lib_link_prefix, d, lib_link_suffix))
+            result.append("%s%s%s" % (lib_link_prefix, d, lib_link_suffix))
         else:
             result.append(d)
     return result
 
+
 def __append_direct_libdeps(node, prereq_nodes):
     # We do not bother to decorate nodes that are not actual Objects
     if type(node) == str:
         return
-    if getattr(node.attributes, 'libdeps_direct', None) is None:
+    if getattr(node.attributes, "libdeps_direct", None) is None:
        node.attributes.libdeps_direct = []
     node.attributes.libdeps_direct.extend(prereq_nodes)
 
 
-def make_libdeps_emitter(dependency_builder, dependency_map=dependency_visibility_ignored,
-                         ignore_progdeps=False):
+def make_libdeps_emitter(
+    dependency_builder,
+    dependency_map=dependency_visibility_ignored,
+    ignore_progdeps=False,
+):
     def libdeps_emitter(target, source, env):
         """SCons emitter that takes values from the LIBDEPS environment variable and
         converts them to File node objects, binding correct path information into
@@ -265,29 +282,35 @@ def make_libdeps_emitter(dependency_builder, dependency_map=dependency_visibilit
         of the "target" list is made a prerequisite of the elements of LIBDEPS_DEPENDENTS.
         """
-        lib_builder = env['BUILDERS'][dependency_builder]
+        lib_builder = env["BUILDERS"][dependency_builder]
         lib_node_factory = lib_builder.target_factory or env.File
 
-        prog_builder = env['BUILDERS']['Program']
+        prog_builder = env["BUILDERS"]["Program"]
         prog_node_factory = prog_builder.target_factory or env.File
 
         prereqs = [
-            dependency(l, dependency_map[dependency.Public]) for l in env.get(libdeps_env_var, [])
+            dependency(l, dependency_map[dependency.Public])
+            for l in env.get(libdeps_env_var, [])
             if l
         ]
         prereqs.extend(
             dependency(l, dependency_map[dependency.Interface])
-            for l in env.get(libdeps_env_var + '_INTERFACE', []) if l)
+            for l in env.get(libdeps_env_var + "_INTERFACE", [])
+            if l
+        )
         prereqs.extend(
             dependency(l, dependency_map[dependency.Private])
-            for l in env.get(libdeps_env_var + '_PRIVATE', []) if l)
+            for l in env.get(libdeps_env_var + "_PRIVATE", [])
+            if l
+        )
 
         lib_builder_prefix = lib_builder.get_prefix(env)
         lib_builder_suffix = lib_builder.get_suffix(env)
 
         for prereq in prereqs:
-            prereqWithIxes = SCons.Util.adjustixes(prereq.target_node, lib_builder_prefix,
-                                                   lib_builder_suffix)
+            prereqWithIxes = SCons.Util.adjustixes(
+                prereq.target_node, lib_builder_prefix, lib_builder_suffix
+            )
             prereq.target_node = lib_node_factory(prereqWithIxes)
 
         for t in target:
@@ -295,7 +318,7 @@ def make_libdeps_emitter(dependency_builder, dependency_map=dependency_visibilit
             # work properly.
             __append_direct_libdeps(t, prereqs)
 
-        for dependent in env.get('LIBDEPS_DEPENDENTS', []):
+        for dependent in env.get("LIBDEPS_DEPENDENTS", []):
             if dependent is None:
                 continue
 
@@ -304,17 +327,19 @@ def make_libdeps_emitter(dependency_builder, dependency_map=dependency_visibilit
                 visibility = dependent[1]
                 dependent = dependent[0]
 
-            dependentWithIxes = SCons.Util.adjustixes(dependent, lib_builder_prefix,
-                                                      lib_builder_suffix)
+            dependentWithIxes = SCons.Util.adjustixes(
+                dependent, lib_builder_prefix, lib_builder_suffix
+            )
             dependentNode = lib_node_factory(dependentWithIxes)
-            __append_direct_libdeps(dependentNode,
-                                    [dependency(target[0], dependency_map[visibility])])
+            __append_direct_libdeps(
+                dependentNode, [dependency(target[0], dependency_map[visibility])]
+            )
 
         prog_builder_prefix = prog_builder.get_prefix(env)
         prog_builder_suffix = prog_builder.get_suffix(env)
 
         if not ignore_progdeps:
-            for dependent in env.get('PROGDEPS_DEPENDENTS', []):
+            for dependent in env.get("PROGDEPS_DEPENDENTS", []):
                 if dependent is None:
                     continue
 
@@ -324,11 +349,13 @@ def make_libdeps_emitter(dependency_builder, dependency_map=dependency_visibilit
                     visibility = dependent[1]
                     dependent = dependent[0]
 
-                dependentWithIxes = SCons.Util.adjustixes(dependent, prog_builder_prefix,
-                                                          prog_builder_suffix)
+                dependentWithIxes = SCons.Util.adjustixes(
+                    dependent, prog_builder_prefix, prog_builder_suffix
+                )
                 dependentNode = prog_node_factory(dependentWithIxes)
-                __append_direct_libdeps(dependentNode,
-                                        [dependency(target[0], dependency_map[visibility])])
+                __append_direct_libdeps(
+                    dependentNode, [dependency(target[0], dependency_map[visibility])]
+                )
 
         return target, source
 
@@ -337,7 +364,7 @@ def make_libdeps_emitter(dependency_builder, dependency_map=dependency_visibilit
 
 def expand_libdeps_tags(source, target, env, for_signature):
     results = []
-    for expansion in env.get('LIBDEPS_TAG_EXPANSIONS', []):
+    for expansion in env.get("LIBDEPS_TAG_EXPANSIONS", []):
         results.append(expansion(source, target, env, for_signature))
     return results
 
@@ -376,14 +403,14 @@ def setup_environment(env, emitting_shared=False):
     """Set up the given build environment to do LIBDEPS tracking."""
 
     try:
-        env['_LIBDEPS']
+        env["_LIBDEPS"]
     except KeyError:
-        env['_LIBDEPS'] = '$_LIBDEPS_LIBS'
+        env["_LIBDEPS"] = "$_LIBDEPS_LIBS"
 
-    env['_LIBDEPS_TAGS'] = expand_libdeps_tags
-    env['_LIBDEPS_GET_LIBS'] = get_libdeps
-    env['_LIBDEPS_OBJS'] = get_libdeps_objs
-    env['_SYSLIBDEPS'] = get_syslibdeps
+    env["_LIBDEPS_TAGS"] = expand_libdeps_tags
+    env["_LIBDEPS_GET_LIBS"] = get_libdeps
+    env["_LIBDEPS_OBJS"] = get_libdeps_objs
+    env["_SYSLIBDEPS"] = get_syslibdeps
 
     env[libdeps_env_var] = SCons.Util.CLVar()
     env[syslibdeps_env_var] = SCons.Util.CLVar()
@@ -401,29 +428,34 @@ def setup_environment(env, emitting_shared=False):
         return indirect_emitter
 
     env.Append(
-        LIBDEPS_LIBEMITTER=make_libdeps_emitter('StaticLibrary'),
-        LIBEMITTER=make_indirect_emitter('LIBDEPS_LIBEMITTER'),
-        LIBDEPS_SHAREMITTER=make_libdeps_emitter('SharedArchive', ignore_progdeps=True),
-        SHAREMITTER=make_indirect_emitter('LIBDEPS_SHAREMITTER'),
-        LIBDEPS_SHLIBEMITTER=make_libdeps_emitter('SharedLibrary', dependency_visibility_honored),
-        SHLIBEMITTER=make_indirect_emitter('LIBDEPS_SHLIBEMITTER'),
+        LIBDEPS_LIBEMITTER=make_libdeps_emitter("StaticLibrary"),
+        LIBEMITTER=make_indirect_emitter("LIBDEPS_LIBEMITTER"),
+        LIBDEPS_SHAREMITTER=make_libdeps_emitter("SharedArchive", ignore_progdeps=True),
+        SHAREMITTER=make_indirect_emitter("LIBDEPS_SHAREMITTER"),
+        LIBDEPS_SHLIBEMITTER=make_libdeps_emitter(
+            "SharedLibrary", dependency_visibility_honored
+        ),
+        SHLIBEMITTER=make_indirect_emitter("LIBDEPS_SHLIBEMITTER"),
        LIBDEPS_PROGEMITTER=make_libdeps_emitter(
-            'SharedLibrary' if emitting_shared else 'StaticLibrary'),
-        PROGEMITTER=make_indirect_emitter('LIBDEPS_PROGEMITTER'),
+            "SharedLibrary" if emitting_shared else "StaticLibrary"
+        ),
+        PROGEMITTER=make_indirect_emitter("LIBDEPS_PROGEMITTER"),
     )
 
     env["_LIBDEPS_LIBS_WITH_TAGS"] = expand_libdeps_with_extraction_flags
 
-    env['_LIBDEPS_LIBS'] = ('$LINK_WHOLE_ARCHIVE_START '
-                            '$LINK_LIBGROUP_START '
-                            '$_LIBDEPS_LIBS_WITH_TAGS '
-                            '$LINK_LIBGROUP_END '
-                            '$LINK_WHOLE_ARCHIVE_END')
+    env["_LIBDEPS_LIBS"] = (
+        "$LINK_WHOLE_ARCHIVE_START "
+        "$LINK_LIBGROUP_START "
+        "$_LIBDEPS_LIBS_WITH_TAGS "
+        "$LINK_LIBGROUP_END "
+        "$LINK_WHOLE_ARCHIVE_END"
+    )
 
-    env.Prepend(_LIBFLAGS='$_LIBDEPS_TAGS $_LIBDEPS $_SYSLIBDEPS ')
-    for builder_name in ('Program', 'SharedLibrary', 'LoadableModule', 'SharedArchive'):
+    env.Prepend(_LIBFLAGS="$_LIBDEPS_TAGS $_LIBDEPS $_SYSLIBDEPS ")
+    for builder_name in ("Program", "SharedLibrary", "LoadableModule", "SharedArchive"):
         try:
-            update_scanner(env['BUILDERS'][builder_name])
+            update_scanner(env["BUILDERS"][builder_name])
         except KeyError:
             pass
@@ -431,7 +463,7 @@ def setup_conftests(conf):
     def FindSysLibDep(context, name, libs, **kwargs):
         var = "LIBDEPS_" + name.upper() + "_SYSLIBDEP"
-        kwargs['autoadd'] = False
+        kwargs["autoadd"] = False
         for lib in libs:
             result = context.sconf.CheckLib(lib, **kwargs)
             context.did_show_result = 1
@@ -441,4 +473,4 @@ def setup_conftests(conf):
             context.env[var] = __missing_syslib(name)
             return context.Result(result)
 
-    conf.AddTest('FindSysLibDep', FindSysLibDep)
+    conf.AddTest("FindSysLibDep", FindSysLibDep)
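The core machinery in libdeps.py is the depth-first LIBDEPS walk: each node is flagged with `libdeps_exploring` while its children are visited, and re-entering a flagged node raises `DependencyCycleError`, which collects the cycle members as the stack unwinds. A minimal standalone sketch of that pattern, with plain dicts standing in for SCons nodes (all names here are illustrative, not part of the tool):

```python
# Marker-based DFS cycle detection, in the style of __get_libdeps above.
class DependencyCycleError(Exception):
    def __init__(self, first_node):
        super().__init__()
        self.cycle_nodes = [first_node]

    def __str__(self):
        return "Library dependency cycle detected: " + " => ".join(
            str(n) for n in self.cycle_nodes
        )


def walk_libdeps(node, edges, order=None, marked=None, exploring=None):
    """Post-order walk over edges (dict: node -> direct deps)."""
    if order is None:
        order, marked, exploring = [], set(), set()
    if node in exploring:
        raise DependencyCycleError(node)  # re-entered a node mid-exploration
    exploring.add(node)
    try:
        for child in sorted(edges.get(node, [])):
            if child not in marked:
                walk_libdeps(child, edges, order, marked, exploring)
    except DependencyCycleError as err:
        err.cycle_nodes.insert(0, node)  # grow the reported cycle on unwind
        raise
    exploring.discard(node)
    marked.add(node)
    order.append(node)
    return order
```

With an acyclic graph this returns a link order; with `{"a": ["b"], "b": ["a"]}` it raises and the error message names the full `a => b => a` cycle, much like the real scanner does for library targets.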
diff --git a/site_scons/site_tools/abilink.py b/site_scons/site_tools/abilink.py
index 00f5e71a0a3..0dde1e5a262 100644
--- a/site_scons/site_tools/abilink.py
+++ b/site_scons/site_tools/abilink.py
@@ -23,14 +23,14 @@ import subprocess
 
 def _detect(env):
     try:
-        abidw = env['ABIDW']
+        abidw = env["ABIDW"]
         if not abidw:
             return None
         return abidw
     except KeyError:
         pass
 
-    return env.WhereIs('abidw')
+    return env.WhereIs("abidw")
 
 
 def _add_emitter(builder):
@@ -62,14 +62,16 @@ def _add_scanner(builder):
             new_results.append(abidw if abidw else base)
         return new_results
 
-    builder.target_scanner = SCons.Scanner.Scanner(function=new_scanner,
-                                                   path_function=path_function)
+    builder.target_scanner = SCons.Scanner.Scanner(
+        function=new_scanner, path_function=path_function
+    )
 
 
 def _add_action(builder):
     actions = builder.action
     builder.action = actions + SCons.Action.Action(
-        "$ABIDW --no-show-locs $TARGET | md5sum > ${TARGET}.abidw")
+        "$ABIDW --no-show-locs $TARGET | md5sum > ${TARGET}.abidw"
+    )
 
 
 def exists(env):
@@ -82,9 +84,9 @@ def generate(env):
     if not exists(env):
         return
 
-    builder = env['BUILDERS']['SharedLibrary']
+    builder = env["BUILDERS"]["SharedLibrary"]
     _add_emitter(builder)
     _add_action(builder)
     _add_scanner(builder)
-    _add_scanner(env['BUILDERS']['Program'])
-    _add_scanner(env['BUILDERS']['LoadableModule'])
+    _add_scanner(env["BUILDERS"]["Program"])
+    _add_scanner(env["BUILDERS"]["LoadableModule"])
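The action appended in `_add_action` pipes `abidw --no-show-locs` through `md5sum` so a shared library's signature can track its ABI rather than its raw bytes. The same digest can be computed outside SCons; this is a hypothetical standalone helper under that assumption, not part of the tool:

```python
# Hash the textual ABI description of a shared library, so consumers can
# depend on the ABI instead of the file contents.
import hashlib
import subprocess


def abi_digest(abidw, shared_lib):
    # --no-show-locs omits source locations, so a rebuild that merely moves
    # code around does not perturb the digest.
    xml = subprocess.run(
        [abidw, "--no-show-locs", shared_lib],
        check=True,
        capture_output=True,
    ).stdout
    return hashlib.md5(xml).hexdigest()
```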
diff --git a/site_scons/site_tools/auto_install_binaries.py b/site_scons/site_tools/auto_install_binaries.py
index 6328f0ec0a4..0ff74a32144 100644
--- a/site_scons/site_tools/auto_install_binaries.py
+++ b/site_scons/site_tools/auto_install_binaries.py
@@ -102,26 +102,14 @@ if __name__ == "__main__":
 """
 
 RoleInfo = namedtuple(
-    'RoleInfo',
-    [
-        'alias_name',
-        'alias',
-        'components',
-        'roles',
-        'actions',
-        'dependencies'
-    ],
+    "RoleInfo",
+    ["alias_name", "alias", "components", "roles", "actions", "dependencies"],
 )
 
-SuffixMap = namedtuple(
-    'SuffixMap',
-    [
-        'directory',
-        'default_roles',
-    ],
-)
+SuffixMap = namedtuple("SuffixMap", ["directory", "default_roles",],)
+
 
-class DeclaredRole():
+class DeclaredRole:
     def __init__(self, name, dependencies=None, transitive=False, silent=False):
         self.name = name
@@ -133,18 +121,18 @@ class DeclaredRole():
         self.transitive = transitive
         self.silent = silent
 
+
 def declare_role(env, **kwargs):
     """Construct a new role declaration"""
     return DeclaredRole(**kwargs)
 
+
 def declare_roles(env, roles, base_role=None, meta_role=None):
     """Given a list of role declarations, validate them and store them in the environment"""
     role_names = [role.name for role in roles]
     if len(role_names) != len(set(role_names)):
-        raise Exception(
-            "Cannot declare duplicate roles"
-        )
+        raise Exception("Cannot declare duplicate roles")
 
     # Ensure that all roles named in dependency lists actually were
     # passed in as a role.
@@ -193,9 +181,7 @@ def declare_roles(env, roles, base_role=None, meta_role=None):
     silents = [role for role in roles if role.silent]
     if len(silents) > 1:
-        raise Exception(
-            "No more than one role can be declared as silent"
-        )
+        raise Exception("No more than one role can be declared as silent")
 
     # If a base role was given, then add it as a dependency of every
     # role that isn't the base role (which would be circular).
@@ -205,7 +191,7 @@ def declare_roles(env, roles, base_role=None, meta_role=None):
             role.dependencies.add(base_role)
 
     # Become a dictionary, so we can look up roles easily.
-    roles = { role.name : role for role in roles }
+    roles = {role.name: role for role in roles}
 
     # If a meta role was given, then add every role which isn't the
     # meta role as one of its dependencies.
@@ -228,6 +214,7 @@ def generate_alias(env, component, role, target="install"):
         role="" if env[ROLE_DECLARATIONS][role].silent else "-" + role,
     )
 
+
 def get_alias_map_entry(env, component, role):
     c_entry = env[ALIAS_MAP][component]
     try:
@@ -240,28 +227,23 @@ def get_alias_map_entry(env, component, role):
             components=set(),
             roles=set(),
             actions=[],
-            dependencies=[]
+            dependencies=[],
         )
         c_entry[role] = r_entry
         return r_entry
 
+
 def get_package_name(env, component, role):
     """Return the package file name for the component and role combination."""
     basename = env[PACKAGE_ALIAS_MAP].get(
-        (component, role),
-        "{component}-{role}".format(component=component, role=role)
+        (component, role), "{component}-{role}".format(component=component, role=role)
     )
 
     return basename
 
 
 def get_dependent_actions(
-        env,
-        components,
-        roles,
-        non_transitive_roles,
-        node,
-        cb=None,
+    env, components, roles, non_transitive_roles, node, cb=None,
 ):
     """
     Check if node is a transitive dependency of components and roles
@@ -285,16 +267,13 @@ def get_dependent_actions(
     # If they are overlapping then that means we can't transition to a
     # new role during scanning.
     if env[BASE_ROLE] not in roles:
-        can_transfer = (
-            non_transitive_roles
-            and roles.isdisjoint(non_transitive_roles)
-        )
+        can_transfer = non_transitive_roles and roles.isdisjoint(non_transitive_roles)
     else:
         can_transfer = True
 
     node_roles = {
-        role for role
-        in getattr(node.attributes, ROLES, set())
+        role
+        for role in getattr(node.attributes, ROLES, set())
         if role != env[META_ROLE]
     }
     if (
@@ -308,14 +287,7 @@ def get_dependent_actions(
         return []
 
     if cb is not None and callable(cb):
-        return cb(
-            components,
-            roles,
-            non_transitive_roles,
-            node,
-            node_roles,
-            actions,
-        )
+        return cb(components, roles, non_transitive_roles, node, node_roles, actions,)
     return actions
 
 
@@ -325,17 +297,19 @@ def scan_for_transitive_install(node, env, cb=None):
     install_sources = node.sources
     # Filter out all
     components = {
-        component for component
-        in getattr(node.sources[0].attributes, COMPONENTS, set())
+        component
+        for component in getattr(node.sources[0].attributes, COMPONENTS, set())
         if component != "all"
     }
     roles = {
-        role for role
-        in getattr(node.sources[0].attributes, ROLES, set())
+        role
+        for role in getattr(node.sources[0].attributes, ROLES, set())
        if role != env[META_ROLE]
     }
-    non_transitive_roles = {role for role in roles if env[ROLE_DECLARATIONS][role].transitive}
+    non_transitive_roles = {
+        role for role in roles if env[ROLE_DECLARATIONS][role].transitive
+    }
     for install_source in install_sources:
         install_executor = install_source.get_executor()
         if not install_executor:
@@ -348,12 +322,7 @@ def scan_for_transitive_install(node, env, cb=None):
         for grandchild in grandchildren:
             results.extend(
                 get_dependent_actions(
-                    env,
-                    components,
-                    roles,
-                    non_transitive_roles,
-                    grandchild,
-                    cb=cb,
+                    env, components, roles, non_transitive_roles, grandchild, cb=cb,
                 )
             )
 
@@ -388,10 +357,13 @@ def collect_transitive_files(env, source, installed, cache=None):
             children_to_collect.append(child)
 
     if children_to_collect:
-        files.extend(collect_transitive_files(env, children_to_collect, installed, cache))
+        files.extend(
+            collect_transitive_files(env, children_to_collect, installed, cache)
+        )
 
     return files
 
+
 def archive_builder(source, target, env, for_signature):
     """Build archives of the AutoInstall'd sources."""
     if not source:
@@ -419,7 +391,7 @@ def archive_builder(source, target, env, for_signature):
     # In python slicing a string with [:-0] gives an empty string. So
     # make sure we have a prefix to slice off before trying it.
     if prefix_elems:
-        common_ancestor = dest_dir_elems[:-len(prefix_elems)]
+        common_ancestor = dest_dir_elems[: -len(prefix_elems)]
     else:
         common_ancestor = dest_dir_elems
@@ -443,7 +415,7 @@ def archive_builder(source, target, env, for_signature):
     # set of known installed files along to the transitive dependency
     # walk so we can filter out files that aren't in the install
     # directory.
-    installed = env.get('__AIB_INSTALLED_SET', set())
+    installed = env.get("__AIB_INSTALLED_SET", set())
     transitive_files = collect_transitive_files(env, aliases, installed)
     paths = {file.get_abspath() for file in transitive_files}
@@ -455,10 +427,9 @@ def archive_builder(source, target, env, for_signature):
     # TODO: relpath is costly, and we do it for every file in the archive here. We should
     # find a way to avoid the repeated relpath invocation, probably by bucketing by directory.
-    relative_files = " ".join([
-        escape_func(os.path.relpath(path, common_ancestor))
-        for path in paths
-    ])
+    relative_files = " ".join(
+        [escape_func(os.path.relpath(path, common_ancestor)) for path in paths]
+    )
 
     return " ".join([command_prefix, relative_files])
@@ -477,14 +448,8 @@ def auto_install(env, target, source, **kwargs):
         roles = roles.union(set(kwargs[ROLES]))
 
     component = kwargs.get(PRIMARY_COMPONENT)
-    if (
-        component is not None
-        and (not isinstance(component, str)
-             or " " in component)
-    ):
-        raise Exception(
-            "AIB_COMPONENT must be a string and contain no whitespace."
-        )
+    if component is not None and (not isinstance(component, str) or " " in component):
+        raise Exception("AIB_COMPONENT must be a string and contain no whitespace.")
 
     components = {
         component,
@@ -517,15 +482,12 @@ def auto_install(env, target, source, **kwargs):
         # TODO: Find a way to not need this early subst.
         target = env.Dir(env.subst(target, source=source))
 
-        action = env.Install(
-            target=target,
-            source=s,
-        )
+        action = env.Install(target=target, source=s,)
 
         setattr(
             s.attributes,
             INSTALL_ACTIONS,
-            action if isinstance(action, (list, set)) else [action]
+            action if isinstance(action, (list, set)) else [action],
         )
         actions.append(action)
@@ -567,8 +529,7 @@ def finalize_install_dependencies(env):
     # TODO: $BUILD_ROOT should be $VARIANT_DIR after we fix our dir
     # setup later on.
     make_archive_script = env.Textfile(
-        target="$BUILD_ROOT/aib_make_archive.py",
-        source=[AIB_MAKE_ARCHIVE_CONTENT],
+        target="$BUILD_ROOT/aib_make_archive.py", source=[AIB_MAKE_ARCHIVE_CONTENT],
     )
 
     for component, rolemap in env[ALIAS_MAP].items():
@@ -623,7 +584,7 @@ def auto_install_emitter(target, source, env):
         # way available to us.
         #
         # We're working with upstream to expose this information.
-        if 'conftest' in str(entry):
+        if "conftest" in str(entry):
             continue
 
         auto_install_mapping = env[SUFFIX_MAP].get(suffix)
@@ -670,19 +631,20 @@ def add_package_name_alias(env, component, role, name):
     """Add a package name mapping for the combination of component and role."""
     # Verify we didn't get a None or empty string for any argument
     if not name:
-        raise Exception("when setting a package name alias must provide a name parameter")
+        raise Exception(
+            "when setting a package name alias must provide a name parameter"
+        )
     if not component:
         raise Exception("No component provided for package name alias")
     if not role:
         raise Exception("No role provided for package name alias")
     env[PACKAGE_ALIAS_MAP][(component, role)] = name
 
+
 def suffix_mapping(env, directory=False, default_roles=False):
     """Generate a SuffixMap object from source and target."""
-    return SuffixMap(
-        directory=directory,
-        default_roles=default_roles,
-    )
+    return SuffixMap(directory=directory, default_roles=default_roles,)
+
 
 def dest_dir_generator(initial_value=None):
     """Memoized dest_dir_generator"""
@@ -696,7 +658,7 @@ def dest_dir_generator(initial_value=None):
         # absolute path here because if it is the sub Dir call will
         # not expand correctly.
         prefix = env.subst("$PREFIX")
-        if prefix and prefix[0] == '/':
+        if prefix and prefix[0] == "/":
             prefix = prefix[1:]
 
         if dd[1] is not None and dd[0] == prefix:
@@ -751,7 +713,7 @@ def generate(env):  # pylint: disable=too-many-statements
     bld = SCons.Builder.Builder(
         action=SCons.Action.CommandGeneratorAction(
             archive_builder,
-            {"cmdstr": "Building package ${TARGETS[0]} from ${SOURCES[1:]}"},
+            {"cmdstr": "Building package ${TARGETS[0]} from ${SOURCES[1:]}"},
         )
     )
     env.Append(BUILDERS={"__AibArchive": bld})
@@ -763,7 +725,7 @@ def generate(env):  # pylint: disable=too-many-statements
     env["DESTDIR"] = dest_dir_generator(env.get("DESTDIR", None))
     env["PREFIX_BINDIR"] = env.get("PREFIX_BINDIR", "$DESTDIR/bin")
     env["PREFIX_LIBDIR"] = env.get("PREFIX_LIBDIR", "$DESTDIR/lib")
-    env["PREFIX_SHAREDIR"] = env.get("PREFIX_SHAREDIR", "$DESTDIR/share")
+    env["PREFIX_SHAREDIR"] = env.get("PREFIX_SHAREDIR", "$DESTDIR/share")
     env["PREFIX_DOCDIR"] = env.get("PREFIX_DOCDIR", "$PREFIX_SHAREDIR/doc")
     env["PREFIX_INCLUDEDIR"] = env.get("PREFIX_INCLUDEDIR", "$DESTDIR/include")
     env["PREFIX_DEBUGDIR"] = env.get("PREFIX_DEBUGDIR", _aib_debugdir)
@@ -793,10 +755,7 @@ def generate(env):  # pylint: disable=too-many-statements
         base_emitter = builder.emitter
         # TODO: investigate if using a ListEmitter here can cause
         # problems if AIB is not loaded last
-        new_emitter = SCons.Builder.ListEmitter([
-            base_emitter,
-            auto_install_emitter,
-        ])
+        new_emitter = SCons.Builder.ListEmitter([base_emitter, auto_install_emitter,])
         builder.emitter = new_emitter
 
     base_install_builder = install.BaseInstallBuilder
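The `generate_alias` hunk above shows how install alias names are composed from the target, the component, and the role, with a silent role contributing no suffix. A toy rendition of just that naming rule (the `silent_roles` set is an illustrative stand-in for looking up `env[ROLE_DECLARATIONS][role].silent`):

```python
# Alias naming in the style of generate_alias: silent roles collapse into
# the bare component alias, other roles get a "-<role>" suffix.
def alias_name(component, role, silent_roles, target="install"):
    suffix = "" if role in silent_roles else "-" + role
    return "{target}-{component}{role}".format(
        target=target, component=component, role=suffix
    )


assert alias_name("mongod", "runtime", silent_roles={"runtime"}) == "install-mongod"
assert alias_name("mongod", "debug", silent_roles={"runtime"}) == "install-mongod-debug"
```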
env.get("CCACHE", False) if not ccache: return False @@ -39,22 +40,26 @@ def exists(env): if not ccache: return False - pipe = SCons.Action._subproc(env, - SCons.Util.CLVar(ccache) + ['--version'], stdin='devnull', - stderr='devnull', stdout=subprocess.PIPE) + pipe = SCons.Action._subproc( + env, + SCons.Util.CLVar(ccache) + ["--version"], + stdin="devnull", + stderr="devnull", + stdout=subprocess.PIPE, + ) if pipe.wait() != 0: return False validated = False for line in pipe.stdout: - line = line.decode('utf-8') + line = line.decode("utf-8") if validated: continue # consume all data - version_banner = re.search(r'^ccache version', line) + version_banner = re.search(r"^ccache version", line) if not version_banner: continue - ccache_version = re.split('ccache version (.+)', line) + ccache_version = re.split("ccache version (.+)", line) if len(ccache_version) < 2: continue global _ccache_version_found @@ -64,11 +69,12 @@ def exists(env): return validated + def generate(env): """Add ccache support.""" # If we have already generated the tool, don't generate it again. - if 'CCACHE_VERSION' in env: + if "CCACHE_VERSION" in env: return # If we can't find ccache, or it is too old a version, don't @@ -81,18 +87,18 @@ def generate(env): # if ccache is active. Looking at the CCACHE variable in the # environment is not sufficient, since the user may have set it, # but it doesn't work or is out of date. - env['CCACHE_VERSION'] = _ccache_version_found + env["CCACHE_VERSION"] = _ccache_version_found # ccache does not support response files so force scons to always # use the full command # # Note: This only works for Python versions >= 3.5 - env['MAXLINELENGTH'] = math.inf + env["MAXLINELENGTH"] = math.inf # Add ccache to the relevant command lines. Wrap the reference to # ccache in the $( $) pattern so that turning ccache on or off # doesn't invalidate your build. - env['CCCOM'] = '$( $CCACHE $)' + env['CCCOM'] - env['CXXCOM'] = '$( $CCACHE $)' + env['CXXCOM'] - env['SHCCCOM'] = '$( $CCACHE $)' + env['SHCCCOM'] - env['SHCXXCOM'] = '$( $CCACHE $)' + env['SHCXXCOM'] + env["CCCOM"] = "$( $CCACHE $)" + env["CCCOM"] + env["CXXCOM"] = "$( $CCACHE $)" + env["CXXCOM"] + env["SHCCCOM"] = "$( $CCACHE $)" + env["SHCCCOM"] + env["SHCXXCOM"] = "$( $CCACHE $)" + env["SHCXXCOM"] diff --git a/site_scons/site_tools/compilation_db.py b/site_scons/site_tools/compilation_db.py index 03c5e70b42d..c3769275e26 100644 --- a/site_scons/site_tools/compilation_db.py +++ b/site_scons/site_tools/compilation_db.py @@ -32,13 +32,13 @@ __COMPILATION_DB_ENTRIES = [] # Cribbed from Tool/cc.py and Tool/c++.py. It would be better if # we could obtain this from SCons. -_CSuffixes = ['.c'] -if not SCons.Util.case_sensitive_suffixes('.c', '.C'): - _CSuffixes.append('.C') +_CSuffixes = [".c"] +if not SCons.Util.case_sensitive_suffixes(".c", ".C"): + _CSuffixes.append(".C") -_CXXSuffixes = ['.cpp', '.cc', '.cxx', '.c++', '.C++'] -if SCons.Util.case_sensitive_suffixes('.c', '.C'): - _CXXSuffixes.append('.C') +_CXXSuffixes = [".cpp", ".cc", ".cxx", ".c++", ".C++"] +if SCons.Util.case_sensitive_suffixes(".c", ".C"): + _CXXSuffixes.append(".C") # We make no effort to avoid rebuilding the entries. 
diff --git a/site_scons/site_tools/compilation_db.py b/site_scons/site_tools/compilation_db.py
index 03c5e70b42d..c3769275e26 100644
--- a/site_scons/site_tools/compilation_db.py
+++ b/site_scons/site_tools/compilation_db.py
@@ -32,13 +32,13 @@ __COMPILATION_DB_ENTRIES = []
 
 # Cribbed from Tool/cc.py and Tool/c++.py. It would be better if
 # we could obtain this from SCons.
-_CSuffixes = ['.c']
-if not SCons.Util.case_sensitive_suffixes('.c', '.C'):
-    _CSuffixes.append('.C')
+_CSuffixes = [".c"]
+if not SCons.Util.case_sensitive_suffixes(".c", ".C"):
+    _CSuffixes.append(".C")
 
-_CXXSuffixes = ['.cpp', '.cc', '.cxx', '.c++', '.C++']
-if SCons.Util.case_sensitive_suffixes('.c', '.C'):
-    _CXXSuffixes.append('.C')
+_CXXSuffixes = [".cpp", ".cc", ".cxx", ".c++", ".C++"]
+if SCons.Util.case_sensitive_suffixes(".c", ".C"):
+    _CXXSuffixes.append(".C")
 
 # We make no effort to avoid rebuilding the entries. Someday, perhaps we could and even
@@ -78,9 +78,13 @@ def makeEmitCompilationDbEntry(comstr):
         dbtarget = __CompilationDbNode(source)
 
         entry = env.__COMPILATIONDB_Entry(
-            target=dbtarget, source=[], __COMPILATIONDB_UTARGET=target,
-            __COMPILATIONDB_USOURCE=source, __COMPILATIONDB_UACTION=user_action,
-            __COMPILATIONDB_ENV=env)
+            target=dbtarget,
+            source=[],
+            __COMPILATIONDB_UTARGET=target,
+            __COMPILATIONDB_USOURCE=source,
+            __COMPILATIONDB_UACTION=user_action,
+            __COMPILATIONDB_ENV=env,
+        )
 
         # TODO: Technically, these next two lines should not be required: it should be fine to
         # cache the entries. However, they don't seem to update properly. Since they are quick
@@ -107,15 +111,16 @@ def CompilationDbEntryAction(target, source, env, **kw):
     :return: None
     """
 
-    command = env['__COMPILATIONDB_UACTION'].strfunction(
-        target=env['__COMPILATIONDB_UTARGET'],
-        source=env['__COMPILATIONDB_USOURCE'],
-        env=env['__COMPILATIONDB_ENV'],
+    command = env["__COMPILATIONDB_UACTION"].strfunction(
+        target=env["__COMPILATIONDB_UTARGET"],
+        source=env["__COMPILATIONDB_USOURCE"],
+        env=env["__COMPILATIONDB_ENV"],
     )
 
     entry = {
-        "directory": env.Dir('#').abspath, "command": command,
-        "file": str(env['__COMPILATIONDB_USOURCE'][0])
+        "directory": env.Dir("#").abspath,
+        "command": command,
+        "file": str(env["__COMPILATIONDB_USOURCE"][0]),
     }
 
     target[0].write(entry)
@@ -127,8 +132,10 @@ def WriteCompilationDb(target, source, env):
     for s in __COMPILATION_DB_ENTRIES:
         entries.append(s.read())
 
-    with open(str(target[0]), 'w') as target_file:
-        json.dump(entries, target_file, sort_keys=True, indent=4, separators=(',', ': '))
+    with open(str(target[0]), "w") as target_file:
+        json.dump(
+            entries, target_file, sort_keys=True, indent=4, separators=(",", ": ")
+        )
 
 
 def ScanCompilationDb(node, env, path):
@@ -139,18 +146,25 @@ def generate(env, **kwargs):
 
     static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
 
-    env['COMPILATIONDB_COMSTR'] = kwargs.get('COMPILATIONDB_COMSTR',
-                                             'Building compilation database $TARGET')
+    env["COMPILATIONDB_COMSTR"] = kwargs.get(
+        "COMPILATIONDB_COMSTR", "Building compilation database $TARGET"
+    )
 
     components_by_suffix = itertools.chain(
-        itertools.product(_CSuffixes, [
-            (static_obj, SCons.Defaults.StaticObjectEmitter, '$CCCOM'),
-            (shared_obj, SCons.Defaults.SharedObjectEmitter, '$SHCCCOM'),
-        ]),
-        itertools.product(_CXXSuffixes, [
-            (static_obj, SCons.Defaults.StaticObjectEmitter, '$CXXCOM'),
-            (shared_obj, SCons.Defaults.SharedObjectEmitter, '$SHCXXCOM'),
-        ]),
+        itertools.product(
+            _CSuffixes,
+            [
+                (static_obj, SCons.Defaults.StaticObjectEmitter, "$CCCOM"),
+                (shared_obj, SCons.Defaults.SharedObjectEmitter, "$SHCCCOM"),
+            ],
+        ),
+        itertools.product(
+            _CXXSuffixes,
+            [
+                (static_obj, SCons.Defaults.StaticObjectEmitter, "$CXXCOM"),
+                (shared_obj, SCons.Defaults.SharedObjectEmitter, "$SHCXXCOM"),
+            ],
+        ),
     )
 
     for entry in components_by_suffix:
@@ -159,17 +173,20 @@ def generate(env, **kwargs):
 
         # Assumes a dictionary emitter
         emitter = builder.emitter[suffix]
-        builder.emitter[suffix] = SCons.Builder.ListEmitter([
-            emitter,
-            makeEmitCompilationDbEntry(command),
-        ])
+        builder.emitter[suffix] = SCons.Builder.ListEmitter(
+            [emitter, makeEmitCompilationDbEntry(command),]
+        )
 
-    env['BUILDERS']['__COMPILATIONDB_Entry'] = SCons.Builder.Builder(
-        action=SCons.Action.Action(CompilationDbEntryAction, None), )
+    env["BUILDERS"]["__COMPILATIONDB_Entry"] = SCons.Builder.Builder(
+        action=SCons.Action.Action(CompilationDbEntryAction, None),
+    )
 
-    env['BUILDERS']['__COMPILATIONDB_Database'] = SCons.Builder.Builder(
+    env["BUILDERS"]["__COMPILATIONDB_Database"] = SCons.Builder.Builder(
         action=SCons.Action.Action(WriteCompilationDb, "$COMPILATIONDB_COMSTR"),
-        target_scanner=SCons.Scanner.Scanner(function=ScanCompilationDb, node_class=None))
+        target_scanner=SCons.Scanner.Scanner(
+            function=ScanCompilationDb, node_class=None
+        ),
+    )
 
     def CompilationDatabase(env, target):
         result = env.__COMPILATIONDB_Database(target=target, source=[])
@@ -179,7 +196,7 @@ def generate(env, **kwargs):
 
         return result
 
-    env.AddMethod(CompilationDatabase, 'CompilationDatabase')
+    env.AddMethod(CompilationDatabase, "CompilationDatabase")
 
 
 def exists(env):
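For reference, `WriteCompilationDb` emits the standard clang JSON compilation database: a list of `directory`/`command`/`file` objects, serialized with exactly the `json.dump` options shown in the hunk above. A minimal sketch with made-up paths and commands:

```python
# Shape of a compile_commands.json document, one entry per translation unit.
import json

entries = [
    {
        "directory": "/home/user/mongo",  # illustrative build root
        "command": "g++ -o build/foo.o -c src/foo.cpp",
        "file": "src/foo.cpp",
    }
]

with open("compile_commands.json", "w") as target_file:
    json.dump(entries, target_file, sort_keys=True, indent=4, separators=(",", ": "))
```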
diff --git a/site_scons/site_tools/dagger/__init__.py b/site_scons/site_tools/dagger/__init__.py
index c63bfc6967e..2e82ebeb058 100644
--- a/site_scons/site_tools/dagger/__init__.py
+++ b/site_scons/site_tools/dagger/__init__.py
@@ -7,6 +7,7 @@ import SCons
 
 from . import dagger
 
+
 def generate(env, **kwargs):
     """The entry point for our tool. However, the builder for
     the JSON file is not actually run until the Dagger method is called
@@ -14,28 +15,43 @@ def generate(env, **kwargs):
     to the native builders for object/libraries.
     """
 
-    env.Replace(LIBEMITTER=SCons.Builder.ListEmitter([env['LIBEMITTER'], dagger.emit_lib_db_entry]))
+    env.Replace(
+        LIBEMITTER=SCons.Builder.ListEmitter(
+            [env["LIBEMITTER"], dagger.emit_lib_db_entry]
+        )
+    )
     running_os = os.sys.platform
 
-    if not (running_os.startswith('win') or running_os.startswith('sun')):
+    if not (running_os.startswith("win") or running_os.startswith("sun")):
         env.Replace(
-            PROGEMITTER=SCons.Builder.ListEmitter([env['PROGEMITTER'], dagger.emit_prog_db_entry]))
+            PROGEMITTER=SCons.Builder.ListEmitter(
+                [env["PROGEMITTER"], dagger.emit_prog_db_entry]
+            )
+        )
 
     static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
-    suffixes = ['.c', '.cc', '.cxx', '.cpp']
+    suffixes = [".c", ".cc", ".cxx", ".cpp"]
     obj_builders = [static_obj, shared_obj]
-    default_emitters = [SCons.Defaults.StaticObjectEmitter, SCons.Defaults.SharedObjectEmitter]
+    default_emitters = [
+        SCons.Defaults.StaticObjectEmitter,
+        SCons.Defaults.SharedObjectEmitter,
+    ]
 
     for suffix in suffixes:
         for i in range(len(obj_builders)):
             obj_builders[i].add_emitter(
-                suffix, SCons.Builder.ListEmitter([dagger.emit_obj_db_entry, default_emitters[i]]))
+                suffix,
+                SCons.Builder.ListEmitter(
+                    [dagger.emit_obj_db_entry, default_emitters[i]]
+                ),
+            )
 
-    env['BUILDERS']['__OBJ_DATABASE'] = SCons.Builder.Builder(
-        action=SCons.Action.Action(dagger.write_obj_db, None))
+    env["BUILDERS"]["__OBJ_DATABASE"] = SCons.Builder.Builder(
+        action=SCons.Action.Action(dagger.write_obj_db, None)
+    )
 
     def Dagger(env, target="library_dependency_graph.json"):
-        if running_os.startswith('win') or running_os.startswith('sun'):
+        if running_os.startswith("win") or running_os.startswith("sun"):
             logging.error("Dagger is only supported on OSX and Linux")
             return
         result = env.__OBJ_DATABASE(target=target, source=[])
@@ -44,7 +60,7 @@ def generate(env, **kwargs):
 
         return result
 
-    env.AddMethod(Dagger, 'Dagger')
+    env.AddMethod(Dagger, "Dagger")
 
 
 def exists(env):
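dagger hooks the build by prepending its record-keeping emitters to the stock ones via `SCons.Builder.ListEmitter`, so every object file is observed before the default emitter runs. A condensed sketch of that registration pattern; the `OBJ_DB` list and the pass-through emitter body are illustrative stand-ins for dagger's real bookkeeping:

```python
# Chain a recording emitter in front of the default object emitters.
import SCons.Builder
import SCons.Defaults
import SCons.Tool

OBJ_DB = []  # illustrative stand-in for dagger's module-level database


def emit_obj_db_entry(target, source, env):
    """Record every emitted object node, then pass target/source through."""
    OBJ_DB.extend(target)
    return target, source


def generate(env):
    static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
    for suffix in [".c", ".cc", ".cxx", ".cpp"]:
        for builder, default_emitter in (
            (static_obj, SCons.Defaults.StaticObjectEmitter),
            (shared_obj, SCons.Defaults.SharedObjectEmitter),
        ):
            builder.add_emitter(
                suffix,
                SCons.Builder.ListEmitter([emit_obj_db_entry, default_emitter]),
            )
```

Because the ListEmitter runs its members in order, the recorder sees each node first and the default emitter still performs its normal target/source fixups.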
diff --git a/site_scons/site_tools/dagger/dagger.py b/site_scons/site_tools/dagger/dagger.py
index cc208dd23c2..061401e9bd7 100644
--- a/site_scons/site_tools/dagger/dagger.py
+++ b/site_scons/site_tools/dagger/dagger.py
@@ -44,9 +44,12 @@ from . import graph
 from . import graph_consts
 
-LIB_DB = [] # Stores every SCons library nodes
-OBJ_DB = [] # Stores every SCons object file node
-EXE_DB = {} # Stores every SCons executable node, with the object files that build into it {Executable: [object files]}
+LIB_DB = []  # Stores every SCons library nodes
+OBJ_DB = []  # Stores every SCons object file node
+EXE_DB = (
+    {}
+)  # Stores every SCons executable node, with the object files that build into it {Executable: [object files]}
+
 
 def list_process(items):
     """From WIL, converts lists generated from an NM command with unicode strings to lists
@@ -57,12 +60,12 @@ def list_process(items):
     for l in items:
         if isinstance(l, list):
             for i in l:
-                if i.startswith('.L'):
+                if i.startswith(".L"):
                     continue
                 else:
                     r.append(str(i))
         else:
-            if l.startswith('.L'):
+            if l.startswith(".L"):
                 continue
             else:
                 r.append(str(l))
@@ -75,26 +78,26 @@ def get_symbol_worker(object_file, task):
     """From WIL, launches a worker subprocess which collects either symbols defined
     or symbols required by an object file"""
 
-    platform = 'linux' if sys.platform.startswith('linux') else 'darwin'
+    platform = "linux" if sys.platform.startswith("linux") else "darwin"
 
-    if platform == 'linux':
-        if task == 'used':
+    if platform == "linux":
+        if task == "used":
             cmd = r'nm "' + object_file + r'" | grep -e "U " | c++filt'
-        elif task == 'defined':
+        elif task == "defined":
             cmd = r'nm "' + object_file + r'" | grep -v -e "U " | c++filt'
-    elif platform == 'darwin':
-        if task == 'used':
+    elif platform == "darwin":
+        if task == "used":
             cmd = "nm -u " + object_file + " | c++filt"
-        elif task == 'defined':
+        elif task == "defined":
             cmd = "nm -jU " + object_file + " | c++filt"
 
     p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
     uses = p.communicate()[0].decode()
 
-    if platform == 'linux':
-        return list_process([use[19:] for use in uses.split('\n') if use != ''])
-    elif platform == 'darwin':
-        return list_process([use.strip() for use in uses.split('\n') if use != ''])
+    if platform == "linux":
+        return list_process([use[19:] for use in uses.split("\n") if use != ""])
+    elif platform == "darwin":
+        return list_process([use.strip() for use in uses.split("\n") if use != ""])
 
 
 def emit_obj_db_entry(target, source, env):
@@ -135,7 +138,7 @@ def __compute_libdeps(node):
 
     env = node.get_env()
     deps = set()
-    for child in env.Flatten(getattr(node.attributes, 'libdeps_direct', [])):
+    for child in env.Flatten(getattr(node.attributes, "libdeps_direct", [])):
         if not child:
             continue
         deps.add(child)
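The symbol harvesting above shells out to `nm` and `c++filt`; on Linux the output columns are fixed-width, which is why the code slices each line at column 19 to drop the address and type fields. A stripped-down version of the Linux branch, usable outside the tool:

```python
# List the undefined ("used") or defined symbols of an object file via
# nm | c++filt, mirroring get_symbol_worker's Linux code path.
import subprocess


def symbols(object_file, task="used"):
    if task == "used":
        cmd = 'nm "' + object_file + '" | grep -e "U " | c++filt'
    else:  # "defined"
        cmd = 'nm "' + object_file + '" | grep -v -e "U " | c++filt'
    out = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).communicate()[0]
    # Column 19 onward holds the demangled symbol name on Linux.
    return [line[19:] for line in out.decode().split("\n") if line]
```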
diff --git a/site_scons/site_tools/dagger/graph.py b/site_scons/site_tools/dagger/graph.py
index d8d1e6938ee..fdbc4dc98ba 100644
--- a/site_scons/site_tools/dagger/graph.py
+++ b/site_scons/site_tools/dagger/graph.py
@@ -6,6 +6,7 @@ import copy
 
 from . import graph_consts
 
+
 class Graph(object):
     """Graph class for storing the build dependency graph. The graph stores the
     directed edges as a nested dict of { RelationshipType: {From_Node: Set of
@@ -18,8 +19,8 @@ class Graph(object):
         A graph can be initialized with a .json file, graph object, or with no args
         """
         if isinstance(input, str):
-            if input.endswith('.json'):
-                with open(input, 'r') as f:
+            if input.endswith(".json"):
+                with open(input, "r") as f:
                     data = json.load(f, encoding="ascii")
                 nodes = {}
                 should_fail = False
@@ -27,8 +28,9 @@ class Graph(object):
                 for node in data["nodes"]:
                     id = str(node["id"])
                     try:
-                        nodes[id] = node_factory(id, int(node["node"]["type"]),
-                                                 dict_source=node["node"])
+                        nodes[id] = node_factory(
+                            id, int(node["node"]["type"]), dict_source=node["node"]
+                        )
                     except Exception as e:
                         logging.warning("Malformed Data: " + id)
                         should_fail = True
@@ -116,7 +118,9 @@ class Graph(object):
         if from_node not in self._edges[relationship]:
             self._edges[relationship][from_node] = set()
 
-        if any(item is None for item in (from_node, to_node, from_node_obj, to_node_obj)):
+        if any(
+            item is None for item in (from_node, to_node, from_node_obj, to_node_obj)
+        ):
             raise ValueError
 
         self._edges[relationship][from_node].add(to_node)
@@ -150,21 +154,28 @@ class Graph(object):
             edges_dict = self._edges[edge_type]
             for node in list(edges_dict.keys()):
                 to_nodes = list(self._edges[edge_type][node])
-                to_nodes_dicts = [{"index": node_index[to_node], "id": to_node}
-                                  for to_node in to_nodes]
-
-                data["edges"].append({"type": edge_type,
-                                      "from_node": {"id": node,
-                                                    "index": node_index[node]},
-                                      "to_node": to_nodes_dicts})
-
-        with open(filename, 'w', encoding="ascii") as outfile:
+                to_nodes_dicts = [
+                    {"index": node_index[to_node], "id": to_node}
+                    for to_node in to_nodes
+                ]
+
+                data["edges"].append(
+                    {
+                        "type": edge_type,
+                        "from_node": {"id": node, "index": node_index[node]},
+                        "to_node": to_nodes_dicts,
+                    }
+                )
+
+        with open(filename, "w", encoding="ascii") as outfile:
             json.dump(data, outfile, indent=4)
 
     def __str__(self):
-        return ("<Number of Nodes : {0}, Number of Edges : {1}, "
-                "Hash: {2}>").format(len(list(self._nodes.keys())),
-                                     sum(len(x) for x in list(self._edges.values())), hash(self))
+        return ("<Number of Nodes : {0}, Number of Edges : {1}, " "Hash: {2}>").format(
+            len(list(self._nodes.keys())),
+            sum(len(x) for x in list(self._edges.values())),
+            hash(self),
+        )
 
 
 class NodeInterface(object, metaclass=abc.ABCMeta):
@@ -183,6 +194,7 @@ class NodeInterface(object, metaclass=abc.ABCMeta):
 
 class NodeLib(NodeInterface):
     """NodeLib class which represents a library within the graph """
+
     def __init__(self, id, name, input=None):
         if isinstance(input, dict):
             should_fail = False
@@ -283,10 +295,13 @@ class NodeLib(NodeInterface):
 
     def __eq__(self, other):
         if isinstance(other, NodeLib):
-            return (self._id == other._id and self._defined_symbols == other._defined_symbols
-                    and self._defined_files == other._defined_files
-                    and self._dependent_libs == other._dependent_libs
-                    and self._dependent_files == other._dependent_files)
+            return (
+                self._id == other._id
+                and self._defined_symbols == other._defined_symbols
+                and self._defined_files == other._defined_files
+                and self._dependent_libs == other._dependent_libs
+                and self._dependent_files == other._dependent_files
+            )
         else:
             return False
 
@@ -409,10 +424,13 @@ class NodeSymbol(NodeInterface):
 
     def __eq__(self, other):
         if isinstance(other, NodeSymbol):
-            return (self.id == other.id and self._libs == other._libs
-                    and self._files == other._files
-                    and self._dependent_libs == other._dependent_libs
-                    and self._dependent_files == other._dependent_files)
+            return (
+                self.id == other.id
+                and self._libs == other._libs
+                and self._files == other._files
+                and self._dependent_libs == other._dependent_libs
+                and self._dependent_files == other._dependent_files
+            )
         else:
             return False
 
@@ -527,10 +545,13 @@ class NodeFile(NodeInterface):
 
     def __eq__(self, other):
         if isinstance(other, NodeSymbol):
-            return (self.id == other.id and self._lib == other._lib
-                    and self._dependent_libs == other._dependent_libs
-                    and self._dependent_files == other._dependent_files
-                    and self._defined_symbols == other._defined_symbols)
+            return (
+                self.id == other.id
+                and self._lib == other._lib
+                and self._dependent_libs == other._dependent_libs
+                and self._dependent_files == other._dependent_files
+                and self._defined_symbols == other._defined_symbols
+            )
         else:
             return False
diff --git a/site_scons/site_tools/dagger/graph_consts.py b/site_scons/site_tools/dagger/graph_consts.py
index eae5db9b6c6..a2077675463 100644
--- a/site_scons/site_tools/dagger/graph_consts.py
+++ b/site_scons/site_tools/dagger/graph_consts.py
@@ -22,4 +22,3 @@ NODE_TYPES = list(range(1, 5))
 
 """Error/query codes"""
 NODE_NOT_FOUND = 1
-
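`export_to_json` writes the graph in the shape that `Graph.__init__` reads back. Reconstructed from the two methods visible above, a document looks roughly like the sketch below; the identifiers and the node payload fields beyond `type` are illustrative, and the `type` codes come from `graph_consts`:

```python
# Approximate shape of the dagger graph JSON, per Graph.__init__ and
# Graph.export_to_json above.
graph_json = {
    "nodes": [
        {"id": "libfoo.a", "node": {"type": 1}},  # payload handed to node_factory
        {"id": "libbar.a", "node": {"type": 1}},
    ],
    "edges": [
        {
            "type": 1,  # a graph_consts relationship constant, e.g. LIB_LIB
            "from_node": {"id": "libfoo.a", "index": 0},
            "to_node": [{"index": 1, "id": "libbar.a"}],
        }
    ],
}
```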
diff --git a/site_scons/site_tools/dagger/graph_test.py b/site_scons/site_tools/dagger/graph_test.py
index 39bdf77ab7c..e532386f852 100644
--- a/site_scons/site_tools/dagger/graph_test.py
+++ b/site_scons/site_tools/dagger/graph_test.py
@@ -73,22 +73,33 @@ class CustomAssertions:
             raise AssertionError("Nodes not of same type")
 
         if node1.type == graph_consts.NODE_LIB:
-            if (node1._defined_symbols != node2._defined_symbols
-                    or node1._defined_files != node2._defined_files
-                    or node1._dependent_libs != node2._dependent_libs
-                    or node1._dependent_files != node2._dependent_files or node1._id != node2._id):
+            if (
+                node1._defined_symbols != node2._defined_symbols
+                or node1._defined_files != node2._defined_files
+                or node1._dependent_libs != node2._dependent_libs
+                or node1._dependent_files != node2._dependent_files
+                or node1._id != node2._id
+            ):
                 raise AssertionError("Nodes not equal")
 
         elif node1.type == graph_consts.NODE_SYM:
-            if (node1._libs != node2._libs or node1._files != node2._files
-                    or node1._dependent_libs != node2._dependent_libs
-                    or node1._dependent_files != node2._dependent_files or node1.id != node2.id):
+            if (
+                node1._libs != node2._libs
+                or node1._files != node2._files
+                or node1._dependent_libs != node2._dependent_libs
+                or node1._dependent_files != node2._dependent_files
+                or node1.id != node2.id
+            ):
                 raise AssertionError("Nodes not equal")
 
         else:
-            if (node1._lib != node2._lib or node1._dependent_libs != node2._dependent_libs
-                    or node1._dependent_files != node2._dependent_files or node1.id != node2.id
-                    or node1._defined_symbols != node2._defined_symbols):
+            if (
+                node1._lib != node2._lib
+                or node1._dependent_libs != node2._dependent_libs
+                or node1._dependent_files != node2._dependent_files
+                or node1.id != node2.id
+                or node1._defined_symbols != node2._defined_symbols
+            ):
                 raise AssertionError("Nodes not equal")
 
@@ -131,51 +142,80 @@ class TestGraphMethods(unittest.TestCase, CustomAssertions):
         self.assertRaises(TypeError, self.g.add_node, "not a node")
 
     def test_add_edge_exceptions(self):
-        self.assertRaises(TypeError, self.g.add_edge, "NOT A RELATIONSHIP",
-                          self.from_node_lib.id, self.to_node_lib.id)
-
-        self.assertRaises(ValueError, self.g.add_edge,
-                          graph_consts.LIB_LIB, "not a node", "not a node")
+        self.assertRaises(
+            TypeError,
+            self.g.add_edge,
+            "NOT A RELATIONSHIP",
+            self.from_node_lib.id,
+            self.to_node_lib.id,
+        )
+
+        self.assertRaises(
+            ValueError,
+            self.g.add_edge,
+            graph_consts.LIB_LIB,
+            "not a node",
+            "not a node",
+        )
 
     def test_add_edge_libs(self):
-        self.g.add_edge(graph_consts.LIB_LIB, self.from_node_lib.id,
-                        self.to_node_lib.id)
-        self.g.add_edge(graph_consts.LIB_LIB, self.from_node_lib.id,
-                        self.to_node_lib.id)
-        self.g.add_edge(graph_consts.LIB_SYM, self.from_node_lib.id,
-                        self.to_node_sym.id)
-        self.g.add_edge(graph_consts.LIB_FIL, self.from_node_lib.id,
-                        self.to_node_file.id)
-
-        self.assertEqual(self.g.edges[graph_consts.LIB_LIB][
-            self.from_node_lib.id], set([self.to_node_lib.id]))
-
-        self.assertEqual(self.g.edges[graph_consts.LIB_SYM][
-            self.from_node_lib.id], set([self.to_node_sym.id]))
-
-        self.assertEqual(self.g.edges[graph_consts.LIB_FIL][
-            self.from_node_lib.id], set([self.to_node_file.id]))
-
-        self.assertEqual(self.to_node_lib.dependent_libs,
-                         set([self.from_node_lib.id]))
+        self.g.add_edge(
+            graph_consts.LIB_LIB, self.from_node_lib.id, self.to_node_lib.id
+        )
+        self.g.add_edge(
+            graph_consts.LIB_LIB, self.from_node_lib.id, self.to_node_lib.id
+        )
+        self.g.add_edge(
+            graph_consts.LIB_SYM, self.from_node_lib.id, self.to_node_sym.id
+        )
+        self.g.add_edge(
+            graph_consts.LIB_FIL, self.from_node_lib.id, self.to_node_file.id
+        )
+
+        self.assertEqual(
+            self.g.edges[graph_consts.LIB_LIB][self.from_node_lib.id],
+            set([self.to_node_lib.id]),
+        )
+
+        self.assertEqual(
+            self.g.edges[graph_consts.LIB_SYM][self.from_node_lib.id],
+            set([self.to_node_sym.id]),
+        )
+
+        self.assertEqual(
+            self.g.edges[graph_consts.LIB_FIL][self.from_node_lib.id],
+            set([self.to_node_file.id]),
+        )
+
+        self.assertEqual(self.to_node_lib.dependent_libs, set([self.from_node_lib.id]))
 
     def test_add_edge_files(self):
-        self.g.add_edge(graph_consts.FIL_FIL, self.from_node_file.id,
-                        self.to_node_file.id)
-        self.g.add_edge(graph_consts.FIL_SYM, self.from_node_file.id,
-                        self.to_node_sym.id)
-        self.g.add_edge(graph_consts.FIL_LIB, self.from_node_file.id,
-                        self.to_node_lib.id)
-
-        self.assertEqual(self.g.edges[graph_consts.FIL_FIL][
-            self.from_node_file.id], set([self.to_node_file.id]))
-        self.assertEqual(self.g.edges[graph_consts.FIL_SYM][
-            self.from_node_file.id], set([self.to_node_sym.id]))
-        self.assertEqual(self.g.edges[graph_consts.FIL_LIB][
-            self.from_node_file.id], set([self.to_node_lib.id]))
-
-        self.assertEqual(self.to_node_file.dependent_files,
-                         set([self.from_node_file.id]))
+        self.g.add_edge(
+            graph_consts.FIL_FIL, self.from_node_file.id, self.to_node_file.id
+        )
+        self.g.add_edge(
+            graph_consts.FIL_SYM, self.from_node_file.id, self.to_node_sym.id
+        )
+        self.g.add_edge(
+            graph_consts.FIL_LIB, self.from_node_file.id, self.to_node_lib.id
+        )
+
+        self.assertEqual(
+            self.g.edges[graph_consts.FIL_FIL][self.from_node_file.id],
+            set([self.to_node_file.id]),
+        )
+        self.assertEqual(
+            self.g.edges[graph_consts.FIL_SYM][self.from_node_file.id],
+            set([self.to_node_sym.id]),
+        )
+        self.assertEqual(
+            self.g.edges[graph_consts.FIL_LIB][self.from_node_file.id],
+            set([self.to_node_lib.id]),
+        )
+
+        self.assertEqual(
+            self.to_node_file.dependent_files, set([self.from_node_file.id])
+        )
 
     def test_export_to_json(self):
         generated_graph = generate_graph()
@@ -197,10 +237,11 @@ class TestGraphMethods(unittest.TestCase, CustomAssertions):
             # the need for a custom assertion
 
             self.assertNodeEquals(
-                graph_fromJSON.get_node(id), correct_graph.get_node(id))
+                graph_fromJSON.get_node(id), correct_graph.get_node(id)
+            )
 
         self.assertEqual(graph_fromJSON.edges, correct_graph.edges)
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/site_scons/site_tools/distsrc.py b/site_scons/site_tools/distsrc.py
index cc72c0655f7..6fab5559521 100644
--- a/site_scons/site_tools/distsrc.py
+++ b/site_scons/site_tools/distsrc.py
@@ -26,13 +26,15 @@ from distutils.spawn import find_executable
 
 __distsrc_callbacks = []
 
+
 class DistSrcFile:
     def __init__(self, **kwargs):
-        [ setattr(self, key, val) for (key, val) in list(kwargs.items()) ]
+        [setattr(self, key, val) for (key, val) in list(kwargs.items())]
 
     def __str__(self):
         return self.name
 
+
 class DistSrcArchive:
     def __init__(self, archive_type, archive_file, filename, mode):
         self.archive_type = archive_type
@@ -44,17 +46,14 @@ class DistSrcArchive:
     def Open(filename):
         if filename.endswith("tar"):
             return DistSrcTarArchive(
-                'tar',
-                tarfile.open(filename, 'r', format=tarfile.PAX_FORMAT),
+                "tar",
+                tarfile.open(filename, "r", format=tarfile.PAX_FORMAT),
                 filename,
-                'r',
+                "r",
             )
         elif filename.endswith("zip"):
             return DistSrcZipArchive(
-                'zip',
-                zipfile.ZipFile(filename, 'a'),
-                filename,
-                'a',
+                "zip", zipfile.ZipFile(filename, "a"), filename, "a",
             )
 
     def close(self):
@@ -78,29 +77,31 @@ class DistSrcTarArchive(DistSrcArchive):
             uid=item_data.uid,
             gid=item_data.gid,
             uname=item_data.uname,
-            gname=item_data.uname
+            gname=item_data.uname,
         )
 
-    def append_file_contents(self, filename, file_contents,
-                             mtime=time.time(),
-                             mode=0o644,
-                             uname="root",
-                             gname="root"):
+    def append_file_contents(
+        self,
+        filename,
+        file_contents,
+        mtime=time.time(),
+        mode=0o644,
+        uname="root",
+        gname="root",
+    ):
         file_metadata = tarfile.TarInfo(name=filename)
         file_metadata.mtime = mtime
         file_metadata.mode = mode
         file_metadata.uname = uname
         file_metadata.gname = gname
         file_metadata.size = len(file_contents)
-        file_buf = io.BytesIO(file_contents.encode('utf-8'))
-        if self.archive_mode == 'r':
+        file_buf = io.BytesIO(file_contents.encode("utf-8"))
+        if self.archive_mode == "r":
             self.archive_file.close()
             self.archive_file = tarfile.open(
-                self.archive_name,
-                'a',
-                format=tarfile.PAX_FORMAT,
+                self.archive_name, "a", format=tarfile.PAX_FORMAT,
             )
-            self.archive_mode = 'a'
+            self.archive_mode = "a"
         self.archive_file.addfile(file_metadata, fileobj=file_buf)
 
     def append_file(self, filename, localfile):
@@ -126,24 +127,31 @@ class DistSrcZipArchive(DistSrcArchive):
             uid=0,
             gid=0,
             uname="root",
-            gname="root"
+            gname="root",
         )
 
-    def append_file_contents(self, filename, file_contents,
-                             mtime=time.time(),
-                             mode=0o644,
-                             uname="root",
-                             gname="root"):
+    def append_file_contents(
+        self,
+        filename,
+        file_contents,
+        mtime=time.time(),
+        mode=0o644,
+        uname="root",
+        gname="root",
+    ):
         self.archive_file.writestr(filename, file_contents)
 
     def append_file(self, filename, localfile):
         self.archive_file.write(localfile, arcname=filename)
 
+
 def build_error_action(msg):
     def error_stub(target=None, source=None, env=None):
         print(msg)
         env.Exit(1)
-    return [ error_stub ]
+
+    return [error_stub]
+
 
 def distsrc_action_generator(source, target, env, for_signature):
     # This is done in two stages because env.WhereIs doesn't seem to work
@@ -163,16 +171,20 @@ def distsrc_action_generator(source, target, env, for_signature):
         archive_wrapper.close()
 
     target_ext = str(target[0])[-3:]
-    if not target_ext in [ 'zip', 'tar' ]:
+    if not target_ext in ["zip", "tar"]:
         print("Invalid file format for distsrc. Must be tar or zip file")
         env.Exit(1)
 
-    git_cmd = "\"%s\" archive --format %s --output %s --prefix ${MONGO_DIST_SRC_PREFIX} HEAD" % (
-        git_path, target_ext, target[0])
+    git_cmd = (
+        '"%s" archive --format %s --output %s --prefix ${MONGO_DIST_SRC_PREFIX} HEAD'
+        % (git_path, target_ext, target[0])
+    )
 
     return [
         SCons.Action.Action(git_cmd, "Running git archive for $TARGET"),
-        SCons.Action.Action(run_distsrc_callbacks, "Running distsrc callbacks for $TARGET")
+        SCons.Action.Action(
+            run_distsrc_callbacks, "Running distsrc callbacks for $TARGET"
+        ),
     ]
 
 
@@ -181,8 +193,10 @@ def add_callback(env, fn):
 
 
 def generate(env, **kwargs):
-    env.AddMethod(add_callback, 'AddDistSrcCallback')
-    env['BUILDERS']['__DISTSRC'] = SCons.Builder.Builder(generator=distsrc_action_generator, )
+    env.AddMethod(add_callback, "AddDistSrcCallback")
+    env["BUILDERS"]["__DISTSRC"] = SCons.Builder.Builder(
+        generator=distsrc_action_generator,
+    )
 
     def DistSrc(env, target):
         result = env.__DISTSRC(target=target, source=[])
@@ -190,7 +204,7 @@ def generate(env, **kwargs):
         env.NoCache(result)
         return result
 
-    env.AddMethod(DistSrc, 'DistSrc')
+    env.AddMethod(DistSrc, "DistSrc")
 
 
 def exists(env):
diff --git a/site_scons/site_tools/git_decider.py b/site_scons/site_tools/git_decider.py
index fb0c9c7657b..1d56a302e17 100644
--- a/site_scons/site_tools/git_decider.py
+++ b/site_scons/site_tools/git_decider.py
@@ -20,21 +20,22 @@ def generate(env, **kwargs):
     # Grab the existing decider functions out of the environment
     # so we can invoke them when we can't use Git.
     base_decider = env.decide_target
-    if (base_decider != env.decide_source):
+    if base_decider != env.decide_source:
         raise Exception("Decider environment seems broken")
 
     from git import Git
-    thisRepo = Git(env.Dir('#').abspath)
-    currentGitState = thisRepo.ls_files('--stage')
-    lines = currentGitState.split('\n')
+
+    thisRepo = Git(env.Dir("#").abspath)
+    currentGitState = thisRepo.ls_files("--stage")
+    lines = currentGitState.split("\n")
 
     file_sha1_map = {}
     for line in lines:
         line_content = line.split()
         file_sha1_map[env.File(line_content[3]).path] = line_content[1]
 
-    for m in thisRepo.ls_files('-m').split('\n'):
-        if (m):
+    for m in thisRepo.ls_files("-m").split("\n"):
+        if m:
             del file_sha1_map[env.File(m).path]
 
     def is_known_to_git(dependency):
@@ -47,7 +48,7 @@ def generate(env, **kwargs):
             dependency.get_ninfo().csig = gitInfoForDep
             return False
 
-        if not (hasattr(prev_ni, 'csig')):
+        if not (hasattr(prev_ni, "csig")):
             prev_ni.csig = gitInfoForDep
 
         result = gitInfoForDep == prev_ni.csig
@@ -64,7 +65,8 @@ def generate(env, **kwargs):
 def exists(env):
     try:
         from git import Git
-        Git(env.Dir('#').abspath).ls_files('--stage')
+
+        Git(env.Dir("#").abspath).ls_files("--stage")
         return True
     except:
         return False
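The git decider short-circuits SCons content hashing by trusting git's index: `ls_files('--stage')` yields `<mode> <sha1> <stage> <path>` records, and anything `ls_files('-m')` reports as locally modified is evicted so the normal decider handles those files. A condensed sketch of that map-building step using GitPython, as the tool itself does:

```python
# Build a path -> blob SHA-1 map from the git index, dropping dirty files.
from git import Git  # GitPython


def build_sha1_map(repo_root):
    repo = Git(repo_root)
    sha1_map = {}
    for line in repo.ls_files("--stage").splitlines():
        if not line:
            continue
        mode, sha1, stage, path = line.split(None, 3)
        sha1_map[path] = sha1
    for modified in repo.ls_files("-m").splitlines():
        if modified:
            sha1_map.pop(modified, None)  # let the base decider judge dirty files
    return sha1_map
```

A file whose recorded SHA-1 matches the one cached on the node's build signature can then be declared unchanged without rehashing its contents.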
kwargs.get( - "GZIPTOOL_COMSTR", - "Compressing $TARGET with gzip" + env["GZIPTOOL_COMSTR"] = kwargs.get( + "GZIPTOOL_COMSTR", "Compressing $TARGET with gzip" ) def GZipTool(env, target, source): @@ -36,7 +37,7 @@ def generate(env, **kwargs): env.AlwaysBuild(result) return result - env.AddMethod(GZipTool, 'GZip') + env.AddMethod(GZipTool, "GZip") def exists(env): diff --git a/site_scons/site_tools/icecream.py b/site_scons/site_tools/icecream.py index dfa7aa32748..2a7e72b8f5a 100644 --- a/site_scons/site_tools/icecream.py +++ b/site_scons/site_tools/icecream.py @@ -20,8 +20,8 @@ import subprocess from pkg_resources import parse_version -_icecream_version_min = parse_version('1.1rc2') -_ccache_nocpp2_version = parse_version('3.4.1') +_icecream_version_min = parse_version("1.1rc2") +_ccache_nocpp2_version = parse_version("3.4.1") # I'd prefer to use value here, but amazingly, its __str__ returns the @@ -39,6 +39,7 @@ class _BoundSubstitution: self.result = self.env.subst(self.expression) return self.result + def generate(env): if not exists(env): @@ -50,31 +51,33 @@ def generate(env): # little differently if ccache is in play. If you don't use the # TOOLS variable to configure your tools, you should explicitly # load the ccache tool before you load icecream. - if 'ccache' in env['TOOLS'] and not 'CCACHE_VERSION' in env: - env.Tool('ccache') - ccache_enabled = ('CCACHE_VERSION' in env) + if "ccache" in env["TOOLS"] and not "CCACHE_VERSION" in env: + env.Tool("ccache") + ccache_enabled = "CCACHE_VERSION" in env # Absoluteify, so we can derive ICERUN - env['ICECC'] = env.WhereIs('$ICECC') + env["ICECC"] = env.WhereIs("$ICECC") - if not 'ICERUN' in env: - env['ICERUN'] = env.File('$ICECC').File('icerun') + if not "ICERUN" in env: + env["ICERUN"] = env.File("$ICECC").File("icerun") # Absoluteify, for parity with ICECC - env['ICERUN'] = env.WhereIs('$ICERUN') + env["ICERUN"] = env.WhereIs("$ICERUN") # We can't handle sanitizer blacklist files, so disable icecc then, and just flow through # icerun to prevent slamming the local system with a huge -j value. if any( - f.startswith("-fsanitize-blacklist=") for fs in ['CCFLAGS', 'CFLAGS', 'CXXFLAGS'] - for f in env[fs]): - env['ICECC'] = '$ICERUN' + f.startswith("-fsanitize-blacklist=") + for fs in ["CCFLAGS", "CFLAGS", "CXXFLAGS"] + for f in env[fs] + ): + env["ICECC"] = "$ICERUN" # Make CC and CXX absolute paths too. It is better for icecc. - env['CC'] = env.WhereIs('$CC') - env['CXX'] = env.WhereIs('$CXX') + env["CC"] = env.WhereIs("$CC") + env["CXX"] = env.WhereIs("$CXX") - if 'ICECC_VERSION' in env: + if "ICECC_VERSION" in env: # TODO: # # If ICECC_VERSION is a file, we are done. 
If it is a file @@ -86,8 +89,8 @@ def generate(env): pass else: # Make a predictable name for the toolchain - icecc_version_target_filename = env.subst('$CC$CXX').replace('/', '_') - icecc_version_dir = env.Dir('$BUILD_ROOT/scons/icecc') + icecc_version_target_filename = env.subst("$CC$CXX").replace("/", "_") + icecc_version_dir = env.Dir("$BUILD_ROOT/scons/icecc") icecc_version = icecc_version_dir.File(icecc_version_target_filename) # There is a weird ordering problem that occurs when the ninja generator @@ -104,14 +107,10 @@ def generate(env): # Make an isolated environment so that our setting of ICECC_VERSION in the environment # doesn't appear when executing icecc_create_env toolchain_env = env.Clone() - if toolchain_env.ToolchainIs('clang'): + if toolchain_env.ToolchainIs("clang"): toolchain = toolchain_env.Command( target=icecc_version, - source=[ - '$ICECC_CREATE_ENV', - '$CC', - '$CXX' - ], + source=["$ICECC_CREATE_ENV", "$CC", "$CXX"], action=[ "${SOURCES[0]} --clang ${SOURCES[1].abspath} /bin/true $TARGET", ], @@ -119,11 +118,7 @@ def generate(env): else: toolchain = toolchain_env.Command( target=icecc_version, - source=[ - '$ICECC_CREATE_ENV', - '$CC', - '$CXX' - ], + source=["$ICECC_CREATE_ENV", "$CC", "$CXX"], action=[ "${SOURCES[0]} --gcc ${SOURCES[1].abspath} ${SOURCES[2].abspath} $TARGET", ], @@ -138,13 +133,13 @@ def generate(env): # Cribbed from Tool/cc.py and Tool/c++.py. It would be better if # we could obtain this from SCons. - _CSuffixes = ['.c'] - if not SCons.Util.case_sensitive_suffixes('.c', '.C'): - _CSuffixes.append('.C') + _CSuffixes = [".c"] + if not SCons.Util.case_sensitive_suffixes(".c", ".C"): + _CSuffixes.append(".C") - _CXXSuffixes = ['.cpp', '.cc', '.cxx', '.c++', '.C++'] - if SCons.Util.case_sensitive_suffixes('.c', '.C'): - _CXXSuffixes.append('.C') + _CXXSuffixes = [".cpp", ".cc", ".cxx", ".c++", ".C++"] + if SCons.Util.case_sensitive_suffixes(".c", ".C"): + _CXXSuffixes.append(".C") suffixes = _CSuffixes + _CXXSuffixes for object_builder in SCons.Tool.createObjBuilders(env): @@ -153,44 +148,35 @@ def generate(env): if not suffix in suffixes: continue base = emitterdict[suffix] - emitterdict[suffix] = SCons.Builder.ListEmitter([ - base, - icecc_toolchain_dependency_emitter - ]) + emitterdict[suffix] = SCons.Builder.ListEmitter( + [base, icecc_toolchain_dependency_emitter] + ) # Add ICECC_VERSION to the environment, pointed at the generated # file so that we can expand it in the realpath expressions for # CXXCOM and friends below. - env['ICECC_VERSION'] = icecc_version + env["ICECC_VERSION"] = icecc_version - if env.ToolchainIs('clang'): - env['ENV']['ICECC_CLANG_REMOTE_CPP'] = 1 + if env.ToolchainIs("clang"): + env["ENV"]["ICECC_CLANG_REMOTE_CPP"] = 1 - if ccache_enabled and env['CCACHE_VERSION'] >= _ccache_nocpp2_version: - env.AppendUnique( - CCFLAGS=[ - '-frewrite-includes' - ] - ) - env['ENV']['CCACHE_NOCPP2'] = 1 + if ccache_enabled and env["CCACHE_VERSION"] >= _ccache_nocpp2_version: + env.AppendUnique(CCFLAGS=["-frewrite-includes"]) + env["ENV"]["CCACHE_NOCPP2"] = 1 else: - env.AppendUnique( - CCFLAGS=[ - '-fdirectives-only' - ] - ) + env.AppendUnique(CCFLAGS=["-fdirectives-only"]) if ccache_enabled: - env['ENV']['CCACHE_NOCPP2'] = 1 + env["ENV"]["CCACHE_NOCPP2"] = 1 - if 'ICECC_SCHEDULER' in env: - env['ENV']['USE_SCHEDULER'] = env['ICECC_SCHEDULER'] + if "ICECC_SCHEDULER" in env: + env["ENV"]["USE_SCHEDULER"] = env["ICECC_SCHEDULER"] # Make sure it is a file node so that we can call `.abspath` on it # below. 
We must defer the abspath and realpath calls until after # the tool has completed and we have begun building, since we need # the real toolchain tarball to get created first on disk as part # of the DAG walk. - env['ICECC_VERSION'] = env.File('$ICECC_VERSION') + env["ICECC_VERSION"] = env.File("$ICECC_VERSION") # Not all platforms have the readlink utility, so create our own # generator for that. @@ -205,20 +191,21 @@ def generate(env): # nice to be able to memoize away this call, but we should # think carefully about where to store the result of such # memoization. - return os.path.realpath(env['ICECC_VERSION'].abspath) - env['ICECC_VERSION_GEN'] = icecc_version_gen + return os.path.realpath(env["ICECC_VERSION"].abspath) + + env["ICECC_VERSION_GEN"] = icecc_version_gen # Build up the string we will set in the environment to tell icecream # about the compiler package. - icecc_version_string = '${ICECC_VERSION_GEN}' - if 'ICECC_VERSION_ARCH' in env: - icecc_version_string = '${ICECC_VERSION_ARCH}:' + icecc_version_string + icecc_version_string = "${ICECC_VERSION_GEN}" + if "ICECC_VERSION_ARCH" in env: + icecc_version_string = "${ICECC_VERSION_ARCH}:" + icecc_version_string # Use our BoundSubstitition class to put ICECC_VERSION into # env['ENV'] with substitution in play. This lets us defer doing # the realpath in the generator above until after we have made the # tarball. - env['ENV']['ICECC_VERSION'] = _BoundSubstitution(env, icecc_version_string) + env["ENV"]["ICECC_VERSION"] = _BoundSubstitution(env, icecc_version_string) # If ccache is in play we actually want the icecc binary in the # CCACHE_PREFIX environment variable, not on the command line, per @@ -230,13 +217,13 @@ def generate(env): # compiler flags (things like -fdirectives-only), but we still try # to do the right thing. if ccache_enabled: - env['ENV']['CCACHE_PREFIX'] = _BoundSubstitution(env, '$ICECC') + env["ENV"]["CCACHE_PREFIX"] = _BoundSubstitution(env, "$ICECC") else: - icecc_string = '$( $ICECC $)' - env['CCCOM'] = ' '.join([icecc_string, env['CCCOM']]) - env['CXXCOM'] = ' '.join([icecc_string, env['CXXCOM']]) - env['SHCCCOM'] = ' '.join([icecc_string, env['SHCCCOM']]) - env['SHCXXCOM'] = ' '.join([icecc_string, env['SHCXXCOM']]) + icecc_string = "$( $ICECC $)" + env["CCCOM"] = " ".join([icecc_string, env["CCCOM"]]) + env["CXXCOM"] = " ".join([icecc_string, env["CXXCOM"]]) + env["SHCCCOM"] = " ".join([icecc_string, env["SHCCCOM"]]) + env["SHCXXCOM"] = " ".join([icecc_string, env["SHCXXCOM"]]) # Make link like jobs flow through icerun so we don't kill the # local machine. @@ -244,9 +231,9 @@ def generate(env): # TODO: Should we somehow flow SPAWN or other universal shell launch through # ICERUN to avoid saturating the local machine, and build something like # ninja pools? 
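[Editor's note: the `$( ... $)` markers in the surrounding hunk are SCons signature escapes — anything between them is excluded from the command signature, so toggling icecream/icerun on or off does not by itself invalidate previously built targets. A minimal standalone sketch of the prepending logic, with a plain dict standing in for the real SCons Environment (an illustration-only assumption):]

    # Sketch only: a plain dict stands in for a real SCons Environment.
    # Text inside "$( ... $)" is excluded from SCons command signatures,
    # so adding or removing the icerun prefix does not force relinks.
    env = {
        "ARCOM": "$AR rc $TARGET $SOURCES",
        "LINKCOM": "$LINK -o $TARGET $LINKFLAGS $SOURCES",
        "SHLINKCOM": "$SHLINK -o $TARGET $SHLINKFLAGS $SOURCES",
    }
    for var in ("ARCOM", "LINKCOM", "SHLINKCOM"):
        env[var] = "$( $ICERUN $) " + env[var]

    print(env["LINKCOM"])  # $( $ICERUN $) $LINK -o $TARGET $LINKFLAGS $SOURCES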
- env['ARCOM'] = '$( $ICERUN $) ' + env['ARCOM'] - env['LINKCOM'] = '$( $ICERUN $) ' + env['LINKCOM'] - env['SHLINKCOM'] = '$( $ICERUN $) ' + env['SHLINKCOM'] + env["ARCOM"] = "$( $ICERUN $) " + env["ARCOM"] + env["LINKCOM"] = "$( $ICERUN $) " + env["LINKCOM"] + env["SHLINKCOM"] = "$( $ICERUN $) " + env["SHLINKCOM"] # Uncomment these to debug your icecc integration # env['ENV']['ICECC_DEBUG'] = 'debug' @@ -255,29 +242,33 @@ def generate(env): def exists(env): - icecc = env.get('ICECC', False) + icecc = env.get("ICECC", False) if not icecc: return False icecc = env.WhereIs(icecc) if not icecc: return False - pipe = SCons.Action._subproc(env, - SCons.Util.CLVar(icecc) + ['--version'], stdin='devnull', - stderr='devnull', stdout=subprocess.PIPE) + pipe = SCons.Action._subproc( + env, + SCons.Util.CLVar(icecc) + ["--version"], + stdin="devnull", + stderr="devnull", + stdout=subprocess.PIPE, + ) if pipe.wait() != 0: return False validated = False for line in pipe.stdout: - line = line.decode('utf-8') + line = line.decode("utf-8") if validated: continue # consume all data - version_banner = re.search(r'^ICECC ', line) + version_banner = re.search(r"^ICECC ", line) if not version_banner: continue - icecc_version = re.split('ICECC (.+)', line) + icecc_version = re.split("ICECC (.+)", line) if len(icecc_version) < 2: continue icecc_version = parse_version(icecc_version[1]) diff --git a/site_scons/site_tools/idl_tool.py b/site_scons/site_tools/idl_tool.py index 3edfc55a9b4..b041dc8cb7e 100755 --- a/site_scons/site_tools/idl_tool.py +++ b/site_scons/site_tools/idl_tool.py @@ -32,18 +32,20 @@ def idlc_emitter(target, source, env): first_source = str(source[0]) if not first_source.endswith(".idl"): - raise ValueError("Bad idl file name '%s', it must end with '.idl' " % (first_source)) + raise ValueError( + "Bad idl file name '%s', it must end with '.idl' " % (first_source) + ) base_file_name, _ = SCons.Util.splitext(str(target[0])) target_source = base_file_name + "_gen.cpp" target_header = base_file_name + "_gen.h" - env.Alias('generated-sources', [target_source, target_header]) + env.Alias("generated-sources", [target_source, target_header]) return [target_source, target_header], source -IDLCAction = SCons.Action.Action('$IDLCCOM', '$IDLCCOMSTR') +IDLCAction = SCons.Action.Action("$IDLCCOM", "$IDLCCOMSTR") def idl_scanner(node, env, path): @@ -53,24 +55,30 @@ def idl_scanner(node, env, path): nodes_deps_list = IDL_GLOBAL_DEPS[:] - with open(str(node), encoding='utf-8') as file_stream: - parsed_doc = idlc.parser.parse(file_stream, str(node), - idlc.CompilerImportResolver(['src'])) + with open(str(node), encoding="utf-8") as file_stream: + parsed_doc = idlc.parser.parse( + file_stream, str(node), idlc.CompilerImportResolver(["src"]) + ) if not parsed_doc.errors and parsed_doc.spec.imports is not None: - nodes_deps_list.extend([ - env.File(d) for d in sorted(parsed_doc.spec.imports.dependencies) - ]) + nodes_deps_list.extend( + [env.File(d) for d in sorted(parsed_doc.spec.imports.dependencies)] + ) setattr(node.attributes, "IDL_NODE_DEPS", nodes_deps_list) return nodes_deps_list -idl_scanner = SCons.Scanner.Scanner(function=idl_scanner, skeys=['.idl']) +idl_scanner = SCons.Scanner.Scanner(function=idl_scanner, skeys=[".idl"]) # TODO: create a scanner for imports when imports are implemented -IDLCBuilder = SCons.Builder.Builder(action=IDLCAction, emitter=idlc_emitter, srcsuffx=".idl", - suffix=".cpp", source_scanner=idl_scanner) +IDLCBuilder = SCons.Builder.Builder( + action=IDLCAction, + 
emitter=idlc_emitter, + srcsuffx=".idl", + suffix=".cpp", + source_scanner=idl_scanner, +) def generate(env): @@ -78,22 +86,27 @@ def generate(env): env.Append(SCANNERS=idl_scanner) - env['BUILDERS']['Idlc'] = bld + env["BUILDERS"]["Idlc"] = bld sys.path.append(env.Dir("#buildscripts").get_abspath()) import buildscripts.idl.idl.compiler as idlc_mod + global idlc idlc = idlc_mod - env['IDLC'] = sys.executable + " buildscripts/idl/idlc.py" - env['IDLCFLAGS'] = '' - base_dir = env.subst('$BUILD_ROOT/$VARIANT_DIR').replace("#", "") - env['IDLCCOM'] = '$IDLC --include src --base_dir %s --target_arch $TARGET_ARCH --header ${TARGETS[1]} --output ${TARGETS[0]} $SOURCES ' % ( - base_dir) - env['IDLCSUFFIX'] = '.idl' - - IDL_GLOBAL_DEPS = env.Glob('#buildscripts/idl/*.py') + env.Glob('#buildscripts/idl/idl/*.py') - env['IDL_HAS_INLINE_DEPENDENCIES'] = True + env["IDLC"] = sys.executable + " buildscripts/idl/idlc.py" + env["IDLCFLAGS"] = "" + base_dir = env.subst("$BUILD_ROOT/$VARIANT_DIR").replace("#", "") + env["IDLCCOM"] = ( + "$IDLC --include src --base_dir %s --target_arch $TARGET_ARCH --header ${TARGETS[1]} --output ${TARGETS[0]} $SOURCES " + % (base_dir) + ) + env["IDLCSUFFIX"] = ".idl" + + IDL_GLOBAL_DEPS = env.Glob("#buildscripts/idl/*.py") + env.Glob( + "#buildscripts/idl/idl/*.py" + ) + env["IDL_HAS_INLINE_DEPENDENCIES"] = True def exists(env): diff --git a/site_scons/site_tools/incremental_link.py b/site_scons/site_tools/incremental_link.py index 31f16a482da..dddd8b770f6 100644 --- a/site_scons/site_tools/incremental_link.py +++ b/site_scons/site_tools/incremental_link.py @@ -21,28 +21,29 @@ def _tag_as_precious(target, source, env): def generate(env): - builders = env['BUILDERS'] - for builder in ('Program', 'SharedLibrary', 'LoadableModule'): + builders = env["BUILDERS"] + for builder in ("Program", "SharedLibrary", "LoadableModule"): emitter = builders[builder].emitter - builders[builder].emitter = SCons.Builder.ListEmitter([ - emitter, - _tag_as_precious, - ]) + builders[builder].emitter = SCons.Builder.ListEmitter( + [emitter, _tag_as_precious,] + ) def exists(env): # By default, the windows linker is incremental, so unless # overridden in the environment with /INCREMENTAL:NO, the tool is # in play. - if env.TargetOSIs('windows') and not "/INCREMENTAL:NO" in env['LINKFLAGS']: + if env.TargetOSIs("windows") and not "/INCREMENTAL:NO" in env["LINKFLAGS"]: return True # On posix platforms, excluding darwin, we may have enabled # incremental linking. Check for the relevant flags. 
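[Editor's note on the `exists()` check this hunk reformats: incremental_link reports itself active only when gold is the linker and incremental linking was requested, i.e. every flag must be present in LINKFLAGS. The relevant-flags test, continued just below, can be exercised standalone; this sketch uses a plain list where the real code reads env["LINKFLAGS"]:]

    # Standalone sketch of the all-flags-present test in
    # incremental_link.exists(); a plain list replaces env["LINKFLAGS"].
    def gold_incremental_enabled(linkflags):
        return "-fuse-ld=gold" in linkflags and "-Wl,--incremental" in linkflags

    assert gold_incremental_enabled(["-fuse-ld=gold", "-Wl,--incremental"])
    assert not gold_incremental_enabled(["-fuse-ld=gold"])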
- if env.TargetOSIs('posix') and \ - not env.TargetOSIs('darwin') and \ - "-fuse-ld=gold" in env['LINKFLAGS'] and \ - "-Wl,--incremental" in env['LINKFLAGS']: + if ( + env.TargetOSIs("posix") + and not env.TargetOSIs("darwin") + and "-fuse-ld=gold" in env["LINKFLAGS"] + and "-Wl,--incremental" in env["LINKFLAGS"] + ): return True return False diff --git a/site_scons/site_tools/jsheader.py b/site_scons/site_tools/jsheader.py index 4c2765b7108..f842a8672e4 100644 --- a/site_scons/site_tools/jsheader.py +++ b/site_scons/site_tools/jsheader.py @@ -2,12 +2,15 @@ from SCons.Script import Action def jsToH(env, target, source): - return env.Command(target=target, source=['#site_scons/site_tools/jstoh.py'] + source, - action=Action('$PYTHON ${SOURCES[0]} $TARGET ${SOURCES[1:]}')) + return env.Command( + target=target, + source=["#site_scons/site_tools/jstoh.py"] + source, + action=Action("$PYTHON ${SOURCES[0]} $TARGET ${SOURCES[1:]}"), + ) def generate(env, **kw): - env.AddMethod(jsToH, 'JSHeader') + env.AddMethod(jsToH, "JSHeader") def exists(env): diff --git a/site_scons/site_tools/jstoh.py b/site_scons/site_tools/jstoh.py index 50c0b66cf32..5f88043d3bb 100644 --- a/site_scons/site_tools/jstoh.py +++ b/site_scons/site_tools/jstoh.py @@ -9,37 +9,39 @@ def jsToHeader(target, source): h = [ '#include "mongo/base/string_data.h"', '#include "mongo/scripting/engine.h"', - 'namespace mongo {', - 'namespace JSFiles{', + "namespace mongo {", + "namespace JSFiles{", ] def lineToChars(s): - return ','.join(str(ord(c)) for c in (s.rstrip() + '\n')) + ',' + return ",".join(str(ord(c)) for c in (s.rstrip() + "\n")) + "," for s in source: filename = str(s) - objname = os.path.split(filename)[1].split('.')[0] - stringname = '_jscode_raw_' + objname + objname = os.path.split(filename)[1].split(".")[0] + stringname = "_jscode_raw_" + objname - h.append('constexpr char ' + stringname + "[] = {") + h.append("constexpr char " + stringname + "[] = {") - with open(filename, 'r') as f: + with open(filename, "r") as f: for line in f: h.append(lineToChars(line)) h.append("0};") # symbols aren't exported w/o this - h.append('extern const JSFile %s;' % objname) - h.append('const JSFile %s = { "%s", StringData(%s, sizeof(%s) - 1) };' % - (objname, filename.replace('\\', '/'), stringname, stringname)) + h.append("extern const JSFile %s;" % objname) + h.append( + 'const JSFile %s = { "%s", StringData(%s, sizeof(%s) - 1) };' + % (objname, filename.replace("\\", "/"), stringname, stringname) + ) h.append("} // namespace JSFiles") h.append("} // namespace mongo") h.append("") - text = '\n'.join(h) + text = "\n".join(h) - with open(outFile, 'w') as out: + with open(outFile, "w") as out: try: out.write(text) finally: diff --git a/site_scons/site_tools/libtool.py b/site_scons/site_tools/libtool.py index 07527bb7f50..2ef2415627e 100644 --- a/site_scons/site_tools/libtool.py +++ b/site_scons/site_tools/libtool.py @@ -3,14 +3,14 @@ import SCons def generate(env): - env['AR'] = 'libtool' - env['ARCOM'] = '$AR -static -o $TARGET $ARFLAGS $SOURCES' - env['ARFLAGS'] = ["-s", "-no_warning_for_no_symbols"] + env["AR"] = "libtool" + env["ARCOM"] = "$AR -static -o $TARGET $ARFLAGS $SOURCES" + env["ARFLAGS"] = ["-s", "-no_warning_for_no_symbols"] # Disable running ranlib, since we added 's' above - env['RANLIBCOM'] = '' - env['RANLIBCOMSTR'] = 'Skipping ranlib for libtool generated target $TARGET' + env["RANLIBCOM"] = "" + env["RANLIBCOMSTR"] = "Skipping ranlib for libtool generated target $TARGET" def exists(env): - return 
env.detect('libtool') + return env.detect("libtool") diff --git a/site_scons/site_tools/mongo_benchmark.py b/site_scons/site_tools/mongo_benchmark.py index c765adba744..400512e738a 100644 --- a/site_scons/site_tools/mongo_benchmark.py +++ b/site_scons/site_tools/mongo_benchmark.py @@ -1,75 +1,60 @@ -"""Pseudo-builders for building and registering benchmarks. +# Copyright 2019 MongoDB Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Pseudo-builders for building and registering benchmarks. """ -import os from SCons.Script import Action + def exists(env): return True -_benchmarks = [] -def register_benchmark(env, test): - _benchmarks.append(test.path) - env.Alias('$BENCHMARK_ALIAS', test) - -def benchmark_list_builder_action(env, target, source): - ofile = open(str(target[0]), 'w') - try: - for s in _benchmarks: - print('\t' + str(s)) - ofile.write('%s\n' % s) - finally: - ofile.close() def build_benchmark(env, target, source, **kwargs): bmEnv = env.Clone() - bmEnv.InjectThirdParty(libraries=['benchmark']) + bmEnv.InjectThirdParty(libraries=["benchmark"]) - if bmEnv.TargetOSIs('windows'): + if bmEnv.TargetOSIs("windows"): bmEnv.Append(LIBS=["ShLwApi.lib"]) - libdeps = kwargs.get('LIBDEPS', []) - libdeps.append('$BUILD_DIR/mongo/unittest/benchmark_main') + libdeps = kwargs.get("LIBDEPS", []) + libdeps.append("$BUILD_DIR/mongo/unittest/benchmark_main") - kwargs['LIBDEPS'] = libdeps - kwargs['INSTALL_ALIAS'] = ['benchmarks'] + kwargs["LIBDEPS"] = libdeps + kwargs["INSTALL_ALIAS"] = ["benchmarks"] - benchmark_test_components = {'tests', 'benchmarks'} - if ( - 'AIB_COMPONENT' in kwargs - and not kwargs['AIB_COMPONENT'].endswith('-benchmark') - ): - kwargs['AIB_COMPONENT'] += '-benchmark' + benchmark_test_components = {"tests", "benchmarks"} + if "AIB_COMPONENT" in kwargs and not kwargs["AIB_COMPONENT"].endswith("-benchmark"): + kwargs["AIB_COMPONENT"] += "-benchmark" - if 'AIB_COMPONENTS_EXTRA' in kwargs: - benchmark_test_components = set(kwargs['AIB_COMPONENTS_EXTRA']).union(benchmark_test_components) + if "AIB_COMPONENTS_EXTRA" in kwargs: + benchmark_test_components = set(kwargs["AIB_COMPONENTS_EXTRA"]).union( + benchmark_test_components + ) - kwargs['AIB_COMPONENTS_EXTRA'] = benchmark_test_components + kwargs["AIB_COMPONENTS_EXTRA"] = benchmark_test_components result = bmEnv.Program(target, source, **kwargs) - bmEnv.RegisterBenchmark(result[0]) - hygienic = bmEnv.GetOption('install-mode') == 'hygienic' - if not hygienic: - installed_test = bmEnv.Install("#/build/benchmark/", result[0]) - env.Command( - target="#@{}".format(os.path.basename(installed_test[0].path)), - source=installed_test, - action="${SOURCES[0]}" - ) - else: - test_bin_name = os.path.basename(result[0].path) - env.Command( - target="#@{}".format(test_bin_name), - source=["$PREFIX_BINDIR/{}".format(test_bin_name)], - action="${SOURCES[0]}" - ) + bmEnv.RegisterTest("$BENCHMARK_LIST", result[0]) + bmEnv.Alias("$BENCHMARK_ALIAS", result) return result def generate(env): - env.Command('$BENCHMARK_LIST', 
env.Value(_benchmarks), - Action(benchmark_list_builder_action, "Generating $TARGET")) - env.AddMethod(register_benchmark, 'RegisterBenchmark') - env.AddMethod(build_benchmark, 'Benchmark') - env.Alias('$BENCHMARK_ALIAS', '$BENCHMARK_LIST') + env.TestList("$BENCHMARK_LIST", source=[]) + env.AddMethod(build_benchmark, "Benchmark") + env.Alias("$BENCHMARK_ALIAS", "$BENCHMARK_LIST") diff --git a/site_scons/site_tools/mongo_integrationtest.py b/site_scons/site_tools/mongo_integrationtest.py index 95e3b3b8d77..4cc89fc57bd 100644 --- a/site_scons/site_tools/mongo_integrationtest.py +++ b/site_scons/site_tools/mongo_integrationtest.py @@ -1,51 +1,38 @@ -'''Pseudo-builders for building and registering integration tests. -''' +""" +Pseudo-builders for building and registering integration tests. +""" from SCons.Script import Action + def exists(env): return True -_integration_tests = [] -def register_integration_test(env, test): - installed_test = env.Install('#/build/integration_tests/', test) - _integration_tests.append(installed_test[0].path) - env.Alias('$INTEGRATION_TEST_ALIAS', installed_test) - -def integration_test_list_builder_action(env, target, source): - ofile = open(str(target[0]), 'w') - try: - for s in _integration_tests: - print('\t' + str(s)) - ofile.write('%s\n' % s) - finally: - ofile.close() def build_cpp_integration_test(env, target, source, **kwargs): - libdeps = kwargs.get('LIBDEPS', []) - libdeps.append( '$BUILD_DIR/mongo/unittest/integration_test_main' ) + libdeps = kwargs.get("LIBDEPS", []) + libdeps.append("$BUILD_DIR/mongo/unittest/integration_test_main") - kwargs['LIBDEPS'] = libdeps - integration_test_components = {'tests', 'integration-tests'} + kwargs["LIBDEPS"] = libdeps + integration_test_components = {"tests", "integration-tests"} - if ( - 'AIB_COMPONENT' in kwargs - and not kwargs['AIB_COMPONENT'].endswith('-test') - ): - kwargs['AIB_COMPONENT'] += '-test' + if "AIB_COMPONENT" in kwargs and not kwargs["AIB_COMPONENT"].endswith("-test"): + kwargs["AIB_COMPONENT"] += "-test" - if 'AIB_COMPONENTS_EXTRA' in kwargs: - kwargs['AIB_COMPONENTS_EXTRA'] = set(kwargs['AIB_COMPONENTS_EXTRA']).union(integration_test_components) + if "AIB_COMPONENTS_EXTRA" in kwargs: + kwargs["AIB_COMPONENTS_EXTRA"] = set(kwargs["AIB_COMPONENTS_EXTRA"]).union( + integration_test_components + ) else: - kwargs['AIB_COMPONENTS_EXTRA'] = integration_test_components + kwargs["AIB_COMPONENTS_EXTRA"] = integration_test_components result = env.Program(target, source, **kwargs) - env.RegisterIntegrationTest(result[0]) + env.RegisterTest("$INTEGRATION_TEST_LIST", result[0]) + env.Alias("$INTEGRATION_TEST_ALIAS", result[0]) + return result def generate(env): - env.Command('$INTEGRATION_TEST_LIST', env.Value(_integration_tests), - Action(integration_test_list_builder_action, 'Generating $TARGET')) - env.AddMethod(register_integration_test, 'RegisterIntegrationTest') - env.AddMethod(build_cpp_integration_test, 'CppIntegrationTest') - env.Alias('$INTEGRATION_TEST_ALIAS', '$INTEGRATION_TEST_LIST') + env.TestList("$INTEGRATION_TEST_LIST", source=[]) + env.AddMethod(build_cpp_integration_test, "CppIntegrationTest") + env.Alias("$INTEGRATION_TEST_ALIAS", "$INTEGRATION_TEST_LIST") diff --git a/site_scons/site_tools/mongo_libfuzzer.py b/site_scons/site_tools/mongo_libfuzzer.py index c81f80dccf2..85041d6ae21 100644 --- a/site_scons/site_tools/mongo_libfuzzer.py +++ b/site_scons/site_tools/mongo_libfuzzer.py @@ -7,56 +7,50 @@ def exists(env): return True -_libfuzzer_tests = [] - - -def register_libfuzzer_test(env, 
test): - _libfuzzer_tests.append(test.path) - env.Alias('$LIBFUZZER_TEST_ALIAS', test) - - def libfuzzer_test_list_builder_action(env, target, source): - with open(str(target[0]), 'w') as ofile: + with open(str(target[0]), "w") as ofile: for s in _libfuzzer_tests: - print('\t' + str(s)) - ofile.write('%s\n' % s) - + print("\t" + str(s)) + ofile.write("%s\n" % s) def build_cpp_libfuzzer_test(env, target, source, **kwargs): myenv = env.Clone() - if not myenv.IsSanitizerEnabled('fuzzer'): + if not myenv.IsSanitizerEnabled("fuzzer"): return [] - libdeps = kwargs.get('LIBDEPS', []) - kwargs['LIBDEPS'] = libdeps - kwargs['INSTALL_ALIAS'] = ['tests'] - sanitizer_option = '-fsanitize=fuzzer' + libdeps = kwargs.get("LIBDEPS", []) + kwargs["LIBDEPS"] = libdeps + kwargs["INSTALL_ALIAS"] = ["tests"] + sanitizer_option = "-fsanitize=fuzzer" myenv.Prepend(LINKFLAGS=[sanitizer_option]) - libfuzzer_test_components = {'tests', 'fuzzertests'} - if ( - 'AIB_COMPONENT' in kwargs - and not kwargs['AIB_COMPONENTS'].endswith('-fuzzertest') + libfuzzer_test_components = {"tests", "fuzzertests"} + if "AIB_COMPONENT" in kwargs and not kwargs["AIB_COMPONENTS"].endswith( + "-fuzzertest" ): - kwargs['AIB_COMPONENT'] += '-fuzzertest' + kwargs["AIB_COMPONENT"] += "-fuzzertest" - if 'AIB_COMPONENTS_EXTRA' in kwargs: - libfuzzer_test_components = set(kwargs['AIB_COMPONENTS_EXTRA']).union(libfuzzer_test_components) + if "AIB_COMPONENTS_EXTRA" in kwargs: + libfuzzer_test_components = set(kwargs["AIB_COMPONENTS_EXTRA"]).union( + libfuzzer_test_components + ) - kwargs['AIB_COMPONENTS_EXTRA'] = libfuzzer_test_components + kwargs["AIB_COMPONENTS_EXTRA"] = libfuzzer_test_components result = myenv.Program(target, source, **kwargs) - myenv.RegisterLibfuzzerTest(result[0]) - hygienic = myenv.GetOption('install-mode') == 'hygienic' + myenv.RegisterTest("$LIBFUZZER_TEST_LIST", result[0]) + myenv.Alias("$LIBFUZZER_TEST_ALIAS", result) + + # TODO: remove when hygienic is default + hygienic = myenv.GetOption("install-mode") == "hygienic" if not hygienic: myenv.Install("#/build/libfuzzer_tests/", result[0]) + return result def generate(env): - env.Command('$LIBFUZZER_TEST_LIST', env.Value(_libfuzzer_tests), - Action(libfuzzer_test_list_builder_action, "Generating $TARGET")) - env.AddMethod(register_libfuzzer_test, 'RegisterLibfuzzerTest') - env.AddMethod(build_cpp_libfuzzer_test, 'CppLibfuzzerTest') - env.Alias('$LIBFUZZER_TEST_ALIAS', '$LIBFUZZER_TEST_LIST') + env.TestList("$LIBFUZZER_TEST_LIST", source=[]) + env.AddMethod(build_cpp_libfuzzer_test, "CppLibfuzzerTest") + env.Alias("$LIBFUZZER_TEST_ALIAS", "$LIBFUZZER_TEST_LIST") diff --git a/site_scons/site_tools/mongo_test_execution.py b/site_scons/site_tools/mongo_test_execution.py new file mode 100644 index 00000000000..7d9bc6a1a3f --- /dev/null +++ b/site_scons/site_tools/mongo_test_execution.py @@ -0,0 +1,71 @@ +# Copyright 2019 MongoDB Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os + + +def generate_test_execution_aliases(env, test): + hygienic = env.GetOption("install-mode") == "hygienic" + if hygienic and getattr(test.attributes, "AIB_INSTALL_ACTIONS", []): + installed = getattr(test.attributes, "AIB_INSTALL_ACTIONS") + else: + installed = [test] + + target_name = os.path.basename(installed[0].get_path()) + command = env.Command( + target="#+{}".format(target_name), + source=installed, + action="${SOURCES[0]} $UNITTEST_FLAGS", + NINJA_POOL="console", + ) + + env.Alias("test-execution-aliases", command) + for source in test.sources: + source_base_name = os.path.basename(source.get_path()) + # Strip suffix + dot_idx = source_base_name.rfind(".") + suffix = source_base_name[dot_idx:] + if suffix in env["TEST_EXECUTION_SUFFIX_BLACKLIST"]: + continue + + source_name = source_base_name[:dot_idx] + if target_name == source_name: + continue + + source_command = env.Command( + target="#+{}".format(source_name), + source=installed, + action="${SOURCES[0]} -fileNameFilter $TEST_SOURCE_FILE_NAME $UNITTEST_FLAGS", + TEST_SOURCE_FILE_NAME=source_name, + NINJA_POOL="console", + ) + + env.Alias("test-execution-aliases", source_command) + + +def exists(env): + return True + + +def generate(env): + # Used for Ninja generator to collect the test execution aliases + env.Alias("test-execution-aliases") + env.AddMethod(generate_test_execution_aliases, "GenerateTestExecutionAliases") + + env["TEST_EXECUTION_SUFFIX_BLACKLIST"] = env.get( + "TEST_EXECUTION_SUFFIX_BLACKLIST", [".in"] + ) + + # TODO: Remove when the new ninja generator is the only supported generator + env["_NINJA_NO_TEST_EXECUTION"] = True diff --git a/site_scons/site_tools/mongo_test_list.py b/site_scons/site_tools/mongo_test_list.py new file mode 100644 index 00000000000..8ea4a427354 --- /dev/null +++ b/site_scons/site_tools/mongo_test_list.py @@ -0,0 +1,68 @@ +# Copyright 2019 MongoDB Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
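[Editor's note: the alias generation in mongo_test_execution.py above is the core of SERVER-44947 — every test binary gets a `+<binary>` execution target, and every source file compiled into it gets a `+<source>` target that passes -fileNameFilter so only that file's tests run. The name derivation can be shown in isolation; this is a pure-Python rework, not the builder itself, and os.path.splitext stands in for the rfind-based suffix split:]

    import os

    # Pure-Python rework of the name derivation in
    # generate_test_execution_aliases(); blacklisted suffixes and sources
    # that share the binary's name are skipped, as above.
    SUFFIX_BLACKLIST = {".in"}

    def execution_alias_names(target_path, source_paths):
        target_name = os.path.basename(target_path)
        yield target_name  # runs the whole binary
        for source in source_paths:
            name, suffix = os.path.splitext(os.path.basename(source))
            if suffix in SUFFIX_BLACKLIST or name == target_name:
                continue
            yield name  # runs only this source file's tests

    print(list(execution_alias_names(
        "build/install/bin/db_base_test",
        ["src/mongo/db/foo_test.cpp", "src/mongo/db/bar_test.cpp"],
    )))
    # ['db_base_test', 'foo_test', 'bar_test']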
+"""Pseudo-builders for building test lists for Resmoke""" + +import SCons +from collections import defaultdict + +TEST_REGISTRY = defaultdict(list) + + +def register_test(env, file_name, test): + """Register test into the dictionary of tests for file_name""" + test_path = test.path + if getattr(test.attributes, "AIB_INSTALL_ACTIONS", []): + test_path = getattr(test.attributes, "AIB_INSTALL_ACTIONS")[0].path + + if SCons.Util.is_String(file_name): + file_name = env.File(file_name).path + else: + file_name = file_name.path + + TEST_REGISTRY[file_name].append(test_path) + env.GenerateTestExecutionAliases(test) + + +def test_list_builder_action(env, target, source): + """Build a test list used by resmoke.py to execute binary tests.""" + if SCons.Util.is_String(target[0]): + filename = env.subst(target[0]) + else: + filename = target[0].path + + source = [env.subst(s) if SCons.Util.is_String(s) else s.path for s in source] + + with open(filename, "w") as ofile: + tests = TEST_REGISTRY[filename] + if source: + tests.extend(source) + + for s in tests: + ofile.write("{}\n".format(str(s))) + + +TEST_LIST_BUILDER = SCons.Builder.Builder( + action=SCons.Action.FunctionAction( + test_list_builder_action, {"cmdstr": "Generating $TARGETS"}, + ) +) + + +def exists(env): + return True + + +def generate(env): + env.Append(BUILDERS={"TestList": TEST_LIST_BUILDER}) + env.AddMethod(register_test, "RegisterTest") diff --git a/site_scons/site_tools/mongo_unittest.py b/site_scons/site_tools/mongo_unittest.py index 0ddd1b069ad..3d7e4e13e4e 100644 --- a/site_scons/site_tools/mongo_unittest.py +++ b/site_scons/site_tools/mongo_unittest.py @@ -1,66 +1,60 @@ -'''Pseudo-builders for building and registering unit tests.''' -import os - +# Copyright 2019 MongoDB Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Pseudo-builders for building and registering unit tests.""" from SCons.Script import Action -def exists(env): - return True - - -_unittests = [] def register_unit_test(env, test): - _unittests.append(test.path) + """ + Kept around for compatibility. - hygienic = env.GetOption('install-mode') == 'hygienic' - if hygienic and getattr(test.attributes, "AIB_INSTALL_ACTIONS", []): - installed = getattr(test.attributes, "AIB_INSTALL_ACTIONS") - else: - installed = [test] + Some SConscripts called RegisterUnitTest directly. 
+ """ + env.RegisterTest("$UNITTEST_LIST", test) + env.Alias("$UNITTEST_ALIAS", test) - env.Command( - target="#@{}".format(os.path.basename(installed[0].get_path())), - source=installed, - action="${SOURCES[0]}" - ) - env.Alias('$UNITTEST_ALIAS', test) +def exists(env): + return True -def unit_test_list_builder_action(env, target, source): - ofile = open(str(target[0]), 'w') - try: - for s in _unittests: - print('\t' + str(s)) - ofile.write('%s\n' % s) - finally: - ofile.close() def build_cpp_unit_test(env, target, source, **kwargs): - libdeps = kwargs.get('LIBDEPS', []) - libdeps.append( '$BUILD_DIR/mongo/unittest/unittest_main' ) - - kwargs['LIBDEPS'] = libdeps - unit_test_components = {'tests', 'unittests'} - if ( - 'AIB_COMPONENT' in kwargs - and not kwargs['AIB_COMPONENT'].endswith('-test') - ): - kwargs['AIB_COMPONENT'] += '-test' - - if 'AIB_COMPONENTS_EXTRA' in kwargs: - kwargs['AIB_COMPONENTS_EXTRA'] = set(kwargs['AIB_COMPONENTS_EXTRA']).union(unit_test_components) + libdeps = kwargs.get("LIBDEPS", []) + libdeps.append("$BUILD_DIR/mongo/unittest/unittest_main") + + kwargs["LIBDEPS"] = libdeps + unit_test_components = {"tests", "unittests"} + if "AIB_COMPONENT" in kwargs and not kwargs["AIB_COMPONENT"].endswith("-test"): + kwargs["AIB_COMPONENT"] += "-test" + + if "AIB_COMPONENTS_EXTRA" in kwargs: + kwargs["AIB_COMPONENTS_EXTRA"] = set(kwargs["AIB_COMPONENTS_EXTRA"]).union( + unit_test_components + ) else: - kwargs['AIB_COMPONENTS_EXTRA'] = unit_test_components + kwargs["AIB_COMPONENTS_EXTRA"] = unit_test_components result = env.Program(target, source, **kwargs) - env.RegisterUnitTest(result[0]) + env.RegisterTest("$UNITTEST_LIST", result[0]) + env.Alias("$UNITTEST_ALIAS", result[0]) return result def generate(env): - env.Command('$UNITTEST_LIST', env.Value(_unittests), - Action(unit_test_list_builder_action, 'Generating $TARGET')) - env.AddMethod(register_unit_test, 'RegisterUnitTest') - env.AddMethod(build_cpp_unit_test, 'CppUnitTest') - env.Alias('$UNITTEST_ALIAS', '$UNITTEST_LIST') + env.TestList("$UNITTEST_LIST", source=[]) + env.AddMethod(build_cpp_unit_test, "CppUnitTest") + env.AddMethod(register_unit_test, "RegisterUnitTest") + env.Alias("$UNITTEST_ALIAS", "$UNITTEST_LIST") diff --git a/site_scons/site_tools/ninja.py b/site_scons/site_tools/ninja.py index 2221ffca516..b80d7180e66 100644 --- a/site_scons/site_tools/ninja.py +++ b/site_scons/site_tools/ninja.py @@ -29,6 +29,7 @@ from SCons.Script import COMMAND_LINE_TARGETS NINJA_SYNTAX = "NINJA_SYNTAX" NINJA_RULES = "__NINJA_CUSTOM_RULES" +NINJA_POOLS = "__NINJA_CUSTOM_POOLS" NINJA_CUSTOM_HANDLERS = "__NINJA_CUSTOM_HANDLERS" NINJA_BUILD = "NINJA_BUILD" NINJA_OUTPUTS = "__NINJA_OUTPUTS" @@ -301,13 +302,18 @@ class SConsToNinjaTranslator: # Make sure we didn't generate an empty cmdline if cmdline: - return { + ninja_build = { "outputs": all_outputs, "rule": "CMD", "variables": {"cmd": cmdline}, "implicit": dependencies, } + if node.env and node.env.get("NINJA_POOL", None) is not None: + ninja_build["pool"] = node.env["pool"] + + return ninja_build + elif results[0]["rule"] == "phony": return { "outputs": all_outputs, @@ -458,6 +464,7 @@ class NinjaState: self.rules["CMD_W_DEPS"]["depfile"] = "$out.d" self.rules.update(env.get(NINJA_RULES, {})) + self.pools.update(env.get(NINJA_POOLS, {})) def generate_builds(self, node): """Generate a ninja build rule for node and it's children.""" @@ -517,8 +524,8 @@ class NinjaState: ninja.comment("Generated by scons. 
DO NOT EDIT.") - for pool in self.pools: - ninja.pool(pool, self.pools[pool]) + for pool_name, size in self.pools.items(): + ninja.pool(pool_name, size) for var, val in self.variables.items(): ninja.variable(var, val) @@ -809,13 +816,33 @@ def get_command(env, node, action): # pylint: disable=too-many-branches setattr(node.attributes, "NINJA_ENV_ENV", command_env) - return { + ninja_build = { "outputs": outputs, "implicit": implicit, "rule": rule, "variables": {"cmd": command_env + cmd}, } + # Don't use sub_env here because we require that NINJA_POOL be set + # on a per-builder call basis to prevent accidental strange + # behavior like env['NINJA_POOL'] = 'console' and sub_env can be + # the global Environment object if node.env is None. + # Example: + # + # Allowed: + # + # env.Command("ls", NINJA_POOL="ls_pool") + # + # Not allowed and ignored: + # + # env["NINJA_POOL"] = "ls_pool" + # env.Command("ls") + # + if node.env and node.env.get("NINJA_POOL", None) is not None: + ninja_build["pool"] = node.env["NINJA_POOL"] + + return ninja_build + def ninja_builder(env, target, source): """Generate a build.ninja for source.""" @@ -907,6 +934,11 @@ def register_custom_rule(env, rule, command, description=""): } +def register_custom_pool(env, pool, size): + """Allows the creation of custom Ninja pools""" + env[NINJA_POOLS][pool] = size + + def ninja_csig(original): """Return a dummy csig""" @@ -1107,6 +1139,12 @@ def generate(env): env[NINJA_RULES] = {} env.AddMethod(register_custom_rule, "NinjaRule") + # Provides a mechanism for inject custom Ninja pools which can + # be used by providing the NINJA_POOL="name" as an + # OverrideEnvironment variable in a builder call. + env[NINJA_POOLS] = {} + env.AddMethod(register_custom_pool, "NinjaPool") + # Add the ability to register custom NinjaRuleMappings for Command # builders. We don't store this dictionary in the env to prevent # accidental deletion of the CC/XXCOM mappings. You can still diff --git a/site_scons/site_tools/separate_debug.py b/site_scons/site_tools/separate_debug.py index 211a31db84a..4347f3031ab 100644 --- a/site_scons/site_tools/separate_debug.py +++ b/site_scons/site_tools/separate_debug.py @@ -26,16 +26,16 @@ def _update_builder(env, builder, bitcode): if origin: origin_results = old_scanner(origin, env, path) for origin_result in origin_results: - origin_result_debug_file = getattr(origin_result.attributes, "separate_debug_file", - None) + origin_result_debug_file = getattr( + origin_result.attributes, "separate_debug_file", None + ) if origin_result_debug_file: results.append(origin_result_debug_file) # TODO: Do we need to do the same sort of drag along for bcsymbolmap files? return results builder.target_scanner = SCons.Scanner.Scanner( - function=new_scanner, - path_function=old_path_function, + function=new_scanner, path_function=old_path_function, ) base_action = builder.action @@ -49,26 +49,38 @@ def _update_builder(env, builder, bitcode): # us. We could then also remove a lot of the compiler and sysroot # setup from the etc/scons/xcode_*.vars files, which would be a # win as well. 
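[Editor's note, returning to the NINJA_POOL plumbing added in ninja.py above: the pool is honored only when it arrives as a per-call override, because get_command() consults node.env rather than the global Environment. A dict-based mimic of that lookup — not the real SCons node API:]

    # Dict-based mimic of the pool lookup in get_command(): the "pool"
    # key is emitted only when the node's own (override) environment
    # carries NINJA_POOL, matching the Allowed/Not-allowed comment above.
    def ninja_build_for(node_env, outputs, cmd):
        build = {"outputs": outputs, "rule": "CMD", "variables": {"cmd": cmd}}
        if node_env and node_env.get("NINJA_POOL") is not None:
            build["pool"] = node_env["NINJA_POOL"]
        return build

    print(ninja_build_for({"NINJA_POOL": "console"}, ["ls.txt"], "ls > ls.txt"))
    print(ninja_build_for({}, ["ls.txt"], "ls > ls.txt"))  # no "pool" key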
- if env.TargetOSIs('darwin'): + if env.TargetOSIs("darwin"): if bitcode: base_action.list.append( SCons.Action.Action( "dsymutil -num-threads=1 $TARGET --symbol-map=${TARGET}.bcsymbolmap -o ${TARGET}.dSYM", - "Generating debug info for $TARGET into ${TARGET}.dSYM")) + "Generating debug info for $TARGET into ${TARGET}.dSYM", + ) + ) else: base_action.list.append( - SCons.Action.Action("dsymutil -num-threads=1 $TARGET -o ${TARGET}.dSYM", - "Generating debug info for $TARGET into ${TARGET}.dSYM")) - base_action.list.append(SCons.Action.Action("strip -Sx ${TARGET}", "Stripping ${TARGET}")) - elif env.TargetOSIs('posix'): - base_action.list.extend([ - SCons.Action.Action("${OBJCOPY} --only-keep-debug $TARGET ${TARGET}.debug", - "Generating debug info for $TARGET into ${TARGET}.debug"), - SCons.Action.Action( - "${OBJCOPY} --strip-debug --add-gnu-debuglink ${TARGET}.debug ${TARGET}", - "Stripping debug info from ${TARGET} and adding .gnu.debuglink to ${TARGET}.debug"), - ]) + SCons.Action.Action( + "dsymutil -num-threads=1 $TARGET -o ${TARGET}.dSYM", + "Generating debug info for $TARGET into ${TARGET}.dSYM", + ) + ) + base_action.list.append( + SCons.Action.Action("strip -Sx ${TARGET}", "Stripping ${TARGET}") + ) + elif env.TargetOSIs("posix"): + base_action.list.extend( + [ + SCons.Action.Action( + "${OBJCOPY} --only-keep-debug $TARGET ${TARGET}.debug", + "Generating debug info for $TARGET into ${TARGET}.debug", + ), + SCons.Action.Action( + "${OBJCOPY} --strip-debug --add-gnu-debuglink ${TARGET}.debug ${TARGET}", + "Stripping debug info from ${TARGET} and adding .gnu.debuglink to ${TARGET}.debug", + ), + ] + ) else: pass @@ -79,15 +91,15 @@ def _update_builder(env, builder, bitcode): def new_emitter(target, source, env): bitcode_file = None - if env.TargetOSIs('darwin'): + if env.TargetOSIs("darwin"): debug_file = env.Entry(str(target[0]) + ".dSYM") env.Precious(debug_file) if bitcode: bitcode_file = env.File(str(target[0]) + ".bcsymbolmap") - elif env.TargetOSIs('posix'): + elif env.TargetOSIs("posix"): debug_file = env.File(str(target[0]) + ".debug") - elif env.TargetOSIs('windows'): - debug_file = env.File(env.subst('${PDB}', target=target)) + elif env.TargetOSIs("windows"): + debug_file = env.File(env.subst("${PDB}", target=target)) else: pass @@ -121,18 +133,22 @@ def generate(env): # later was a better time to address this. We should also consider # moving all bitcode setup into a separate tool. bitcode = False - if env.TargetOSIs('darwin') and any(flag == "-fembed-bitcode" for flag in env['LINKFLAGS']): + if env.TargetOSIs("darwin") and any( + flag == "-fembed-bitcode" for flag in env["LINKFLAGS"] + ): bitcode = True - env.AppendUnique(LINKFLAGS=[ - "-Wl,-bitcode_hide_symbols", - "-Wl,-bitcode_symbol_map,${TARGET}.bcsymbolmap", - ]) + env.AppendUnique( + LINKFLAGS=[ + "-Wl,-bitcode_hide_symbols", + "-Wl,-bitcode_symbol_map,${TARGET}.bcsymbolmap", + ] + ) # TODO: For now, not doing this for programs. Need to update # auto_install_binaries to understand to install the debug symbol # for target X to the same target location as X. 
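[Editor's note for readers unfamiliar with the ELF branch reformatted above: it appends the classic two-step debug split after linking. A standalone sketch that only constructs the command lines — nothing is executed, and the target path is illustrative:]

    # The two post-link steps separate_debug appends on non-darwin posix:
    # 1) copy debug info into <target>.debug, then 2) strip it from
    #    <target> and record a .gnu.debuglink back to the split file.
    def objcopy_commands(target):
        return [
            ["objcopy", "--only-keep-debug", target, target + ".debug"],
            ["objcopy", "--strip-debug", "--add-gnu-debuglink",
             target + ".debug", target],
        ]

    for cmd in objcopy_commands("build/opt/mongo/mongod"):
        print(" ".join(cmd))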
- for builder in ['Program', 'SharedLibrary', 'LoadableModule']: - _update_builder(env, env['BUILDERS'][builder], bitcode) + for builder in ["Program", "SharedLibrary", "LoadableModule"]: + _update_builder(env, env["BUILDERS"][builder], bitcode) def exists(env): diff --git a/site_scons/site_tools/split_dwarf.py b/site_scons/site_tools/split_dwarf.py index c57b9e96822..9fe960d1b70 100644 --- a/site_scons/site_tools/split_dwarf.py +++ b/site_scons/site_tools/split_dwarf.py @@ -14,24 +14,24 @@ import SCons -_splitDwarfFlag = '-gsplit-dwarf' +_splitDwarfFlag = "-gsplit-dwarf" # Cribbed from Tool/cc.py and Tool/c++.py. It would be better if # we could obtain this from SCons. -_CSuffixes = ['.c'] -if not SCons.Util.case_sensitive_suffixes('.c', '.C'): - _CSuffixes.append('.C') +_CSuffixes = [".c"] +if not SCons.Util.case_sensitive_suffixes(".c", ".C"): + _CSuffixes.append(".C") -_CXXSuffixes = ['.cpp', '.cc', '.cxx', '.c++', '.C++'] -if SCons.Util.case_sensitive_suffixes('.c', '.C'): - _CXXSuffixes.append('.C') +_CXXSuffixes = [".cpp", ".cc", ".cxx", ".c++", ".C++"] +if SCons.Util.case_sensitive_suffixes(".c", ".C"): + _CXXSuffixes.append(".C") def _dwo_emitter(target, source, env): new_targets = [] for t in target: base, ext = SCons.Util.splitext(str(t)) - if not any(ext == env[osuffix] for osuffix in ['OBJSUFFIX', 'SHOBJSUFFIX']): + if not any(ext == env[osuffix] for osuffix in ["OBJSUFFIX", "SHOBJSUFFIX"]): continue # TODO: Move 'dwo' into DWOSUFFIX so it can be customized? For # now, GCC doesn't let you control the output filename, so it @@ -44,12 +44,12 @@ def _dwo_emitter(target, source, env): def generate(env): suffixes = [] - if _splitDwarfFlag in env['CCFLAGS']: + if _splitDwarfFlag in env["CCFLAGS"]: suffixes = _CSuffixes + _CXXSuffixes else: - if _splitDwarfFlag in env['CFLAGS']: + if _splitDwarfFlag in env["CFLAGS"]: suffixes.extend(_CSuffixes) - if _splitDwarfFlag in env['CXXFLAGS']: + if _splitDwarfFlag in env["CXXFLAGS"]: suffixes.extend(_CXXSuffixes) for object_builder in SCons.Tool.createObjBuilders(env): @@ -58,11 +58,8 @@ def generate(env): if not suffix in suffixes: continue base = emitterdict[suffix] - emitterdict[suffix] = SCons.Builder.ListEmitter([ - base, - _dwo_emitter, - ]) + emitterdict[suffix] = SCons.Builder.ListEmitter([base, _dwo_emitter,]) def exists(env): - return any(_splitDwarfFlag in env[f] for f in ['CCFLAGS', 'CFLAGS', 'CXXFLAGS']) + return any(_splitDwarfFlag in env[f] for f in ["CCFLAGS", "CFLAGS", "CXXFLAGS"]) diff --git a/site_scons/site_tools/thin_archive.py b/site_scons/site_tools/thin_archive.py index 145589c7996..cbc0cd03890 100644 --- a/site_scons/site_tools/thin_archive.py +++ b/site_scons/site_tools/thin_archive.py @@ -19,21 +19,25 @@ import subprocess def exists(env): - if not 'AR' in env: + if not "AR" in env: return False - ar = env.subst(env['AR']) + ar = env.subst(env["AR"]) if not ar: return False # If the user has done anything confusing with ARFLAGS, bail out. We want to find # an item in ARFLAGS of the exact form 'rc'. 
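[Editor's note, back to split_dwarf.py above: with -gsplit-dwarf, GCC writes a .dwo next to each object file, and the emitter's job is to declare that extra output to SCons. A pure-Python rework of _dwo_emitter's suffix logic, with SCons node types elided:]

    import os

    # Rework of split_dwarf._dwo_emitter: for each object file a builder
    # will produce, declare the sibling .dwo that GCC names implicitly.
    def with_dwo_targets(targets, obj_suffixes=(".o", ".os")):
        extra = []
        for t in targets:
            base, ext = os.path.splitext(t)
            if ext in obj_suffixes:
                extra.append(base + ".dwo")
        return targets + extra

    print(with_dwo_targets(["db.o", "README"]))  # ['db.o', 'README', 'db.dwo']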
- if not "rc" in env['ARFLAGS']: + if not "rc" in env["ARFLAGS"]: return False - pipe = SCons.Action._subproc(env, - SCons.Util.CLVar(ar) + ['--version'], stdin='devnull', - stderr='devnull', stdout=subprocess.PIPE) + pipe = SCons.Action._subproc( + env, + SCons.Util.CLVar(ar) + ["--version"], + stdin="devnull", + stderr="devnull", + stdout=subprocess.PIPE, + ) if pipe.wait() != 0: return False @@ -41,10 +45,11 @@ def exists(env): for line in pipe.stdout: if found: continue # consume all data - found = re.search(r'^GNU ar|^LLVM', line.decode('utf-8')) + found = re.search(r"^GNU ar|^LLVM", line.decode("utf-8")) return bool(found) + def _add_emitter(builder): base_emitter = builder.emitter @@ -70,23 +75,25 @@ def _add_scanner(builder): new_results.extend(base.children()) return new_results - builder.target_scanner = SCons.Scanner.Scanner(function=new_scanner, - path_function=path_function) + builder.target_scanner = SCons.Scanner.Scanner( + function=new_scanner, path_function=path_function + ) def generate(env): if not exists(env): return - env['ARFLAGS'] = SCons.Util.CLVar( - [arflag if arflag != "rc" else "rcsTD" for arflag in env['ARFLAGS']]) + env["ARFLAGS"] = SCons.Util.CLVar( + [arflag if arflag != "rc" else "rcsTD" for arflag in env["ARFLAGS"]] + ) # Disable running ranlib, since we added 's' above - env['RANLIBCOM'] = '' - env['RANLIBCOMSTR'] = 'Skipping ranlib for thin archive $TARGET' + env["RANLIBCOM"] = "" + env["RANLIBCOMSTR"] = "Skipping ranlib for thin archive $TARGET" - for builder in ['StaticLibrary', 'SharedArchive']: - _add_emitter(env['BUILDERS'][builder]) + for builder in ["StaticLibrary", "SharedArchive"]: + _add_emitter(env["BUILDERS"][builder]) - for builder in ['SharedLibrary', 'LoadableModule', 'Program']: - _add_scanner(env['BUILDERS'][builder]) + for builder in ["SharedLibrary", "LoadableModule", "Program"]: + _add_scanner(env["BUILDERS"][builder]) diff --git a/site_scons/site_tools/vcredist.py b/site_scons/site_tools/vcredist.py index 62e14010329..67f34e69724 100644 --- a/site_scons/site_tools/vcredist.py +++ b/site_scons/site_tools/vcredist.py @@ -19,10 +19,12 @@ import winreg import SCons + def exists(env): - result = 'msvc' in env['TOOLS'] + result = "msvc" in env["TOOLS"] return result + # How to locate the Merge Modules path is described in: # # - VS2019: https://docs.microsoft.com/en-us/visualstudio/releases/2019/redistribution#visual-c-runtime-files @@ -42,17 +44,18 @@ def exists(env): # # TODO: Expand this map as needed. 
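[Editor's note on thin_archive's generate() above: the rewrite touches only the exact ARFLAGS token "rc", expanding it to "rcsTD" (s: build the symbol index, T: thin archive, D: deterministic members); the paired RANLIBCOM change then skips ranlib since 's' already indexed the archive. Standalone:]

    # Sketch of the ARFLAGS rewrite in thin_archive.generate(): only the
    # exact token "rc" becomes "rcsTD"; other tokens pass through.
    def thinify(arflags):
        return [flag if flag != "rc" else "rcsTD" for flag in arflags]

    print(thinify(["rc"]))            # ['rcsTD']
    print(thinify(["rcv", "other"]))  # unchanged: no exact "rc" token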
target_arch_expansion_map = { - 'amd64' : 'x64', - 'arm' : None, - 'arm64' : 'arm64', - 'emt64' : 'x64', - 'i386' : 'x86', - 'x86' : 'x86', - 'x86_64' : 'x64', + "amd64": "x64", + "arm": None, + "arm64": "arm64", + "emt64": "x64", + "i386": "x86", + "x86": "x86", + "x86_64": "x64", } + def _get_programfiles(): - result = os.getenv('ProgramFiles(x86)') + result = os.getenv("ProgramFiles(x86)") # When we run this under cygwin, the environment is broken, fall # back to hard coded C:\Program Files (x86) if result is None: @@ -61,20 +64,22 @@ def _get_programfiles(): return None return result + def _get_merge_module_name_for_feature(env, feature): - version_components = env['MSVC_VERSION'].split('.') + version_components = env["MSVC_VERSION"].split(".") return "Microsoft_VC{msvc_major}{msvc_minor}_{feature}_{target_arch}.msm".format( msvc_major=version_components[0], msvc_minor=version_components[1], feature=feature, - target_arch=target_arch_expansion_map[env.subst('$TARGET_ARCH')] + target_arch=target_arch_expansion_map[env.subst("$TARGET_ARCH")], ) + def generate(env): if not exists(env): return - env.Tool('msvc') + env.Tool("msvc") env.AddMethod(_get_merge_module_name_for_feature, "GetMergeModuleNameForFeature") @@ -85,7 +90,7 @@ def generate(env): # https://en.wikipedia.org/wiki/Microsoft_Visual_C%2B%2B#Internal_version_numbering # for details on the various version numbers in play for # the Microsoft toolchain. - msvc_major, msvc_minor = env['MSVC_VERSION'].split('.') + msvc_major, msvc_minor = env["MSVC_VERSION"].split(".") if msvc_major != "14": return @@ -104,7 +109,7 @@ def generate(env): # On VS2015 the merge modules are in the program files directory, # not under the VS install dir. - if msvc_minor == '0': + if msvc_minor == "0": if not programfilesx86: programfilesx86 = _get_programfiles() @@ -113,9 +118,9 @@ def generate(env): mergemodulepath = os.path.join(programfilesx86, "Common Files", "Merge Modules") if os.path.isdir(mergemodulepath): - env['MSVS']['VCREDISTMERGEMODULEPATH'] = mergemodulepath + env["MSVS"]["VCREDISTMERGEMODULEPATH"] = mergemodulepath - if not 'VSINSTALLDIR' in env['MSVS']: + if not "VSINSTALLDIR" in env["MSVS"]: # Compute a VS version based on the VC version. VC 14.0 is VS 2015, VC # 14.1 is VS 2017. Also compute the next theoretical version by @@ -123,7 +128,9 @@ def generate(env): # that we can use as an argument to the -version flag to vswhere. vs_version = int(msvc_major) + int(msvc_minor) vs_version_next = vs_version + 1 - vs_version_range = '[{vs_version}.0, {vs_version_next}.0)'.format(vs_version=vs_version, vs_version_next=vs_version_next) + vs_version_range = "[{vs_version}.0, {vs_version_next}.0)".format( + vs_version=vs_version, vs_version_next=vs_version_next + ) if not programfilesx86: programfilesx86 = _get_programfiles() @@ -131,28 +138,52 @@ def generate(env): return # Use vswhere (it has a fixed stable path) to query where Visual Studio is installed. 
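[Editor's note: the merge-module naming above composes the MSVC major/minor version, a feature name, and the expanded target arch. As a pure function with the Environment lookups turned into parameters — a sketch, not the tool's API:]

    # Pure-function rework of _get_merge_module_name_for_feature().
    ARCH_EXPANSION = {"amd64": "x64", "i386": "x86", "x86": "x86", "x86_64": "x64"}

    def merge_module_name(msvc_version, feature, target_arch):
        major, minor = msvc_version.split(".")
        return "Microsoft_VC{}{}_{}_{}.msm".format(
            major, minor, feature, ARCH_EXPANSION[target_arch]
        )

    print(merge_module_name("14.1", "CRT", "amd64"))  # Microsoft_VC141_CRT_x64.msm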
- env['MSVS']['VSINSTALLDIR'] = subprocess.check_output([os.path.join(programfilesx86, "Microsoft Visual Studio", "Installer", "vswhere.exe"), "-version", vs_version_range, "-property", "installationPath", "-nologo"]).decode('utf-8').strip() - - vsinstall_dir = env['MSVS']['VSINSTALLDIR'] + env["MSVS"]["VSINSTALLDIR"] = ( + subprocess.check_output( + [ + os.path.join( + programfilesx86, + "Microsoft Visual Studio", + "Installer", + "vswhere.exe", + ), + "-version", + vs_version_range, + "-property", + "installationPath", + "-nologo", + ] + ) + .decode("utf-8") + .strip() + ) + + vsinstall_dir = env["MSVS"]["VSINSTALLDIR"] # Combine and set the full merge module path redist_root = os.path.join(vsinstall_dir, "VC", "Redist", "MSVC") if not os.path.isdir(redist_root): return - env['MSVS']['VCREDISTROOT'] = redist_root + env["MSVS"]["VCREDISTROOT"] = redist_root # Check the registry key that has the runtime lib version try: # TOOO: This x64 needs to be abstracted away. Is it the host # arch, or the target arch? My guess is host. - vsruntime_key_name = "SOFTWARE\\Microsoft\\VisualStudio\\{msvc_major}.0\\VC\\Runtimes\\x64".format(msvc_major=msvc_major) + vsruntime_key_name = "SOFTWARE\\Microsoft\\VisualStudio\\{msvc_major}.0\\VC\\Runtimes\\x64".format( + msvc_major=msvc_major + ) vsruntime_key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, vsruntime_key_name) - vslib_version, vslib_version_type = winreg.QueryValueEx(vsruntime_key, "Version") + vslib_version, vslib_version_type = winreg.QueryValueEx( + vsruntime_key, "Version" + ) except WindowsError: return # Fallback to directory search if we don't find the expected version - redist_path = os.path.join(redist_root, re.match("v(\d+\.\d+\.\d+)\.\d+", vslib_version).group(1)) + redist_path = os.path.join( + redist_root, re.match("v(\d+\.\d+\.\d+)\.\d+", vslib_version).group(1) + ) if not os.path.isdir(redist_path): redist_path = None dirs = os.listdir(redist_root) @@ -164,12 +195,12 @@ def generate(env): break else: return - env['MSVS']['VCREDISTPATH'] = redist_path + env["MSVS"]["VCREDISTPATH"] = redist_path if mergemodulepath is None and msvc_minor != "0": mergemodulepath = os.path.join(redist_path, "MergeModules") if os.path.isdir(mergemodulepath): - env['MSVS']['VCREDISTMERGEMODULEPATH'] = mergemodulepath + env["MSVS"]["VCREDISTMERGEMODULEPATH"] = mergemodulepath # Keep these in preference order. 
The way with the {} in between # the dots appears to be the more modern form, but we select the @@ -185,15 +216,17 @@ def generate(env): "vc_redist.{}.exe", ] - expansion = target_arch_expansion_map.get(env.subst('$TARGET_ARCH'), None) + expansion = target_arch_expansion_map.get(env.subst("$TARGET_ARCH"), None) if not expansion: return - vcredist_candidates = [c.format(expansion) for c in vcredist_search_template_sequence] + vcredist_candidates = [ + c.format(expansion) for c in vcredist_search_template_sequence + ] for candidate in vcredist_candidates: candidate = os.path.join(redist_path, candidate) if os.path.isfile(candidate): break else: return - env['MSVS']['VCREDISTEXE'] = candidate + env["MSVS"]["VCREDISTEXE"] = candidate
diff --git a/site_scons/site_tools/xcode.py b/site_scons/site_tools/xcode.py
index 5ddebb2e003..d4dbc23c8ef 100644
--- a/site_scons/site_tools/xcode.py
+++ b/site_scons/site_tools/xcode.py
@@ -1,12 +1,16 @@ import os + def exists(env): - return env.Detect('xcrun') + return env.Detect("xcrun") + def generate(env): if not exists(env): return - if 'DEVELOPER_DIR' in os.environ: - env['ENV']['DEVELOPER_DIR'] = os.environ['DEVELOPER_DIR'] - print("NOTE: Xcode detected; propagating DEVELOPER_DIR from shell environment to subcommands") + if "DEVELOPER_DIR" in os.environ: + env["ENV"]["DEVELOPER_DIR"] = os.environ["DEVELOPER_DIR"] + print( + "NOTE: Xcode detected; propagating DEVELOPER_DIR from shell environment to subcommands" + )
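[Editor's closing note on a pattern vcredist.py uses twice above: Python's for/else, where the else arm runs only if the loop never hit break. The candidate probe reduces to this standalone sketch; the paths are illustrative:]

    import os

    # Reduced sketch of the vc_redist candidate probe: try names in
    # preference order; the else arm fires only when no file matched.
    def find_first_existing(directory, candidates):
        for name in candidates:
            path = os.path.join(directory, name)
            if os.path.isfile(path):
                break
        else:
            return None  # loop exhausted without break: nothing found
        return path

    print(find_first_existing("/tmp", ["vcredist_x64.exe", "vc_redist.x64.exe"]))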