summaryrefslogtreecommitdiff
path: root/site_scons
diff options
context:
space:
mode:
authorRyan Egesdahl <ryan.egesdahl@mongodb.com>2020-10-05 10:23:55 -0700
committerEvergreen Agent <no-reply@evergreen.mongodb.com>2020-10-06 02:57:27 +0000
commit78bb3f3c8a658a5a9fec8d55864e426382f68bd0 (patch)
tree73de4f2c4cd991d8f0225bd87cafd0b5b9219977 /site_scons
parentae16f30da8c3acc89ead3ff6a753b2ad3985121d (diff)
downloadmongo-78bb3f3c8a658a5a9fec8d55864e426382f68bd0.tar.gz
SERVER-50363 Merge --build-tools=next into stable
Merging the following fixes into the stable version of the build tools, migrating fully from --ninja=next to --build-tools=next: * SERVER-47598 * SERVER-50010 * SERVER-47943 * SERVER-50125 * SERVER-50376 * SERVER-49457 * SERVER-49493 * SERVER-49036 * SERVER-48966
Diffstat (limited to 'site_scons')
-rw-r--r--site_scons/site_tools/ccache.py84
-rw-r--r--site_scons/site_tools/icecream.py576
-rw-r--r--site_scons/site_tools/ninja.py229
3 files changed, 641 insertions, 248 deletions
diff --git a/site_scons/site_tools/ccache.py b/site_scons/site_tools/ccache.py
index 1a34571cc7e..2a894919f8d 100644
--- a/site_scons/site_tools/ccache.py
+++ b/site_scons/site_tools/ccache.py
@@ -30,24 +30,21 @@ from pkg_resources import parse_version
# This is the oldest version of ccache that offers support for -gsplit-dwarf
_ccache_version_min = parse_version("3.2.3")
-_ccache_version_found = None
def exists(env):
"""Look for a viable ccache implementation that meets our version requirements."""
-
- # If we already generated, we definitely exist
- if "CCACHE_VERSION" in env:
- return True
-
- ccache = env.get("CCACHE", False)
- if not ccache:
+ if not env.subst("$CCACHE"):
return False
- ccache = env.WhereIs(ccache)
+ ccache = env.WhereIs("$CCACHE")
if not ccache:
+ print(f"Error: ccache not found at {env['CCACHE']}")
return False
+ if 'CCACHE_VERSION' in env and env['CCACHE_VERSION'] >= _ccache_version_min:
+ return True
+
pipe = SCons.Action._subproc(
env,
SCons.Util.CLVar(ccache) + ["--version"],
@@ -57,6 +54,7 @@ def exists(env):
)
if pipe.wait() != 0:
+ print(f"Error: failed to execute '{env['CCACHE']}'")
return False
validated = False
@@ -70,25 +68,23 @@ def exists(env):
ccache_version = re.split("ccache version (.+)", line)
if len(ccache_version) < 2:
continue
- global _ccache_version_found
- _ccache_version_found = parse_version(ccache_version[1])
- if _ccache_version_found >= _ccache_version_min:
+ ccache_version = parse_version(ccache_version[1])
+ if ccache_version >= _ccache_version_min:
validated = True
+ if validated:
+ env['CCACHE_VERSION'] = ccache_version
+ else:
+ print(f"Error: failed to verify ccache version >= {_ccache_version_min}, found {ccache_version}")
+
return validated
def generate(env):
"""Add ccache support."""
- # If we have already generated the tool, don't generate it again.
- if "CCACHE_VERSION" in env:
- return
-
- # If we can't find ccache, or it is too old a version, don't
- # generate.
- if not exists(env):
- return
+ # Absoluteify
+ env["CCACHE"] = env.WhereIs("$CCACHE")
# Propagate CCACHE related variables into the command environment
for var, host_value in os.environ.items():
@@ -104,23 +100,53 @@ def generate(env):
if env.ToolchainIs("clang"):
env.AppendUnique(CCFLAGS=["-Qunused-arguments"])
- # Record our found CCACHE_VERSION. Other tools that need to know
- # about ccache (like iecc) should query this variable to determine
- # if ccache is active. Looking at the CCACHE variable in the
- # environment is not sufficient, since the user may have set it,
- # but it doesn't work or is out of date.
- env["CCACHE_VERSION"] = _ccache_version_found
+ # Check whether icecream is requested and is a valid tool.
+ if "ICECC" in env:
+ icecream = SCons.Tool.Tool('icecream')
+ icecream_enabled = bool(icecream) and icecream.exists(env)
+ else:
+ icecream_enabled = False
# Set up a performant ccache configuration. Here, we don't use a second preprocessor and
# pass preprocessor arguments that deterministically expand source files so a stable
# hash can be calculated on them. This both reduces the amount of work ccache needs to
# do and increases the likelihood of a cache hit.
- env["ENV"]["CCACHE_NOCPP2"] = 1
if env.ToolchainIs("clang"):
+ env["ENV"].pop("CCACHE_CPP2", None)
+ env["ENV"]["CCACHE_NOCPP2"] = "1"
env.AppendUnique(CCFLAGS=["-frewrite-includes"])
elif env.ToolchainIs("gcc"):
- env.AppendUnique(CCFLAGS=["-fdirectives-only"])
-
+ if icecream_enabled:
+ # Newer versions of Icecream will drop -fdirectives-only from
+ # preprocessor and compiler flags if it does not find a remote
+ # build host to build on. ccache, on the other hand, will not
+ # pass the flag to the compiler if CCACHE_NOCPP2=1, but it will
+ # pass it to the preprocessor. The combination of setting
+ # CCACHE_NOCPP2=1 and passing the flag can lead to build
+ # failures.
+
+ # See: https://jira.mongodb.org/browse/SERVER-48443
+ # We have an open issue with Icecream and ccache to resolve the
+ # cause of these build failures. Once the bug is resolved and
+ # the fix is deployed, we can remove this entire conditional
+ # branch and make it like the one for clang.
+ # TODO: https://github.com/icecc/icecream/issues/550
+ env["ENV"].pop("CCACHE_CPP2", None)
+ env["ENV"]["CCACHE_NOCPP2"] = "1"
+ else:
+ env["ENV"].pop("CCACHE_NOCPP2", None)
+ env["ENV"]["CCACHE_CPP2"] = "1"
+ env.AppendUnique(CCFLAGS=["-fdirectives-only"])
+
+    # Ensure ccache accounts for any extra files in use that affect the generated object
+ # file. This can be used for situations where a file is passed as an argument to a
+ # compiler parameter and differences in the file need to be accounted for in the
+ # hash result to prevent erroneous cache hits.
+ if "CCACHE_EXTRAFILES" in env and env["CCACHE_EXTRAFILES"]:
+ env["ENV"]["CCACHE_EXTRAFILES"] = ":".join([
+ blackfile.path
+ for blackfile in env["CCACHE_EXTRAFILES"]
+ ])
# Make a generator to expand to CCACHE in the case where we are
# not a conftest. We don't want to use ccache for configure tests
diff --git a/site_scons/site_tools/icecream.py b/site_scons/site_tools/icecream.py
index 979bba7e3b8..7456ed0cc8f 100644
--- a/site_scons/site_tools/icecream.py
+++ b/site_scons/site_tools/icecream.py
@@ -20,14 +20,15 @@
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-import SCons
-
import os
import re
import subprocess
+import urllib
from pkg_resources import parse_version
+import SCons
+
_icecream_version_min = parse_version("1.1rc2")
_icecream_version_gcc_remote_cpp = parse_version("1.2")
@@ -50,34 +51,44 @@ class _BoundSubstitution:
def icecc_create_env(env, target, source, for_signature):
# Safe to assume unix here because icecream only works on Unix
- mkdir = "mkdir -p ${ICECC_VERSION.Dir('').abspath}"
+ mkdir = "mkdir -p ${TARGET.dir}"
# Create the env, use awk to get just the tarball name and we store it in
# the shell variable $ICECC_VERSION_TMP so the subsequent mv command and
# store it in a known location. Add any files requested from the user environment.
- create_env = "ICECC_VERSION_TMP=$$($ICECC_CREATE_ENV --$ICECC_COMPILER_TYPE $CC $CXX"
+ create_env = "ICECC_VERSION_TMP=$$(${SOURCES[0]} --$ICECC_COMPILER_TYPE ${SOURCES[1]} ${SOURCES[2]}"
+
+ # TODO: It would be a little more elegant if things in
+ # ICECC_CREATE_ENV_ADDFILES were handled as sources, because we
+ # would get automatic dependency tracking. However, there are some
+ # wrinkles around the mapped case so we have opted to leave it as
+ # just interpreting the env for now.
for addfile in env.get('ICECC_CREATE_ENV_ADDFILES', []):
- if (type(addfile) == tuple
- and len(addfile) == 2):
- if env['ICECREAM_VERSION'] > parse_version('1.1'):
- raise Exception("This version of icecream does not support addfile remapping.")
- create_env += " --addfile {}={}".format(
- env.File(addfile[0]).srcnode().abspath,
- env.File(addfile[1]).srcnode().abspath)
- env.Depends('$ICECC_VERSION', addfile[1])
- elif type(addfile) == str:
- create_env += " --addfile {}".format(env.File(addfile).srcnode().abspath)
- env.Depends('$ICECC_VERSION', addfile)
+ if isinstance(addfile, tuple):
+ if len(addfile) == 2:
+ if env['ICECREAM_VERSION'] > parse_version('1.1'):
+ raise Exception("This version of icecream does not support addfile remapping.")
+ create_env += " --addfile {}={}".format(
+ env.File(addfile[0]).srcnode().abspath,
+ env.File(addfile[1]).srcnode().abspath)
+ env.Depends(target, addfile[1])
+ else:
+ raise Exception(f"Found incorrect icecream addfile format: {str(addfile)}" +
+                        f"\ntuple must have two elements of the form" +
+ f"\n('chroot dest path', 'source file path')")
else:
- # NOTE: abspath is required by icecream because of
- # this line in icecc-create-env:
- # https://github.com/icecc/icecream/blob/10b9468f5bd30a0fdb058901e91e7a29f1bfbd42/client/icecc-create-env.in#L534
- # which cuts out the two files based off the equals sign and
- # starting slash of the second file
- raise Exception("Found incorrect icecream addfile format: {}" +
- "\nicecream addfiles must be a single path or tuple path format: " +
- "('chroot dest path', 'source file path')".format(
- str(addfile)))
+ try:
+ create_env += f" --addfile {env.File(addfile).srcnode().abspath}"
+ env.Depends(target, addfile)
+ except:
+ # NOTE: abspath is required by icecream because of
+ # this line in icecc-create-env:
+ # https://github.com/icecc/icecream/blob/10b9468f5bd30a0fdb058901e91e7a29f1bfbd42/client/icecc-create-env.in#L534
+ # which cuts out the two files based off the equals sign and
+ # starting slash of the second file
+ raise Exception(f"Found incorrect icecream addfile format: {type(addfile)}" +
+ f"\nvalue provided cannot be converted to a file path")
+
create_env += " | awk '/^creating .*\\.tar\\.gz/ { print $$2 }')"
# Simply move our tarball to the expected locale.
@@ -89,10 +100,6 @@ def icecc_create_env(env, target, source, for_signature):
def generate(env):
-
- if not exists(env):
- return
-
# icecc lower then 1.1 supports addfile remapping accidentally
# and above it adds an empty cpuinfo so handle cpuinfo issues for icecream
# below version 1.1
@@ -101,77 +108,56 @@ def generate(env):
and os.path.exists('/proc/cpuinfo')):
env.AppendUnique(ICECC_CREATE_ENV_ADDFILES=[('/proc/cpuinfo', '/dev/null')])
- env["ICECCENVCOMSTR"] = env.get("ICECCENVCOMSTR", "Generating environment: $TARGET")
- env["ICECC_COMPILER_TYPE"] = env.get(
- "ICECC_COMPILER_TYPE", os.path.basename(env.WhereIs("${CC}"))
- )
- env.Append(
- BUILDERS={
- "IcecreamEnv": SCons.Builder.Builder(
- action=SCons.Action.CommandGeneratorAction(
- icecc_create_env, {"comstr": "$ICECCENVCOMSTR"},
- )
- )
- }
- )
-
- # If we are going to load the ccache tool, but we haven't done so
- # yet, then explicitly do it now. We need the ccache tool to be in
- # place before we setup icecream because we need to do things a
- # little differently if ccache is in play. If you don't use the
- # TOOLS variable to configure your tools, you should explicitly
- # load the ccache tool before you load icecream.
- ccache_enabled = "CCACHE_VERSION" in env
- if "ccache" in env["TOOLS"] and not ccache_enabled:
- env.Tool("ccache")
-
# Absoluteify, so we can derive ICERUN
env["ICECC"] = env.WhereIs("$ICECC")
- if not "ICERUN" in env:
- env["ICERUN"] = env.File("$ICECC").File("icerun")
-
- # Absoluteify, for parity with ICECC
- env["ICERUN"] = env.WhereIs("$ICERUN")
+ if "ICERUN" in env:
+ # Absoluteify, for parity with ICECC
+ icerun = env.WhereIs("$ICERUN")
+ else:
+ icerun = env.File("$ICECC").File("icerun")
+ env["ICERUN"] = icerun
- env["ICECC_CREATE_ENV"] = env.WhereIs(
- env.get("ICECC_CREATE_ENV", "icecc-create-env")
- )
+ if "ICECC_CREATE_ENV" in env:
+ icecc_create_env_bin = env.WhereIs("$ICECC_CREATE_ENV")
+ else:
+ icecc_create_env_bin = env.File("ICECC").File("icecc-create-env")
+ env["ICECC_CREATE_ENV"] = icecc_create_env_bin
- # Make CC and CXX absolute paths too. It is better for icecc.
+ # Make CC and CXX absolute paths too. This ensures the correct paths to
+ # compilers get passed to icecc-create-env rather than letting it
+ # potentially discover something we don't expect via PATH.
env["CC"] = env.WhereIs("$CC")
env["CXX"] = env.WhereIs("$CXX")
- have_explicit_icecc_version = 'ICECC_VERSION' in env and bool(env['ICECC_VERSION'])
- have_icecc_version_url = have_explicit_icecc_version and env["ICECC_VERSION"].startswith("http")
+ # Set up defaults for configuration options
+ env['ICECREAM_TARGET_DIR'] = env.Dir(
+ env.get('ICECREAM_TARGET_DIR', '#./.icecream')
+ )
+ verbose = env.get('ICECREAM_VERBOSE', False)
+ env['ICECREAM_DEBUG'] = env.get('ICECREAM_DEBUG', False)
+
+ # We have a lot of things to build and run that the final user
+ # environment doesn't need to see or know about. Make a custom env
+ # that we use consistently from here to where we end up setting
+ # ICECREAM_RUN_ICECC in the user env.
+ setupEnv = env.Clone(
+ NINJA_SKIP=True
+ )
+
+ if 'ICECC_VERSION' in setupEnv and bool(setupEnv['ICECC_VERSION']):
- if have_explicit_icecc_version and not have_icecc_version_url:
- icecc_version_file = env.File('$ICECC_VERSION')
- if not icecc_version_file.exists():
- raise Exception(
- 'The ICECC_VERSION variable set set to {}, but this file does not exist'.format(icecc_version_file)
- )
- env['ICECC_VERSION'] = icecc_version_file
- else:
- # Generate the deterministic name for our tarball
- icecc_version_target_filename = env.subst("${CC}${CXX}.tar.gz").replace("/", "_")[
- 1:
- ]
- icecc_version_dir = env.Dir("$BUILD_ROOT/scons/icecc")
- icecc_known_version = icecc_version_dir.File(icecc_version_target_filename)
-
- if have_icecc_version_url:
- # We do the above weaker validation as opposed to
- # urllib.urlparse (or similar). We really only support http
- # URLs here and any other validation either requires a third
- # party module or accepts things we don't.
- env["ICECC_VERSION_URL"] = env["ICECC_VERSION"]
- env["ICECC_VERSION"] = icecc_known_version
+ if setupEnv["ICECC_VERSION"].startswith("http"):
+
+ quoted = urllib.parse.quote(setupEnv['ICECC_VERSION'], safe=[])
# Use curl / wget to download the toolchain because SCons (and ninja)
# are better at running shell commands than Python functions.
- curl = env.WhereIs("curl")
- wget = env.WhereIs("wget")
+ #
+ # TODO: This all happens SCons side now. Should we just use python to
+ # fetch instead?
+ curl = setupEnv.WhereIs("curl")
+ wget = setupEnv.WhereIs("wget")
if curl:
cmdstr = "curl -L"
@@ -182,99 +168,288 @@ def generate(env):
"You have specified an ICECC_VERSION that is a URL but you have neither wget nor curl installed."
)
- env.Command(
- target="$ICECC_VERSION",
- source=["$CC", "$CXX"],
- action=[
- cmdstr + " -o $TARGET $ICECC_VERSION_URL",
- ],
- )
+ # Copy ICECC_VERSION into ICECC_VERSION_URL so that we can
+ # change ICECC_VERSION without perturbing the effect of
+ # the action.
+ setupEnv['ICECC_VERSION_URL'] = setupEnv['ICECC_VERSION']
+ setupEnv['ICECC_VERSION'] = icecc_version_file = setupEnv.Command(
+ target=f"$ICECREAM_TARGET_DIR/{quoted}",
+ source=[setupEnv.Value(quoted)],
+ action=SCons.Action.Action(
+ f"{cmdstr} -o $TARGET $ICECC_VERSION_URL",
+ "Downloading compiler package from $ICECC_VERSION_URL" if not verbose else str(),
+ ),
+ )[0]
+
else:
- # Make a predictable name for the toolchain
- env["ICECC_VERSION"] = env.File(icecc_known_version)
- env.IcecreamEnv(
- target="$ICECC_VERSION",
- source=["$ICECC_CREATE_ENV", "$CC", "$CXX"],
+ # Convert the users selection into a File node and do some basic validation
+ setupEnv['ICECC_VERSION'] = icecc_version_file = setupEnv.File('$ICECC_VERSION')
+
+ if not icecc_version_file.exists():
+ raise Exception(
+                'The ICECC_VERSION variable was set to {}, but this file does not exist'.format(icecc_version_file)
+ )
+
+ # This is what we are going to call the file names as known to SCons on disk
+ setupEnv["ICECC_VERSION_ID"] = "user_provided." + icecc_version_file.name
+
+ else:
+
+ setupEnv["ICECC_COMPILER_TYPE"] = setupEnv.get(
+ "ICECC_COMPILER_TYPE", os.path.basename(setupEnv.WhereIs("${CC}"))
+ )
+
+ # This is what we are going to call the file names as known to SCons on disk. We do the
+ # subst early so that we can call `replace` on the result.
+ setupEnv["ICECC_VERSION_ID"] = setupEnv.subst(
+ "icecc-create-env.${CC}${CXX}.tar.gz").replace("/", "_"
+ )
+
+ setupEnv["ICECC_VERSION"] = icecc_version_file = setupEnv.Command(
+ target="$ICECREAM_TARGET_DIR/$ICECC_VERSION_ID",
+ source=[
+ "$ICECC_CREATE_ENV",
+ "$CC",
+ "$CXX"
+ ],
+ action=SCons.Action.Action(
+ icecc_create_env,
+ "Generating icecream compiler package: $TARGET" if not verbose else str(),
+ generator=True,
)
+ )[0]
- # Our ICECC_VERSION isn't just a file, so we need to make
- # things depend on it to ensure that it comes into being at
- # the right time. Don't do that for conftests though: we never
- # want to run them remote.
- def icecc_toolchain_dependency_emitter(target, source, env):
- if "conftest" not in str(target[0]):
- env.Requires(target, "$ICECC_VERSION")
- return target, source
-
- # Cribbed from Tool/cc.py and Tool/c++.py. It would be better if
- # we could obtain this from SCons.
- _CSuffixes = [".c"]
- if not SCons.Util.case_sensitive_suffixes(".c", ".C"):
- _CSuffixes.append(".C")
-
- _CXXSuffixes = [".cpp", ".cc", ".cxx", ".c++", ".C++"]
- if SCons.Util.case_sensitive_suffixes(".c", ".C"):
- _CXXSuffixes.append(".C")
-
- suffixes = _CSuffixes + _CXXSuffixes
- for object_builder in SCons.Tool.createObjBuilders(env):
- emitterdict = object_builder.builder.emitter
- for suffix in emitterdict.keys():
- if not suffix in suffixes:
- continue
- base = emitterdict[suffix]
- emitterdict[suffix] = SCons.Builder.ListEmitter(
- [base, icecc_toolchain_dependency_emitter]
+ # At this point, all paths above have produced a file of some sort. We now move on
+ # to producing our own signature for this local file.
+
+ setupEnv.Append(
+ ICECREAM_TARGET_BASE_DIR='$ICECREAM_TARGET_DIR',
+ ICECREAM_TARGET_BASE_FILE='$ICECC_VERSION_ID',
+ ICECREAM_TARGET_BASE='$ICECREAM_TARGET_BASE_DIR/$ICECREAM_TARGET_BASE_FILE',
+ )
+
+ # If the file we are planning to use is not within
+ # ICECREAM_TARGET_DIR then make a local copy of it that is.
+ if icecc_version_file.dir != env['ICECREAM_TARGET_DIR']:
+ setupEnv["ICECC_VERSION"] = icecc_version_file = setupEnv.Command(
+ target=[
+ '${ICECREAM_TARGET_BASE}.local',
+ ],
+ source=icecc_version_file,
+ action=SCons.Defaults.Copy('$TARGET', '$SOURCE'),
+ )
+
+ # There is no point caching the copy.
+ setupEnv.NoCache(icecc_version_file)
+
+ # Now, we compute our own signature of the local compiler package,
+ # and create yet another link to the compiler package with a name
+ # containing our computed signature. Now we know that we can give
+ # this filename to icecc and it will be assured to really reflect
+ # the contents of the package, and not the arbitrary naming of the
+ # file as found on the users filesystem or from
+ # icecc-create-env. We put the absolute path to that filename into
+ # a file that we can read from.
+ icecc_version_info = setupEnv.File(setupEnv.Command(
+ target=[
+ '${ICECREAM_TARGET_BASE}.sha256',
+ '${ICECREAM_TARGET_BASE}.sha256.path',
+ ],
+ source=icecc_version_file,
+ action=SCons.Action.ListAction(
+ [
+
+ # icecc-create-env run twice with the same input will
+ # create files with identical contents, and identical
+ # filenames, but with different hashes because it
+ # includes timestamps. So we compute a new hash based
+ # on the actual stream contents of the file by
+ # untarring it into shasum.
+ SCons.Action.Action(
+ "tar xfO ${SOURCES[0]} | shasum -b -a 256 - | awk '{ print $1 }' > ${TARGETS[0]}",
+ "Calculating sha256 sum of ${SOURCES[0]}" if not verbose else str(),
+ ),
+
+ SCons.Action.Action(
+ "ln -f ${SOURCES[0]} ${TARGETS[0].dir}/icecream_py_sha256_$$(cat ${TARGETS[0]}).tar.gz",
+ "Linking ${SOURCES[0]} to its sha256 sum name" if not verbose else str(),
+ ),
+
+ SCons.Action.Action(
+ "echo ${TARGETS[0].dir.abspath}/icecream_py_sha256_$$(cat ${TARGETS[0]}).tar.gz > ${TARGETS[1]}",
+ "Storing sha256 sum name for ${SOURCES[0]} to ${TARGETS[1]}" if not verbose else str(),
)
+ ],
+ )
+ ))
+
+ # We can't allow these to interact with the cache because the
+ # second action produces a file unknown to SCons. If caching were
+ # permitted, the other two files could be retrieved from cache but
+ # the file produced by the second action could not (and would not)
+ # be. We would end up with a broken setup.
+ setupEnv.NoCache(icecc_version_info)
+
+ # Create a value node that, when built, contains the result of
+ # reading the contents of the sha256.path file. This way we can
+ # pull the value out of the file and substitute it into our
+ # wrapper script.
+ icecc_version_string_value = setupEnv.Command(
+ target=setupEnv.Value(None),
+ source=[
+ icecc_version_info[1]
+ ],
+ action=SCons.Action.Action(
+ lambda env, target, source: target[0].write(source[0].get_text_contents()),
+ "Reading compiler package sha256 sum path from $SOURCE" if not verbose else str(),
+ )
+ )[0]
+
+ def icecc_version_string_generator(source, target, env, for_signature):
+ if for_signature:
+ return icecc_version_string_value.get_csig()
+ return icecc_version_string_value.read()
+
+ # Set the values that will be interpolated into the run-icecc script.
+ setupEnv['ICECC_VERSION'] = icecc_version_string_generator
+
+ # If necessary, we include the users desired architecture in the
+ # interpolated file.
+ icecc_version_arch_string = str()
+ if "ICECC_VERSION_ARCH" in setupEnv:
+ icecc_version_arch_string = "${ICECC_VERSION_ARCH}:"
+
+ # Finally, create the run-icecc wrapper script. The contents will
+ # re-invoke icecc with our sha256 sum named file, ensuring that we
+ # trust the signature to be appropriate. In a pure SCons build, we
+ # actually wouldn't need this Substfile, we could just set
+    # env['ENV']['ICECC_VERSION'] to the Value node above. But that
+ # won't work for Ninja builds where we can't ask for the contents
+ # of such a node easily. Creating a Substfile means that SCons
+ # will take care of generating a file that Ninja can use.
+ run_icecc = setupEnv.Textfile(
+ target="$ICECREAM_TARGET_DIR/run-icecc.sh",
+ source=[
+ '#!/bin/sh',
+ 'ICECC_VERSION=@icecc_version_arch@@icecc_version@ exec @icecc@ "$@"',
+ '',
+ ],
+ SUBST_DICT={
+ '@icecc@' : '$ICECC',
+ '@icecc_version@' : '$ICECC_VERSION',
+ '@icecc_version_arch@' : icecc_version_arch_string,
+ },
+
+ # Don't change around the suffixes
+ TEXTFILEPREFIX=str(),
+ TEXTFILESUFFIX=str(),
+
+ # Somewhat surprising, but even though Ninja will defer to
+ # SCons to invoke this, we still need ninja to be aware of it
+ # so that it knows to invoke SCons to produce it as part of
+ # TEMPLATE expansion. Since we have set NINJA_SKIP=True for
+ # setupEnv, we need to reverse that here.
+ NINJA_SKIP=False
+ )
+
+ setupEnv.AddPostAction(
+ run_icecc,
+ action=SCons.Defaults.Chmod('$TARGET', "u+x"),
+ )
+
+ setupEnv.Depends(
+ target=run_icecc,
+ dependency=[
+
+ # TODO: Without the ICECC dependency, changing ICECC doesn't cause the Substfile
+ # to regenerate. Why is this?
+ '$ICECC',
+
+ # This dependency is necessary so that we build into this
+ # string before we create the file.
+ icecc_version_string_value,
+
+ # TODO: SERVER-50587 We need to make explicit depends here because of NINJA_SKIP. Any
+ # dependencies in the nodes created in setupEnv with NINJA_SKIP would have
+ # that dependency chain hidden from ninja, so they won't be rebuilt unless
+ # added as dependencies here on this node that has NINJA_SKIP=False.
+ '$CC',
+ '$CXX',
+ icecc_version_file,
+ ],
+ )
+
+ # From here out, we make changes to the users `env`.
+ setupEnv = None
+
+ env['ICECREAM_RUN_ICECC'] = run_icecc[0]
+
+ def icecc_toolchain_dependency_emitter(target, source, env):
+ if "conftest" not in str(target[0]):
+ # Requires or Depends? There are trade-offs:
+ #
+ # If it is `Depends`, then enabling or disabling icecream
+ # will cause a global recompile. But, if you regenerate a
+ # new compiler package, you will get a rebuild. If it is
+ # `Requires`, then enabling or disabling icecream will not
+ # necessarily cause a global recompile (it depends if
+ # C[,C,XX]FLAGS get changed when you do so), but on the
+ # other hand if you regenerate a new compiler package you
+ # will *not* get a rebuild.
+ #
+ # For now, we are opting for `Requires`, because it seems
+ # preferable that opting in or out of icecream shouldn't
+ # force a rebuild.
+ env.Requires(target, "$ICECREAM_RUN_ICECC")
+ return target, source
+
+ # Cribbed from Tool/cc.py and Tool/c++.py. It would be better if
+ # we could obtain this from SCons.
+ _CSuffixes = [".c"]
+ if not SCons.Util.case_sensitive_suffixes(".c", ".C"):
+ _CSuffixes.append(".C")
+
+ _CXXSuffixes = [".cpp", ".cc", ".cxx", ".c++", ".C++"]
+ if SCons.Util.case_sensitive_suffixes(".c", ".C"):
+ _CXXSuffixes.append(".C")
+
+ suffixes = _CSuffixes + _CXXSuffixes
+ for object_builder in SCons.Tool.createObjBuilders(env):
+ emitterdict = object_builder.builder.emitter
+ for suffix in emitterdict.keys():
+ if not suffix in suffixes:
+ continue
+ base = emitterdict[suffix]
+ emitterdict[suffix] = SCons.Builder.ListEmitter(
+ [base, icecc_toolchain_dependency_emitter]
+ )
+
+ # Check whether ccache is requested and is a valid tool.
+ if "CCACHE" in env:
+ ccache = SCons.Tool.Tool('ccache')
+ ccache_enabled = bool(ccache) and ccache.exists(env)
+ else:
+ ccache_enabled = False
if env.ToolchainIs("clang"):
env["ENV"]["ICECC_CLANG_REMOTE_CPP"] = 1
elif env.ToolchainIs("gcc"):
- if env["ICECREAM_VERSION"] >= _icecream_version_gcc_remote_cpp:
- if ccache_enabled:
- # Newer versions of Icecream will drop -fdirectives-only from
- # preprocessor and compiler flags if it does not find a remote
- # build host to build on. ccache, on the other hand, will not
- # pass the flag to the compiler if CCACHE_NOCPP2=1, but it will
- # pass it to the preprocessor. The combination of setting
- # CCACHE_NOCPP2=1 and passing the flag can lead to build
- # failures.
-
- # See: https://jira.mongodb.org/browse/SERVER-48443
-
- # We have an open issue with Icecream and ccache to resolve the
- # cause of these build failures. Once the bug is resolved and
- # the fix is deployed, we can remove this entire conditional
- # branch and make it like the one for clang.
- # TODO: https://github.com/icecc/icecream/issues/550
- env["ENV"].pop("CCACHE_NOCPP2", None)
- env["ENV"]["CCACHE_CPP2"] = 1
- try:
- env["CCFLAGS"].remove("-fdirectives-only")
- except ValueError:
- pass
- else:
- # If we can, we should make Icecream do its own preprocessing
- # to reduce concurrency on the local host. We should not do
- # this when ccache is in use because ccache will execute
- # Icecream to do its own preprocessing and then execute
- # Icecream as the compiler on the preprocessed source.
- env["ENV"]["ICECC_REMOTE_CPP"] = 1
+ if env["ICECREAM_VERSION"] < _icecream_version_gcc_remote_cpp:
+ # We aren't going to use ICECC_REMOTE_CPP because icecc
+ # 1.1 doesn't offer it. We disallow fallback to local
+ # builds because the fallback is serial execution.
+ env["ENV"]["ICECC_CARET_WORKAROUND"] = 0
+ elif not ccache_enabled:
+ # If we can, we should make Icecream do its own preprocessing
+ # to reduce concurrency on the local host. We should not do
+ # this when ccache is in use because ccache will execute
+ # Icecream to do its own preprocessing and then execute
+ # Icecream as the compiler on the preprocessed source.
+ env["ENV"]["ICECC_REMOTE_CPP"] = 1
if "ICECC_SCHEDULER" in env:
env["ENV"]["USE_SCHEDULER"] = env["ICECC_SCHEDULER"]
- # Build up the string we will set in the environment to tell icecream
- # about the compiler package.
- icecc_version_string = "${ICECC_VERSION.abspath}"
- if "ICECC_VERSION_ARCH" in env:
- icecc_version_string = "${ICECC_VERSION_ARCH}:" + icecc_version_string
-
- # Use our BoundSubstitition class to put ICECC_VERSION into env['ENV'] with
- # substitution in play. This avoids an early subst which can behave
- # strangely.
- env["ENV"]["ICECC_VERSION"] = _BoundSubstitution(env, icecc_version_string)
-
# If ccache is in play we actually want the icecc binary in the
# CCACHE_PREFIX environment variable, not on the command line, per
# the ccache documentation on compiler wrappers. Otherwise, just
@@ -285,19 +460,22 @@ def generate(env):
# compiler flags (things like -fdirectives-only), but we still try
# to do the right thing.
if ccache_enabled:
- env["ENV"]["CCACHE_PREFIX"] = _BoundSubstitution(env, "$ICECC")
+ # If the path to CCACHE_PREFIX isn't absolute, then it will
+ # look it up in PATH. That isn't what we want here, we make
+ # the path absolute.
+ env['ENV']['CCACHE_PREFIX'] = _BoundSubstitution(env, "${ICECREAM_RUN_ICECC.abspath}")
else:
# Make a generator to expand to ICECC in the case where we are
- # not a conftest. We never want to run conftests
- # remotely. Ideally, we would do this for the CCACHE_PREFIX
- # case above, but unfortunately if we did we would never
- # actually see the conftests, because the BoundSubst means
- # that we will never have a meaningful `target` variable when
- # we are in ENV. Instead, rely on the ccache.py tool to do
- # it's own filtering out of conftests.
+ # not a conftest. We never want to run conftests remotely.
+ # Ideally, we would do this for the CCACHE_PREFIX case above,
+ # but unfortunately if we did we would never actually see the
+ # conftests, because the BoundSubst means that we will never
+ # have a meaningful `target` variable when we are in ENV.
+ # Instead, rely on the ccache.py tool to do it's own filtering
+ # out of conftests.
def icecc_generator(target, source, env, for_signature):
if "conftest" not in str(target[0]):
- return '$ICECC'
+ return '$ICECREAM_RUN_ICECC'
return ''
env['ICECC_GENERATOR'] = icecc_generator
@@ -327,22 +505,29 @@ def generate(env):
env[command] = " ".join(["$( $ICERUN $)", env[command]])
# Uncomment these to debug your icecc integration
- # env['ENV']['ICECC_DEBUG'] = 'debug'
- # env['ENV']['ICECC_LOGFILE'] = 'icecc.log'
+ if env['ICECREAM_DEBUG']:
+ env['ENV']['ICECC_DEBUG'] = 'debug'
+ env['ENV']['ICECC_LOGFILE'] = 'icecc.log'
def exists(env):
- # Assume the tool has run if we already know the version.
- if "ICECREAM_VERSION" in env:
- return True
-
- icecc = env.get("ICECC", False)
- if not icecc:
+ if not env.subst("$ICECC"):
return False
- icecc = env.WhereIs(icecc)
+
+ icecc = env.WhereIs("$ICECC")
if not icecc:
+ # TODO: We should not be printing here because we don't always know the
+ # use case for loading this tool. It may be that the user desires
+ # writing this output to a log file or not even displaying it at all.
+ # We should instead be invoking a callback to SConstruct that it can
+ # interpret as needed. Or better yet, we should use some SCons logging
+ # and error API, if and when one should emerge.
+ print(f"Error: icecc not found at {env['ICECC']}")
return False
+ if 'ICECREAM_VERSION' in env and env['ICECREAM_VERSION'] >= _icecream_version_min:
+ return True
+
pipe = SCons.Action._subproc(
env,
SCons.Util.CLVar(icecc) + ["--version"],
@@ -352,9 +537,26 @@ def exists(env):
)
if pipe.wait() != 0:
+ print(f"Error: failed to execute '{env['ICECC']}'")
return False
validated = False
+
+ if "ICERUN" in env:
+ # Absoluteify, for parity with ICECC
+ icerun = env.WhereIs("$ICERUN")
+ else:
+ icerun = env.File("$ICECC").File("icerun")
+ if not icerun:
+ print(f"Error: the icerun wrapper does not exist at {icerun} as expected")
+
+ if "ICECC_CREATE_ENV" in env:
+ icecc_create_env_bin = env.WhereIs("$ICECC_CREATE_ENV")
+ else:
+ icecc_create_env_bin = env.File("ICECC").File("icecc-create-env")
+ if not icecc_create_env_bin:
+ print(f"Error: the icecc-create-env utility does not exist at {icecc_create_env_bin} as expected")
+
for line in pipe.stdout:
line = line.decode("utf-8")
if validated:
@@ -371,5 +573,7 @@ def exists(env):
if validated:
env['ICECREAM_VERSION'] = icecc_version
+ else:
+ print(f"Error: failed to verify icecream version >= {_icecream_version_min}, found {icecc_version}")
return validated
diff --git a/site_scons/site_tools/ninja.py b/site_scons/site_tools/ninja.py
index 0ce56407439..6448bf428b5 100644
--- a/site_scons/site_tools/ninja.py
+++ b/site_scons/site_tools/ninja.py
@@ -28,6 +28,7 @@ import importlib
import io
import shutil
import shlex
+import textwrap
from glob import glob
from os.path import join as joinpath
@@ -138,7 +139,7 @@ def get_order_only(node):
"""Return a list of order only dependencies for node."""
if node.prerequisites is None:
return []
- return [get_path(src_file(prereq)) for prereq in node.prerequisites]
+ return [get_path(src_file(prereq)) for prereq in node.prerequisites if is_valid_dependent_node(prereq)]
def get_dependencies(node, skip_sources=False):
@@ -147,21 +148,44 @@ def get_dependencies(node, skip_sources=False):
return [
get_path(src_file(child))
for child in node.children()
- if child not in node.sources
+ if child not in node.sources and is_valid_dependent_node(child)
]
- return [get_path(src_file(child)) for child in node.children()]
+ return [get_path(src_file(child)) for child in node.children() if is_valid_dependent_node(child)]
-def get_inputs(node):
- """Collect the Ninja inputs for node."""
+def get_inputs(node, skip_unknown_types=False):
+ """
+ Collect the Ninja inputs for node.
+
+ If the given node has inputs which can not be converted into something
+ Ninja can process, this will throw an exception. Optionally, those nodes
+ that are not processable can be skipped as inputs with the
+ skip_unknown_types keyword arg.
+ """
executor = node.get_executor()
if executor is not None:
inputs = executor.get_all_sources()
else:
inputs = node.sources
- inputs = [get_path(src_file(o)) for o in inputs]
- return inputs
+ # Some Nodes (e.g. Python.Value Nodes) won't have files associated. We allow these to be
+ # optionally skipped to enable the case where we will re-invoke SCons for things
+ # like TEMPLATE. Otherwise, we have no direct way to express the behavior for such
+ # Nodes in Ninja, so we raise a hard error
+ ninja_nodes = []
+ for input_node in inputs:
+ if isinstance(input_node, (SCons.Node.FS.Base, SCons.Node.Alias.Alias)):
+ ninja_nodes.append(input_node)
+ else:
+ if skip_unknown_types:
+ continue
+ raise Exception("Can't process {} node '{}' as an input for '{}'".format(
+ type(input_node),
+ str(input_node),
+ str(node)))
+
+ # convert node items into raw paths/aliases for ninja
+ return [get_path(src_file(o)) for o in ninja_nodes]
def get_outputs(node):
@@ -179,6 +203,40 @@ def get_outputs(node):
return outputs
+def generate_depfile(env, node, dependencies):
+ """
+ Ninja tool function for writing a depfile. The depfile should include
+ the node path followed by all the dependent files in a makefile format.
+
+ dependencies arg can be a list or a subst generator which returns a list.
+ """
+
+ depfile = os.path.join(get_path(env['NINJA_BUILDDIR']), str(node) + '.depfile')
+
+ # subst_list will take in either a raw list or a subst callable which generates
+ # a list, and return a list of CmdStringHolders which can be converted into raw strings.
+ # If a raw list was passed in, then scons_list will make a list of lists from the original
+ # values and even subst items in the list if they are substitutable. Flatten will flatten
+ # the list in that case, to ensure for either input we have a list of CmdStringHolders.
+ deps_list = env.Flatten(env.subst_list(dependencies))
+
+ # Now that we have the deps in a list as CmdStringHolders, we can convert them into raw strings
+ # and make sure to escape the strings to handle spaces in paths. We also will sort the result
+ # keep the order of the list consistent.
+ escaped_depends = sorted([dep.escape(env.get("ESCAPE", lambda x: x)) for dep in deps_list])
+ depfile_contents = str(node) + ": " + ' '.join(escaped_depends)
+
+ need_rewrite = False
+ try:
+ with open(depfile, 'r') as f:
+ need_rewrite = (f.read() != depfile_contents)
+ except FileNotFoundError:
+ need_rewrite = True
+
+ if need_rewrite:
+ os.makedirs(os.path.dirname(depfile) or '.', exist_ok=True)
+ with open(depfile, 'w') as f:
+ f.write(depfile_contents)
class SConsToNinjaTranslator:
"""Translates SCons Actions into Ninja build objects."""
@@ -250,11 +308,14 @@ class SConsToNinjaTranslator:
# dependencies don't really matter when we're going to shove these to
# the bottom of ninja's DAG anyway and Textfile builders can have text
# content as their source which doesn't work as an implicit dep in
- # ninja.
+ # ninja. We suppress errors on input Nodes types that we cannot handle
+ # since we expect that the re-invocation of SCons will handle dependency
+ # tracking for those Nodes and their dependents.
if name == "_action":
return {
"rule": "TEMPLATE",
"outputs": get_outputs(node),
+ "inputs": get_inputs(node, skip_unknown_types=True),
"implicit": get_dependencies(node, skip_sources=True),
}
@@ -425,8 +486,16 @@ class NinjaState:
"rspfile_content": "$rspc",
"pool": "local_pool",
},
+ # Ninja does not automatically delete the archive before
+ # invoking ar. The ar utility will append to an existing archive, which
+ # can cause duplicate symbols if the symbols moved between object files.
+ # Native SCons will perform this operation so we need to force ninja
+ # to do the same. See related for more info:
+ # https://jira.mongodb.org/browse/SERVER-49457
"AR": {
- "command": "$env$AR @$out.rsp",
+ "command": "{}$env$AR @$out.rsp".format(
+ '' if sys.platform == "win32" else "rm -f $out && "
+ ),
"description": "Archiving $out",
"rspfile": "$out.rsp",
"rspfile_content": "$rspc",
@@ -486,6 +555,7 @@ class NinjaState:
"command": "$SCONS_INVOCATION_W_TARGETS",
"description": "Regenerating $out",
"generator": 1,
+ "depfile": os.path.join(get_path(env['NINJA_BUILDDIR']), '$out.depfile'),
# Console pool restricts to 1 job running at a time,
# it additionally has some special handling about
# passing stdin, stdout, etc to process in this pool
@@ -564,6 +634,8 @@ class NinjaState:
ninja.comment("Generated by scons. DO NOT EDIT.")
+ ninja.variable("builddir", get_path(self.env['NINJA_BUILDDIR']))
+
for pool_name, size in self.pools.items():
ninja.pool(pool_name, size)
@@ -673,6 +745,16 @@ class NinjaState:
build["outputs"] = first_output
+ # Optionally a rule can specify a depfile, and SCons can generate implicit
+ # dependencies into the depfile. This allows for dependencies to come and go
+ # without invalidating the ninja file. The depfile was created in ninja specifically
+ # for dealing with header files appearing and disappearing across rebuilds, but it can
+ # be repurposed for anything, as long as you have a way to regenerate the depfile.
+ # More specific info can be found here: https://ninja-build.org/manual.html#_depfile
+ if rule is not None and rule.get('depfile') and build.get('deps_files'):
+ path = build['outputs'] if SCons.Util.is_List(build['outputs']) else [build['outputs']]
+ generate_depfile(self.env, path[0], build.pop('deps_files', []))
+
if "inputs" in build:
build["inputs"].sort()
@@ -683,13 +765,13 @@ class NinjaState:
# Special handling for outputs and implicit since we need to
# aggregate not replace for each builder.
- for agg_key in ["outputs", "implicit"]:
+ for agg_key in ["outputs", "implicit", "inputs"]:
new_val = template_builds.get(agg_key, [])
# Use pop so the key is removed and so the update
# below will not overwrite our aggregated values.
cur_val = template_builder.pop(agg_key, [])
- if isinstance(cur_val, list):
+ if is_List(cur_val):
new_val += cur_val
else:
new_val.append(cur_val)
@@ -707,19 +789,25 @@ class NinjaState:
# generate this rule even though SCons should know we're
# dependent on SCons files.
#
- # TODO: We're working on getting an API into SCons that will
- # allow us to query the actual SConscripts used. Right now
- # this glob method has deficiencies like skipping
- # jstests/SConscript and being specific to the MongoDB
- # repository layout.
+ # The REGENERATE rule uses depfile, so we need to generate the depfile
+ # in case any of the SConscripts have changed. The depfile needs to be
+    # a path within the build and the passed ninja file is an abspath, so
+ # we will use SCons to give us the path within the build. Normally
+ # generate_depfile should not be called like this, but instead be called
+ # through the use of custom rules, and filtered out in the normal
+    # list of build generation above. However, because the generate rule
+ # is hardcoded here, we need to do this generate_depfile call manually.
+ ninja_file_path = self.env.File(ninja_file).path
+ generate_depfile(
+ self.env,
+ ninja_file_path,
+ self.env['NINJA_REGENERATE_DEPS']
+ )
+
ninja.build(
- self.env.File(ninja_file).path,
+ ninja_file_path,
rule="REGENERATE",
- implicit=[
- self.env.File("#SConstruct").path,
- __file__,
- ]
- + sorted(glob("src/**/SConscript", recursive=True)),
+ implicit=[__file__],
)
# If we ever change the name/s of the rules that include
@@ -731,8 +819,8 @@ class NinjaState:
pool="console",
implicit=[ninja_file],
variables={
- "cmd": "ninja -f {} -t compdb CC CXX > compile_commands.json".format(
- ninja_file
+ "cmd": "ninja -f {} -t compdb {}CC CXX > compile_commands.json".format(
+ ninja_file, '-x ' if self.env.get('NINJA_COMPDB_EXPAND') else ''
)
},
)
@@ -848,7 +936,13 @@ def get_command_env(env):
if windows:
command_env += "set '{}={}' && ".format(key, value)
else:
- command_env += "{}={} ".format(key, value)
+ # We address here *only* the specific case that a user might have
+ # an environment variable which somehow gets included and has
+ # spaces in the value. These are escapes that Ninja handles. This
+ # doesn't make builds on paths with spaces (Ninja and SCons issues)
+ # nor expanding response file paths with spaces (Ninja issue) work.
+ value = value.replace(r' ', r'$ ')
+ command_env += "{}='{}' ".format(key, value)
env["NINJA_ENV_VAR_CACHE"] = command_env
return command_env
@@ -902,7 +996,7 @@ def gen_get_response_file_command(env, rule, tool, tool_is_dynamic=False):
variables[rule] = cmd
if use_command_env:
variables["env"] = get_command_env(env)
- return rule, variables
+ return rule, variables, [tool_command]
return get_response_file_command
@@ -932,13 +1026,21 @@ def generate_command(env, node, action, targets, sources, executor=None):
return cmd.replace("$", "$$")
-def get_shell_command(env, node, action, targets, sources, executor=None):
+def get_generic_shell_command(env, node, action, targets, sources, executor=None):
return (
"CMD",
{
"cmd": generate_command(env, node, action, targets, sources, executor=None),
"env": get_command_env(env),
},
+ # Since this function is a rule mapping provider, it must return a list of dependencies,
+ # and usually this would be the path to a tool, such as a compiler, used for this rule.
+    # However this function is too generic to be able to reliably extract such deps
+ # from the command, so we return a placeholder empty list. It should be noted that
+    # generally this function will not be used solely and is more like a template to generate
+    # the basics for a custom provider which may have more specific options for a provider
+ # function for a custom NinjaRuleMapping.
+ []
)
@@ -974,12 +1076,39 @@ def get_command(env, node, action): # pylint: disable=too-many-branches
if not comstr:
return None
- provider = __NINJA_RULE_MAPPING.get(comstr, get_shell_command)
- rule, variables = provider(sub_env, node, action, tlist, slist, executor=executor)
+ provider = __NINJA_RULE_MAPPING.get(comstr, get_generic_shell_command)
+ rule, variables, provider_deps = provider(sub_env, node, action, tlist, slist, executor=executor)
# Get the dependencies for all targets
implicit = list({dep for tgt in tlist for dep in get_dependencies(tgt)})
+ # Now add in the other dependencies related to the command,
+ # e.g. the compiler binary. The ninja rule can be user provided so
+ # we must do some validation to resolve the dependency path for ninja.
+ for provider_dep in provider_deps:
+
+ provider_dep = sub_env.subst(provider_dep)
+ if not provider_dep:
+ continue
+
+ # If the tool is a node, then SCons will resolve the path later, if its not
+ # a node then we assume it generated from build and make sure it is existing.
+ if isinstance(provider_dep, SCons.Node.Node) or os.path.exists(provider_dep):
+ implicit.append(provider_dep)
+ continue
+
+ # Many commands will assume the binary is in the path, so
+ # we accept this as a possible input from a given command.
+ provider_dep_abspath = sub_env.WhereIs(provider_dep)
+ if provider_dep_abspath:
+ implicit.append(provider_dep_abspath)
+ continue
+
+    # Possibly these could be ignored and the build would still work, however it may not always
+ # rebuild correctly, so we hard stop, and force the user to fix the issue with the provided
+ # ninja rule.
+ raise Exception(f"Could not resolve path for {provider_dep} dependency on node '{node}'")
+
ninja_build = {
"order_only": get_order_only(node),
"outputs": get_outputs(node),
@@ -1042,18 +1171,21 @@ def register_custom_handler(env, name, handler):
def register_custom_rule_mapping(env, pre_subst_string, rule):
- """Register a custom handler for SCons function actions."""
+ """Register a function to call for a given rule."""
global __NINJA_RULE_MAPPING
__NINJA_RULE_MAPPING[pre_subst_string] = rule
-def register_custom_rule(env, rule, command, description="", deps=None, pool=None):
+def register_custom_rule(env, rule, command, description="", deps=None, pool=None, use_depfile=False):
"""Allows specification of Ninja rules from inside SCons files."""
rule_obj = {
"command": command,
"description": description if description else "{} $out".format(rule),
}
+ if use_depfile:
+ rule_obj["depfile"] = os.path.join(get_path(env['NINJA_BUILDDIR']), '$out.depfile')
+
if deps is not None:
rule_obj["deps"] = deps
@@ -1091,6 +1223,27 @@ def ninja_contents(original):
return wrapper
+def CheckNinjaCompdbExpand(env, context):
+ """ Configure check testing if ninja's compdb can expand response files"""
+
+ context.Message('Checking if ninja compdb can expand response files... ')
+ ret, output = context.TryAction(
+ action='ninja -f $SOURCE -t compdb -x CMD_RSP > $TARGET',
+ extension='.ninja',
+ text=textwrap.dedent("""
+ rule CMD_RSP
+ command = $cmd @$out.rsp > fake_output.txt
+ description = Building $out
+ rspfile = $out.rsp
+ rspfile_content = $rspc
+ build fake_output.txt: CMD_RSP fake_input.txt
+ cmd = echo
+ pool = console
+ rspc = "test"
+ """))
+ result = '@fake_output.txt.rsp' not in output
+ context.Result(result)
+ return result
def ninja_stat(_self, path):
"""
@@ -1201,12 +1354,20 @@ def generate(env):
env["NINJA_PREFIX"] = env.get("NINJA_PREFIX", "build")
env["NINJA_SUFFIX"] = env.get("NINJA_SUFFIX", "ninja")
env["NINJA_ALIAS_NAME"] = env.get("NINJA_ALIAS_NAME", "generate-ninja")
-
+ env['NINJA_BUILDDIR'] = env.get("NINJA_BUILDDIR", env.Dir(".ninja").path)
ninja_file_name = env.subst("${NINJA_PREFIX}.${NINJA_SUFFIX}")
ninja_file = env.Ninja(target=ninja_file_name, source=[])
env.AlwaysBuild(ninja_file)
env.Alias("$NINJA_ALIAS_NAME", ninja_file)
+ # TODO: API for getting the SConscripts programmatically
+ # exists upstream: https://github.com/SCons/scons/issues/3625
+ def ninja_generate_deps(env):
+ return sorted([env.File("#SConstruct").path] + glob("**/SConscript", recursive=True))
+ env['_NINJA_REGENERATE_DEPS_FUNC'] = ninja_generate_deps
+
+ env['NINJA_REGENERATE_DEPS'] = env.get('NINJA_REGENERATE_DEPS', '${_NINJA_REGENERATE_DEPS_FUNC(__env__)}')
+
# This adds the required flags such that the generated compile
# commands will create depfiles as appropriate in the Ninja file.
if env["PLATFORM"] == "win32":
@@ -1214,9 +1375,11 @@ def generate(env):
else:
env.Append(CCFLAGS=["-MMD", "-MF", "${TARGET}.d"])
+ env.AddMethod(CheckNinjaCompdbExpand, "CheckNinjaCompdbExpand")
+
# Provide a way for custom rule authors to easily access command
# generation.
- env.AddMethod(get_shell_command, "NinjaGetShellCommand")
+ env.AddMethod(get_generic_shell_command, "NinjaGetGenericShellCommand")
env.AddMethod(gen_get_response_file_command, "NinjaGenResponseFileProvider")
# Provides a way for users to handle custom FunctionActions they