summaryrefslogtreecommitdiff
path: root/site_scons
diff options
context:
space:
mode:
authorAndrew Morrow <acm@mongodb.com>2021-08-05 11:24:50 -0400
committerEvergreen Agent <no-reply@evergreen.mongodb.com>2021-08-05 19:56:33 +0000
commit67a48464b946e4398948185c849a90f14614b555 (patch)
treefd70e739e8fff41250e5d77332b74fed45074146 /site_scons
parentd9f44b62d17205d90ee9d426044c155951fabb11 (diff)
downloadmongo-67a48464b946e4398948185c849a90f14614b555.tar.gz
SERVER-56580 Promote build-tools=nexts to stable
Diffstat (limited to 'site_scons')
-rw-r--r--site_scons/site_tools/ccache.py28
-rw-r--r--site_scons/site_tools/icecream.py28
-rw-r--r--site_scons/site_tools/next/ccache.py172
-rw-r--r--site_scons/site_tools/next/icecream.py579
-rw-r--r--site_scons/site_tools/next/ninja.py1655
-rw-r--r--site_scons/site_tools/ninja.py32
6 files changed, 64 insertions, 2430 deletions
diff --git a/site_scons/site_tools/ccache.py b/site_scons/site_tools/ccache.py
index a4abcd79e14..2a894919f8d 100644
--- a/site_scons/site_tools/ccache.py
+++ b/site_scons/site_tools/ccache.py
@@ -1,16 +1,24 @@
-# Copyright 2019 MongoDB Inc.
+# Copyright 2020 MongoDB Inc.
#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
import math
import os
diff --git a/site_scons/site_tools/icecream.py b/site_scons/site_tools/icecream.py
index 9cb57571725..7456ed0cc8f 100644
--- a/site_scons/site_tools/icecream.py
+++ b/site_scons/site_tools/icecream.py
@@ -1,16 +1,24 @@
-# Copyright 2017 MongoDB Inc.
+# Copyright 2020 MongoDB Inc.
#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
import os
import re
diff --git a/site_scons/site_tools/next/ccache.py b/site_scons/site_tools/next/ccache.py
deleted file mode 100644
index 2a894919f8d..00000000000
--- a/site_scons/site_tools/next/ccache.py
+++ /dev/null
@@ -1,172 +0,0 @@
-# Copyright 2020 MongoDB Inc.
-#
-# Permission is hereby granted, free of charge, to any person obtaining
-# a copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the Software, and to
-# permit persons to whom the Software is furnished to do so, subject to
-# the following conditions:
-#
-# The above copyright notice and this permission notice shall be included
-# in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-#
-
-import math
-import os
-import re
-import subprocess
-
-import SCons
-from pkg_resources import parse_version
-
-# This is the oldest version of ccache that offers support for -gsplit-dwarf
-_ccache_version_min = parse_version("3.2.3")
-
-
-def exists(env):
- """Look for a viable ccache implementation that meets our version requirements."""
- if not env.subst("$CCACHE"):
- return False
-
- ccache = env.WhereIs("$CCACHE")
- if not ccache:
- print(f"Error: ccache not found at {env['CCACHE']}")
- return False
-
- if 'CCACHE_VERSION' in env and env['CCACHE_VERSION'] >= _ccache_version_min:
- return True
-
- pipe = SCons.Action._subproc(
- env,
- SCons.Util.CLVar(ccache) + ["--version"],
- stdin="devnull",
- stderr="devnull",
- stdout=subprocess.PIPE,
- )
-
- if pipe.wait() != 0:
- print(f"Error: failed to execute '{env['CCACHE']}'")
- return False
-
- validated = False
- for line in pipe.stdout:
- line = line.decode("utf-8")
- if validated:
- continue # consume all data
- version_banner = re.search(r"^ccache version", line)
- if not version_banner:
- continue
- ccache_version = re.split("ccache version (.+)", line)
- if len(ccache_version) < 2:
- continue
- ccache_version = parse_version(ccache_version[1])
- if ccache_version >= _ccache_version_min:
- validated = True
-
- if validated:
- env['CCACHE_VERSION'] = ccache_version
- else:
- print(f"Error: failed to verify ccache version >= {_ccache_version_min}, found {ccache_version}")
-
- return validated
-
-
-def generate(env):
- """Add ccache support."""
-
- # Absoluteify
- env["CCACHE"] = env.WhereIs("$CCACHE")
-
- # Propagate CCACHE related variables into the command environment
- for var, host_value in os.environ.items():
- if var.startswith("CCACHE_"):
- env["ENV"][var] = host_value
-
- # SERVER-48289: Adding roll-your-own CFLAGS and CXXFLAGS can cause some very "weird" issues
- # with using icecc and ccache if they turn out not to be supported by the compiler. Rather
- # than try to filter each and every flag someone might try for the ones we know don't
- # work, we'll just let the compiler ignore them. A better approach might be to pre-filter
- # flags coming in from the environment by passing them through the appropriate *IfSupported
- # method, but that's a much larger effort.
- if env.ToolchainIs("clang"):
- env.AppendUnique(CCFLAGS=["-Qunused-arguments"])
-
- # Check whether icecream is requested and is a valid tool.
- if "ICECC" in env:
- icecream = SCons.Tool.Tool('icecream')
- icecream_enabled = bool(icecream) and icecream.exists(env)
- else:
- icecream_enabled = False
-
- # Set up a performant ccache configuration. Here, we don't use a second preprocessor and
- # pass preprocessor arguments that deterministically expand source files so a stable
- # hash can be calculated on them. This both reduces the amount of work ccache needs to
- # do and increases the likelihood of a cache hit.
- if env.ToolchainIs("clang"):
- env["ENV"].pop("CCACHE_CPP2", None)
- env["ENV"]["CCACHE_NOCPP2"] = "1"
- env.AppendUnique(CCFLAGS=["-frewrite-includes"])
- elif env.ToolchainIs("gcc"):
- if icecream_enabled:
- # Newer versions of Icecream will drop -fdirectives-only from
- # preprocessor and compiler flags if it does not find a remote
- # build host to build on. ccache, on the other hand, will not
- # pass the flag to the compiler if CCACHE_NOCPP2=1, but it will
- # pass it to the preprocessor. The combination of setting
- # CCACHE_NOCPP2=1 and passing the flag can lead to build
- # failures.
-
- # See: https://jira.mongodb.org/browse/SERVER-48443
- # We have an open issue with Icecream and ccache to resolve the
- # cause of these build failures. Once the bug is resolved and
- # the fix is deployed, we can remove this entire conditional
- # branch and make it like the one for clang.
- # TODO: https://github.com/icecc/icecream/issues/550
- env["ENV"].pop("CCACHE_CPP2", None)
- env["ENV"]["CCACHE_NOCPP2"] = "1"
- else:
- env["ENV"].pop("CCACHE_NOCPP2", None)
- env["ENV"]["CCACHE_CPP2"] = "1"
- env.AppendUnique(CCFLAGS=["-fdirectives-only"])
-
- # Ensure ccache accounts for any extra files in use that affects the generated object
- # file. This can be used for situations where a file is passed as an argument to a
- # compiler parameter and differences in the file need to be accounted for in the
- # hash result to prevent erroneous cache hits.
- if "CCACHE_EXTRAFILES" in env and env["CCACHE_EXTRAFILES"]:
- env["ENV"]["CCACHE_EXTRAFILES"] = ":".join([
- blackfile.path
- for blackfile in env["CCACHE_EXTRAFILES"]
- ])
-
- # Make a generator to expand to CCACHE in the case where we are
- # not a conftest. We don't want to use ccache for configure tests
- # because we don't want to use icecream for configure tests, but
- # when icecream and ccache are combined we can't easily filter out
- # configure tests for icecream since in that combination we use
- # CCACHE_PREFIX to express the icecc tool, and at that point it is
- # too late for us to meaningfully filter out conftests. So we just
- # disable ccache for conftests entirely. Which feels safer
- # somehow anyway.
- def ccache_generator(target, source, env, for_signature):
- if "conftest" not in str(target[0]):
- return '$CCACHE'
- return ''
- env['CCACHE_GENERATOR'] = ccache_generator
-
- # Add ccache to the relevant command lines. Wrap the reference to
- # ccache in the $( $) pattern so that turning ccache on or off
- # doesn't invalidate your build.
- env["CCCOM"] = "$( $CCACHE_GENERATOR $)" + env["CCCOM"]
- env["CXXCOM"] = "$( $CCACHE_GENERATOR $)" + env["CXXCOM"]
- env["SHCCCOM"] = "$( $CCACHE_GENERATOR $)" + env["SHCCCOM"]
- env["SHCXXCOM"] = "$( $CCACHE_GENERATOR $)" + env["SHCXXCOM"]
diff --git a/site_scons/site_tools/next/icecream.py b/site_scons/site_tools/next/icecream.py
deleted file mode 100644
index 7456ed0cc8f..00000000000
--- a/site_scons/site_tools/next/icecream.py
+++ /dev/null
@@ -1,579 +0,0 @@
-# Copyright 2020 MongoDB Inc.
-#
-# Permission is hereby granted, free of charge, to any person obtaining
-# a copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the Software, and to
-# permit persons to whom the Software is furnished to do so, subject to
-# the following conditions:
-#
-# The above copyright notice and this permission notice shall be included
-# in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-#
-
-import os
-import re
-import subprocess
-import urllib
-
-from pkg_resources import parse_version
-
-import SCons
-
-_icecream_version_min = parse_version("1.1rc2")
-_icecream_version_gcc_remote_cpp = parse_version("1.2")
-
-
-# I'd prefer to use value here, but amazingly, its __str__ returns the
-# *initial* value of the Value and not the built value, if
-# available. That seems like a bug. In the meantime, make our own very
-# simple Substitution thing.
-class _BoundSubstitution:
- def __init__(self, env, expression):
- self.env = env
- self.expression = expression
- self.result = None
-
- def __str__(self):
- if self.result is None:
- self.result = self.env.subst(self.expression)
- return self.result
-
-
-def icecc_create_env(env, target, source, for_signature):
- # Safe to assume unix here because icecream only works on Unix
- mkdir = "mkdir -p ${TARGET.dir}"
-
- # Create the env, use awk to get just the tarball name and we store it in
- # the shell variable $ICECC_VERSION_TMP so the subsequent mv command and
- # store it in a known location. Add any files requested from the user environment.
- create_env = "ICECC_VERSION_TMP=$$(${SOURCES[0]} --$ICECC_COMPILER_TYPE ${SOURCES[1]} ${SOURCES[2]}"
-
- # TODO: It would be a little more elegant if things in
- # ICECC_CREATE_ENV_ADDFILES were handled as sources, because we
- # would get automatic dependency tracking. However, there are some
- # wrinkles around the mapped case so we have opted to leave it as
- # just interpreting the env for now.
- for addfile in env.get('ICECC_CREATE_ENV_ADDFILES', []):
- if isinstance(addfile, tuple):
- if len(addfile) == 2:
- if env['ICECREAM_VERSION'] > parse_version('1.1'):
- raise Exception("This version of icecream does not support addfile remapping.")
- create_env += " --addfile {}={}".format(
- env.File(addfile[0]).srcnode().abspath,
- env.File(addfile[1]).srcnode().abspath)
- env.Depends(target, addfile[1])
- else:
- raise Exception(f"Found incorrect icecream addfile format: {str(addfile)}" +
- f"\ntuple must two elements of the form" +
- f"\n('chroot dest path', 'source file path')")
- else:
- try:
- create_env += f" --addfile {env.File(addfile).srcnode().abspath}"
- env.Depends(target, addfile)
- except:
- # NOTE: abspath is required by icecream because of
- # this line in icecc-create-env:
- # https://github.com/icecc/icecream/blob/10b9468f5bd30a0fdb058901e91e7a29f1bfbd42/client/icecc-create-env.in#L534
- # which cuts out the two files based off the equals sign and
- # starting slash of the second file
- raise Exception(f"Found incorrect icecream addfile format: {type(addfile)}" +
- f"\nvalue provided cannot be converted to a file path")
-
- create_env += " | awk '/^creating .*\\.tar\\.gz/ { print $$2 }')"
-
- # Simply move our tarball to the expected locale.
- mv = "mv $$ICECC_VERSION_TMP $TARGET"
-
- # Daisy chain the commands and then let SCons Subst in the rest.
- cmdline = f"{mkdir} && {create_env} && {mv}"
- return cmdline
-
-
-def generate(env):
-    # icecc lower than 1.1 supports addfile remapping accidentally
- # and above it adds an empty cpuinfo so handle cpuinfo issues for icecream
- # below version 1.1
- if (env['ICECREAM_VERSION'] <= parse_version('1.1')
- and env.ToolchainIs("clang")
- and os.path.exists('/proc/cpuinfo')):
- env.AppendUnique(ICECC_CREATE_ENV_ADDFILES=[('/proc/cpuinfo', '/dev/null')])
-
- # Absoluteify, so we can derive ICERUN
- env["ICECC"] = env.WhereIs("$ICECC")
-
- if "ICERUN" in env:
- # Absoluteify, for parity with ICECC
- icerun = env.WhereIs("$ICERUN")
- else:
- icerun = env.File("$ICECC").File("icerun")
- env["ICERUN"] = icerun
-
- if "ICECC_CREATE_ENV" in env:
- icecc_create_env_bin = env.WhereIs("$ICECC_CREATE_ENV")
- else:
- icecc_create_env_bin = env.File("ICECC").File("icecc-create-env")
- env["ICECC_CREATE_ENV"] = icecc_create_env_bin
-
- # Make CC and CXX absolute paths too. This ensures the correct paths to
- # compilers get passed to icecc-create-env rather than letting it
- # potentially discover something we don't expect via PATH.
- env["CC"] = env.WhereIs("$CC")
- env["CXX"] = env.WhereIs("$CXX")
-
- # Set up defaults for configuration options
- env['ICECREAM_TARGET_DIR'] = env.Dir(
- env.get('ICECREAM_TARGET_DIR', '#./.icecream')
- )
- verbose = env.get('ICECREAM_VERBOSE', False)
- env['ICECREAM_DEBUG'] = env.get('ICECREAM_DEBUG', False)
-
- # We have a lot of things to build and run that the final user
- # environment doesn't need to see or know about. Make a custom env
- # that we use consistently from here to where we end up setting
- # ICECREAM_RUN_ICECC in the user env.
- setupEnv = env.Clone(
- NINJA_SKIP=True
- )
-
- if 'ICECC_VERSION' in setupEnv and bool(setupEnv['ICECC_VERSION']):
-
- if setupEnv["ICECC_VERSION"].startswith("http"):
-
- quoted = urllib.parse.quote(setupEnv['ICECC_VERSION'], safe=[])
-
- # Use curl / wget to download the toolchain because SCons (and ninja)
- # are better at running shell commands than Python functions.
- #
- # TODO: This all happens SCons side now. Should we just use python to
- # fetch instead?
- curl = setupEnv.WhereIs("curl")
- wget = setupEnv.WhereIs("wget")
-
- if curl:
- cmdstr = "curl -L"
- elif wget:
- cmdstr = "wget"
- else:
- raise Exception(
- "You have specified an ICECC_VERSION that is a URL but you have neither wget nor curl installed."
- )
-
- # Copy ICECC_VERSION into ICECC_VERSION_URL so that we can
- # change ICECC_VERSION without perturbing the effect of
- # the action.
- setupEnv['ICECC_VERSION_URL'] = setupEnv['ICECC_VERSION']
- setupEnv['ICECC_VERSION'] = icecc_version_file = setupEnv.Command(
- target=f"$ICECREAM_TARGET_DIR/{quoted}",
- source=[setupEnv.Value(quoted)],
- action=SCons.Action.Action(
- f"{cmdstr} -o $TARGET $ICECC_VERSION_URL",
- "Downloading compiler package from $ICECC_VERSION_URL" if not verbose else str(),
- ),
- )[0]
-
- else:
- # Convert the users selection into a File node and do some basic validation
- setupEnv['ICECC_VERSION'] = icecc_version_file = setupEnv.File('$ICECC_VERSION')
-
- if not icecc_version_file.exists():
- raise Exception(
- 'The ICECC_VERSION variable set set to {}, but this file does not exist'.format(icecc_version_file)
- )
-
- # This is what we are going to call the file names as known to SCons on disk
- setupEnv["ICECC_VERSION_ID"] = "user_provided." + icecc_version_file.name
-
- else:
-
- setupEnv["ICECC_COMPILER_TYPE"] = setupEnv.get(
- "ICECC_COMPILER_TYPE", os.path.basename(setupEnv.WhereIs("${CC}"))
- )
-
- # This is what we are going to call the file names as known to SCons on disk. We do the
- # subst early so that we can call `replace` on the result.
- setupEnv["ICECC_VERSION_ID"] = setupEnv.subst(
- "icecc-create-env.${CC}${CXX}.tar.gz").replace("/", "_"
- )
-
- setupEnv["ICECC_VERSION"] = icecc_version_file = setupEnv.Command(
- target="$ICECREAM_TARGET_DIR/$ICECC_VERSION_ID",
- source=[
- "$ICECC_CREATE_ENV",
- "$CC",
- "$CXX"
- ],
- action=SCons.Action.Action(
- icecc_create_env,
- "Generating icecream compiler package: $TARGET" if not verbose else str(),
- generator=True,
- )
- )[0]
-
- # At this point, all paths above have produced a file of some sort. We now move on
- # to producing our own signature for this local file.
-
- setupEnv.Append(
- ICECREAM_TARGET_BASE_DIR='$ICECREAM_TARGET_DIR',
- ICECREAM_TARGET_BASE_FILE='$ICECC_VERSION_ID',
- ICECREAM_TARGET_BASE='$ICECREAM_TARGET_BASE_DIR/$ICECREAM_TARGET_BASE_FILE',
- )
-
- # If the file we are planning to use is not within
- # ICECREAM_TARGET_DIR then make a local copy of it that is.
- if icecc_version_file.dir != env['ICECREAM_TARGET_DIR']:
- setupEnv["ICECC_VERSION"] = icecc_version_file = setupEnv.Command(
- target=[
- '${ICECREAM_TARGET_BASE}.local',
- ],
- source=icecc_version_file,
- action=SCons.Defaults.Copy('$TARGET', '$SOURCE'),
- )
-
- # There is no point caching the copy.
- setupEnv.NoCache(icecc_version_file)
-
- # Now, we compute our own signature of the local compiler package,
- # and create yet another link to the compiler package with a name
- # containing our computed signature. Now we know that we can give
- # this filename to icecc and it will be assured to really reflect
- # the contents of the package, and not the arbitrary naming of the
- # file as found on the users filesystem or from
- # icecc-create-env. We put the absolute path to that filename into
- # a file that we can read from.
- icecc_version_info = setupEnv.File(setupEnv.Command(
- target=[
- '${ICECREAM_TARGET_BASE}.sha256',
- '${ICECREAM_TARGET_BASE}.sha256.path',
- ],
- source=icecc_version_file,
- action=SCons.Action.ListAction(
- [
-
- # icecc-create-env run twice with the same input will
- # create files with identical contents, and identical
- # filenames, but with different hashes because it
- # includes timestamps. So we compute a new hash based
- # on the actual stream contents of the file by
- # untarring it into shasum.
- SCons.Action.Action(
- "tar xfO ${SOURCES[0]} | shasum -b -a 256 - | awk '{ print $1 }' > ${TARGETS[0]}",
- "Calculating sha256 sum of ${SOURCES[0]}" if not verbose else str(),
- ),
-
- SCons.Action.Action(
- "ln -f ${SOURCES[0]} ${TARGETS[0].dir}/icecream_py_sha256_$$(cat ${TARGETS[0]}).tar.gz",
- "Linking ${SOURCES[0]} to its sha256 sum name" if not verbose else str(),
- ),
-
- SCons.Action.Action(
- "echo ${TARGETS[0].dir.abspath}/icecream_py_sha256_$$(cat ${TARGETS[0]}).tar.gz > ${TARGETS[1]}",
- "Storing sha256 sum name for ${SOURCES[0]} to ${TARGETS[1]}" if not verbose else str(),
- )
- ],
- )
- ))
-
- # We can't allow these to interact with the cache because the
- # second action produces a file unknown to SCons. If caching were
- # permitted, the other two files could be retrieved from cache but
- # the file produced by the second action could not (and would not)
- # be. We would end up with a broken setup.
- setupEnv.NoCache(icecc_version_info)
-
- # Create a value node that, when built, contains the result of
- # reading the contents of the sha256.path file. This way we can
- # pull the value out of the file and substitute it into our
- # wrapper script.
- icecc_version_string_value = setupEnv.Command(
- target=setupEnv.Value(None),
- source=[
- icecc_version_info[1]
- ],
- action=SCons.Action.Action(
- lambda env, target, source: target[0].write(source[0].get_text_contents()),
- "Reading compiler package sha256 sum path from $SOURCE" if not verbose else str(),
- )
- )[0]
-
- def icecc_version_string_generator(source, target, env, for_signature):
- if for_signature:
- return icecc_version_string_value.get_csig()
- return icecc_version_string_value.read()
-
- # Set the values that will be interpolated into the run-icecc script.
- setupEnv['ICECC_VERSION'] = icecc_version_string_generator
-
- # If necessary, we include the users desired architecture in the
- # interpolated file.
- icecc_version_arch_string = str()
- if "ICECC_VERSION_ARCH" in setupEnv:
- icecc_version_arch_string = "${ICECC_VERSION_ARCH}:"
-
- # Finally, create the run-icecc wrapper script. The contents will
- # re-invoke icecc with our sha256 sum named file, ensuring that we
- # trust the signature to be appropriate. In a pure SCons build, we
- # actually wouldn't need this Substfile, we could just set
- # env['ENV]['ICECC_VERSION'] to the Value node above. But that
- # won't work for Ninja builds where we can't ask for the contents
- # of such a node easily. Creating a Substfile means that SCons
- # will take care of generating a file that Ninja can use.
- run_icecc = setupEnv.Textfile(
- target="$ICECREAM_TARGET_DIR/run-icecc.sh",
- source=[
- '#!/bin/sh',
- 'ICECC_VERSION=@icecc_version_arch@@icecc_version@ exec @icecc@ "$@"',
- '',
- ],
- SUBST_DICT={
- '@icecc@' : '$ICECC',
- '@icecc_version@' : '$ICECC_VERSION',
- '@icecc_version_arch@' : icecc_version_arch_string,
- },
-
- # Don't change around the suffixes
- TEXTFILEPREFIX=str(),
- TEXTFILESUFFIX=str(),
-
- # Somewhat surprising, but even though Ninja will defer to
- # SCons to invoke this, we still need ninja to be aware of it
- # so that it knows to invoke SCons to produce it as part of
- # TEMPLATE expansion. Since we have set NINJA_SKIP=True for
- # setupEnv, we need to reverse that here.
- NINJA_SKIP=False
- )
-
- setupEnv.AddPostAction(
- run_icecc,
- action=SCons.Defaults.Chmod('$TARGET', "u+x"),
- )
-
- setupEnv.Depends(
- target=run_icecc,
- dependency=[
-
- # TODO: Without the ICECC dependency, changing ICECC doesn't cause the Substfile
- # to regenerate. Why is this?
- '$ICECC',
-
- # This dependency is necessary so that we build into this
- # string before we create the file.
- icecc_version_string_value,
-
- # TODO: SERVER-50587 We need to make explicit depends here because of NINJA_SKIP. Any
- # dependencies in the nodes created in setupEnv with NINJA_SKIP would have
- # that dependency chain hidden from ninja, so they won't be rebuilt unless
- # added as dependencies here on this node that has NINJA_SKIP=False.
- '$CC',
- '$CXX',
- icecc_version_file,
- ],
- )
-
- # From here out, we make changes to the users `env`.
- setupEnv = None
-
- env['ICECREAM_RUN_ICECC'] = run_icecc[0]
-
- def icecc_toolchain_dependency_emitter(target, source, env):
- if "conftest" not in str(target[0]):
- # Requires or Depends? There are trade-offs:
- #
- # If it is `Depends`, then enabling or disabling icecream
- # will cause a global recompile. But, if you regenerate a
- # new compiler package, you will get a rebuild. If it is
- # `Requires`, then enabling or disabling icecream will not
- # necessarily cause a global recompile (it depends if
- # C[,C,XX]FLAGS get changed when you do so), but on the
- # other hand if you regenerate a new compiler package you
- # will *not* get a rebuild.
- #
- # For now, we are opting for `Requires`, because it seems
- # preferable that opting in or out of icecream shouldn't
- # force a rebuild.
- env.Requires(target, "$ICECREAM_RUN_ICECC")
- return target, source
-
- # Cribbed from Tool/cc.py and Tool/c++.py. It would be better if
- # we could obtain this from SCons.
- _CSuffixes = [".c"]
- if not SCons.Util.case_sensitive_suffixes(".c", ".C"):
- _CSuffixes.append(".C")
-
- _CXXSuffixes = [".cpp", ".cc", ".cxx", ".c++", ".C++"]
- if SCons.Util.case_sensitive_suffixes(".c", ".C"):
- _CXXSuffixes.append(".C")
-
- suffixes = _CSuffixes + _CXXSuffixes
- for object_builder in SCons.Tool.createObjBuilders(env):
- emitterdict = object_builder.builder.emitter
- for suffix in emitterdict.keys():
- if not suffix in suffixes:
- continue
- base = emitterdict[suffix]
- emitterdict[suffix] = SCons.Builder.ListEmitter(
- [base, icecc_toolchain_dependency_emitter]
- )
-
- # Check whether ccache is requested and is a valid tool.
- if "CCACHE" in env:
- ccache = SCons.Tool.Tool('ccache')
- ccache_enabled = bool(ccache) and ccache.exists(env)
- else:
- ccache_enabled = False
-
- if env.ToolchainIs("clang"):
- env["ENV"]["ICECC_CLANG_REMOTE_CPP"] = 1
- elif env.ToolchainIs("gcc"):
- if env["ICECREAM_VERSION"] < _icecream_version_gcc_remote_cpp:
- # We aren't going to use ICECC_REMOTE_CPP because icecc
- # 1.1 doesn't offer it. We disallow fallback to local
- # builds because the fallback is serial execution.
- env["ENV"]["ICECC_CARET_WORKAROUND"] = 0
- elif not ccache_enabled:
- # If we can, we should make Icecream do its own preprocessing
- # to reduce concurrency on the local host. We should not do
- # this when ccache is in use because ccache will execute
- # Icecream to do its own preprocessing and then execute
- # Icecream as the compiler on the preprocessed source.
- env["ENV"]["ICECC_REMOTE_CPP"] = 1
-
- if "ICECC_SCHEDULER" in env:
- env["ENV"]["USE_SCHEDULER"] = env["ICECC_SCHEDULER"]
-
- # If ccache is in play we actually want the icecc binary in the
- # CCACHE_PREFIX environment variable, not on the command line, per
- # the ccache documentation on compiler wrappers. Otherwise, just
- # put $ICECC on the command line. We wrap it in the magic "don't
- # consider this part of the build signature" sigils in the hope
- # that enabling and disabling icecream won't cause rebuilds. This
- # is unlikely to really work, since above we have maybe changed
- # compiler flags (things like -fdirectives-only), but we still try
- # to do the right thing.
- if ccache_enabled:
- # If the path to CCACHE_PREFIX isn't absolute, then it will
- # look it up in PATH. That isn't what we want here, we make
- # the path absolute.
- env['ENV']['CCACHE_PREFIX'] = _BoundSubstitution(env, "${ICECREAM_RUN_ICECC.abspath}")
- else:
- # Make a generator to expand to ICECC in the case where we are
- # not a conftest. We never want to run conftests remotely.
- # Ideally, we would do this for the CCACHE_PREFIX case above,
- # but unfortunately if we did we would never actually see the
- # conftests, because the BoundSubst means that we will never
- # have a meaningful `target` variable when we are in ENV.
-    # Instead, rely on the ccache.py tool to do its own filtering
- # out of conftests.
- def icecc_generator(target, source, env, for_signature):
- if "conftest" not in str(target[0]):
- return '$ICECREAM_RUN_ICECC'
- return ''
- env['ICECC_GENERATOR'] = icecc_generator
-
- icecc_string = "$( $ICECC_GENERATOR $)"
- env["CCCOM"] = " ".join([icecc_string, env["CCCOM"]])
- env["CXXCOM"] = " ".join([icecc_string, env["CXXCOM"]])
- env["SHCCCOM"] = " ".join([icecc_string, env["SHCCCOM"]])
- env["SHCXXCOM"] = " ".join([icecc_string, env["SHCXXCOM"]])
-
- # Make common non-compile jobs flow through icerun so we don't
- # kill the local machine. It would be nice to plumb ICERUN in via
- # SPAWN or SHELL but it is too much. You end up running `icerun
- # icecc ...`, and icecream doesn't handle that. We could try to
- # filter and only apply icerun if icecc wasn't present but that
- # seems fragile. If you find your local machine being overrun by
- # jobs, figure out what sort they are and extend this part of the
- # setup.
- icerun_commands = [
- "ARCOM",
- "LINKCOM",
- "PYTHON",
- "SHLINKCOM",
- ]
-
- for command in icerun_commands:
- if command in env:
- env[command] = " ".join(["$( $ICERUN $)", env[command]])
-
- # Uncomment these to debug your icecc integration
- if env['ICECREAM_DEBUG']:
- env['ENV']['ICECC_DEBUG'] = 'debug'
- env['ENV']['ICECC_LOGFILE'] = 'icecc.log'
-
-
-def exists(env):
- if not env.subst("$ICECC"):
- return False
-
- icecc = env.WhereIs("$ICECC")
- if not icecc:
- # TODO: We should not be printing here because we don't always know the
- # use case for loading this tool. It may be that the user desires
- # writing this output to a log file or not even displaying it at all.
- # We should instead be invoking a callback to SConstruct that it can
- # interpret as needed. Or better yet, we should use some SCons logging
- # and error API, if and when one should emerge.
- print(f"Error: icecc not found at {env['ICECC']}")
- return False
-
- if 'ICECREAM_VERSION' in env and env['ICECREAM_VERSION'] >= _icecream_version_min:
- return True
-
- pipe = SCons.Action._subproc(
- env,
- SCons.Util.CLVar(icecc) + ["--version"],
- stdin="devnull",
- stderr="devnull",
- stdout=subprocess.PIPE,
- )
-
- if pipe.wait() != 0:
- print(f"Error: failed to execute '{env['ICECC']}'")
- return False
-
- validated = False
-
- if "ICERUN" in env:
- # Absoluteify, for parity with ICECC
- icerun = env.WhereIs("$ICERUN")
- else:
- icerun = env.File("$ICECC").File("icerun")
- if not icerun:
- print(f"Error: the icerun wrapper does not exist at {icerun} as expected")
-
- if "ICECC_CREATE_ENV" in env:
- icecc_create_env_bin = env.WhereIs("$ICECC_CREATE_ENV")
- else:
- icecc_create_env_bin = env.File("ICECC").File("icecc-create-env")
- if not icecc_create_env_bin:
- print(f"Error: the icecc-create-env utility does not exist at {icecc_create_env_bin} as expected")
-
- for line in pipe.stdout:
- line = line.decode("utf-8")
- if validated:
- continue # consume all data
- version_banner = re.search(r"^ICECC ", line)
- if not version_banner:
- continue
- icecc_version = re.split("ICECC (.+)", line)
- if len(icecc_version) < 2:
- continue
- icecc_version = parse_version(icecc_version[1])
- if icecc_version >= _icecream_version_min:
- validated = True
-
- if validated:
- env['ICECREAM_VERSION'] = icecc_version
- else:
- print(f"Error: failed to verify icecream version >= {_icecream_version_min}, found {icecc_version}")
-
- return validated
diff --git a/site_scons/site_tools/next/ninja.py b/site_scons/site_tools/next/ninja.py
deleted file mode 100644
index acb85d42da4..00000000000
--- a/site_scons/site_tools/next/ninja.py
+++ /dev/null
@@ -1,1655 +0,0 @@
-# Copyright 2019 MongoDB Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Generate build.ninja files from SCons aliases."""
-
-import sys
-import os
-import importlib
-import io
-import shutil
-import shlex
-import textwrap
-
-from glob import glob
-from os.path import join as joinpath
-from os.path import splitext
-
-import SCons
-from SCons.Action import _string_from_cmd_list, get_default_ENV
-from SCons.Util import is_List, flatten_sequence
-from SCons.Script import COMMAND_LINE_TARGETS
-
-NINJA_STATE = None
-NINJA_SYNTAX = "NINJA_SYNTAX"
-NINJA_RULES = "__NINJA_CUSTOM_RULES"
-NINJA_POOLS = "__NINJA_CUSTOM_POOLS"
-NINJA_CUSTOM_HANDLERS = "__NINJA_CUSTOM_HANDLERS"
-NINJA_BUILD = "NINJA_BUILD"
-NINJA_WHEREIS_MEMO = {}
-NINJA_STAT_MEMO = {}
-
-__NINJA_RULE_MAPPING = {}
-
-# These are the types that get_command can do something with
-COMMAND_TYPES = (
- SCons.Action.CommandAction,
- SCons.Action.CommandGeneratorAction,
-)
-
-
-def _install_action_function(_env, node):
- """Install files using the install or copy commands"""
- return {
- "outputs": get_outputs(node),
- "rule": "INSTALL",
- "inputs": [get_path(src_file(s)) for s in node.sources],
- "implicit": get_dependencies(node),
- }
-
-
-def _mkdir_action_function(env, node):
- return {
- "outputs": get_outputs(node),
- "rule": "CMD",
- # implicit explicitly omitted, we translate these so they can be
- # used by anything that depends on these but commonly this is
- # hit with a node that will depend on all of the fake
- # srcnode's that SCons will never give us a rule for leading
- # to an invalid ninja file.
- "variables": {
- # On Windows mkdir "-p" is always on
- "cmd": "{mkdir} $out".format(
- mkdir="mkdir" if env["PLATFORM"] == "win32" else "mkdir -p",
- ),
- },
- }
-
-
-def _lib_symlink_action_function(_env, node):
- """Create shared object symlinks if any need to be created"""
- symlinks = getattr(getattr(node, "attributes", None), "shliblinks", None)
-
- if not symlinks or symlinks is None:
- return None
-
- outputs = [link.get_dir().rel_path(linktgt) for link, linktgt in symlinks]
- inputs = [link.get_path() for link, _ in symlinks]
-
- return {
- "outputs": outputs,
- "inputs": inputs,
- "rule": "SYMLINK",
- "implicit": get_dependencies(node),
- }
-
-
-def is_valid_dependent_node(node):
- """
- Return True if node is not an alias or is an alias that has children
-
- This prevents us from making phony targets that depend on other
- phony targets that will never have an associated ninja build
- target.
-
- We also have to specify that it's an alias when doing the builder
- check because some nodes (like src files) won't have builders but
- are valid implicit dependencies.
- """
- if isinstance(node, SCons.Node.Alias.Alias):
- return node.children()
-
- if not node.env:
- return True
-
- return not node.env.get("NINJA_SKIP")
-
-
-def alias_to_ninja_build(node):
- """Convert an Alias node into a Ninja phony target"""
- return {
- "outputs": get_outputs(node),
- "rule": "phony",
- "implicit": [
- get_path(src_file(n)) for n in node.children() if is_valid_dependent_node(n)
- ],
- }
-
-
-def get_order_only(node):
- """Return a list of order only dependencies for node."""
- if node.prerequisites is None:
- return []
- return [get_path(src_file(prereq)) for prereq in node.prerequisites if is_valid_dependent_node(prereq)]
-
-
-def get_dependencies(node, skip_sources=False):
- """Return a list of dependencies for node."""
- if skip_sources:
- return [
- get_path(src_file(child))
- for child in node.children()
- if child not in node.sources and is_valid_dependent_node(child)
- ]
- return [get_path(src_file(child)) for child in node.children() if is_valid_dependent_node(child)]
-
-
-def get_inputs(node, skip_unknown_types=False):
- """
- Collect the Ninja inputs for node.
-
- If the given node has inputs which can not be converted into something
- Ninja can process, this will throw an exception. Optionally, those nodes
- that are not processable can be skipped as inputs with the
- skip_unknown_types keyword arg.
- """
- executor = node.get_executor()
- if executor is not None:
- inputs = executor.get_all_sources()
- else:
- inputs = node.sources
-
- # Some Nodes (e.g. Python.Value Nodes) won't have files associated. We allow these to be
- # optionally skipped to enable the case where we will re-invoke SCons for things
- # like TEMPLATE. Otherwise, we have no direct way to express the behavior for such
- # Nodes in Ninja, so we raise a hard error
- ninja_nodes = []
- for input_node in inputs:
- if isinstance(input_node, (SCons.Node.FS.Base, SCons.Node.Alias.Alias)):
- ninja_nodes.append(input_node)
- else:
- if skip_unknown_types:
- continue
- raise Exception("Can't process {} node '{}' as an input for '{}'".format(
- type(input_node),
- str(input_node),
- str(node)))
-
- # convert node items into raw paths/aliases for ninja
- return [get_path(src_file(o)) for o in ninja_nodes]
-
-
-def get_outputs(node):
- """Collect the Ninja outputs for node."""
- executor = node.get_executor()
- if executor is not None:
- outputs = executor.get_all_targets()
- else:
- if hasattr(node, "target_peers"):
- outputs = node.target_peers
- else:
- outputs = [node]
-
- outputs = [get_path(o) for o in outputs]
-
- return outputs
-
-def generate_depfile(env, node, dependencies):
- """
- Ninja tool function for writing a depfile. The depfile should include
- the node path followed by all the dependent files in a makefile format.
-
- dependencies arg can be a list or a subst generator which returns a list.
- """
-
- depfile = os.path.join(get_path(env['NINJA_BUILDDIR']), str(node) + '.depfile')
-
- # subst_list will take in either a raw list or a subst callable which generates
- # a list, and return a list of CmdStringHolders which can be converted into raw strings.
- # If a raw list was passed in, then scons_list will make a list of lists from the original
- # values and even subst items in the list if they are substitutable. Flatten will flatten
- # the list in that case, to ensure for either input we have a list of CmdStringHolders.
- deps_list = env.Flatten(env.subst_list(dependencies))
-
- # Now that we have the deps in a list as CmdStringHolders, we can convert them into raw strings
- # and make sure to escape the strings to handle spaces in paths. We also will sort the result
- # keep the order of the list consistent.
- escaped_depends = sorted([dep.escape(env.get("ESCAPE", lambda x: x)) for dep in deps_list])
- depfile_contents = str(node) + ": " + ' '.join(escaped_depends)
-
- need_rewrite = False
- try:
- with open(depfile, 'r') as f:
- need_rewrite = (f.read() != depfile_contents)
- except FileNotFoundError:
- need_rewrite = True
-
- if need_rewrite:
- os.makedirs(os.path.dirname(depfile) or '.', exist_ok=True)
- with open(depfile, 'w') as f:
- f.write(depfile_contents)
-
-class SConsToNinjaTranslator:
- """Translates SCons Actions into Ninja build objects."""
-
- def __init__(self, env):
- self.env = env
- self.func_handlers = {
- # Skip conftest builders
- "_createSource": ninja_noop,
- # SCons has a custom FunctionAction that just makes sure the
- # target isn't static. We let the commands that ninja runs do
- # this check for us.
- "SharedFlagChecker": ninja_noop,
- # The install builder is implemented as a function action.
- "installFunc": _install_action_function,
- "MkdirFunc": _mkdir_action_function,
- "LibSymlinksActionFunction": _lib_symlink_action_function,
- }
-
- self.loaded_custom = False
-
- # pylint: disable=too-many-return-statements
- def action_to_ninja_build(self, node, action=None):
- """Generate build arguments dictionary for node."""
- if not self.loaded_custom:
- self.func_handlers.update(self.env[NINJA_CUSTOM_HANDLERS])
- self.loaded_custom = True
-
- if node.builder is None:
- return None
-
- if action is None:
- action = node.builder.action
-
- if node.env and node.env.get("NINJA_SKIP"):
- return None
-
- build = {}
- env = node.env if node.env else self.env
-
- # Ideally this should never happen, and we do try to filter
- # Ninja builders out of being sources of ninja builders but I
- # can't fix every DAG problem so we just skip ninja_builders
- # if we find one
- if node.builder == self.env["BUILDERS"]["Ninja"]:
- build = None
- elif isinstance(action, SCons.Action.FunctionAction):
- build = self.handle_func_action(node, action)
- elif isinstance(action, SCons.Action.LazyAction):
- # pylint: disable=protected-access
- action = action._generate_cache(env)
- build = self.action_to_ninja_build(node, action=action)
- elif isinstance(action, SCons.Action.ListAction):
- build = self.handle_list_action(node, action)
- elif isinstance(action, COMMAND_TYPES):
- build = get_command(env, node, action)
- else:
- raise Exception("Got an unbuildable ListAction for: {}".format(str(node)))
-
- if build is not None:
- build["order_only"] = get_order_only(node)
-
- if 'conftest' not in str(node):
- node_callback = getattr(node.attributes, "ninja_build_callback", None)
- if callable(node_callback):
- node_callback(env, node, build)
-
- return build
-
- def handle_func_action(self, node, action):
- """Determine how to handle the function action."""
- name = action.function_name()
- # This is the name given by the Subst/Textfile builders. So return the
- # node to indicate that SCons is required. We skip sources here because
- # dependencies don't really matter when we're going to shove these to
- # the bottom of ninja's DAG anyway and Textfile builders can have text
- # content as their source which doesn't work as an implicit dep in
- # ninja. We suppress errors on input Nodes types that we cannot handle
- # since we expect that the re-invocation of SCons will handle dependency
- # tracking for those Nodes and their dependents.
- if name == "_action":
- return {
- "rule": "TEMPLATE",
- "outputs": get_outputs(node),
- "inputs": get_inputs(node, skip_unknown_types=True),
- "implicit": get_dependencies(node, skip_sources=True),
- }
-
- handler = self.func_handlers.get(name, None)
- if handler is not None:
- return handler(node.env if node.env else self.env, node)
-
- raise Exception(
- "Found unhandled function action {}, "
- " generating scons command to build\n"
- "Note: this is less efficient than Ninja,"
- " you can write your own ninja build generator for"
- " this function using NinjaRegisterFunctionHandler".format(name)
- )
-
- # pylint: disable=too-many-branches
- def handle_list_action(self, node, action):
- """TODO write this comment"""
- results = [
- self.action_to_ninja_build(node, action=act)
- for act in action.list
- if act is not None
- ]
- results = [
- result for result in results if result is not None and result["outputs"]
- ]
- if not results:
- return None
-
- # No need to process the results if we only got a single result
- if len(results) == 1:
- return results[0]
-
- all_outputs = list({output for build in results for output in build["outputs"]})
- dependencies = list({dep for build in results for dep in build["implicit"]})
-
- if results[0]["rule"] == "CMD":
- cmdline = ""
- for cmd in results:
-
- # Occasionally a command line will expand to a
- # whitespace only string (i.e. ' '). Which is not a
- # valid command but does not trigger the empty command
- # condition if not cmdstr. So here we strip preceding
- # and proceeding whitespace to make strings like the
- # above become empty strings and so will be skipped.
- cmdstr = cmd["variables"]["cmd"].strip()
- if not cmdstr:
- continue
-
- # Skip duplicate commands
- if cmdstr in cmdline:
- continue
-
- if cmdline:
- cmdline += " && "
-
- cmdline += cmdstr
-
- # Remove all preceding and proceeding whitespace
- cmdline = cmdline.strip()
-
- # Make sure we didn't generate an empty cmdline
- if cmdline:
- ninja_build = {
- "outputs": all_outputs,
- "rule": "CMD",
- "variables": {
- "cmd": cmdline,
- "env": get_command_env(node.env if node.env else self.env),
- },
- "implicit": dependencies,
- }
-
- if node.env and node.env.get("NINJA_POOL", None) is not None:
- ninja_build["pool"] = node.env["pool"]
-
- return ninja_build
-
- elif results[0]["rule"] == "phony":
- return {
- "outputs": all_outputs,
- "rule": "phony",
- "implicit": dependencies,
- }
-
- elif results[0]["rule"] == "INSTALL":
- return {
- "outputs": all_outputs,
- "rule": "INSTALL",
- "inputs": [get_path(src_file(s)) for s in node.sources],
- "implicit": dependencies,
- }
-
- raise Exception("Unhandled list action with rule: " + results[0]["rule"])
-
-
-# pylint: disable=too-many-instance-attributes
-class NinjaState:
- """Maintains state of Ninja build system as it's translated from SCons."""
-
- def __init__(self, env, writer_class):
- self.env = env
- self.writer_class = writer_class
- self.__generated = False
- self.translator = SConsToNinjaTranslator(env)
- self.generated_suffixes = env.get("NINJA_GENERATED_SOURCE_SUFFIXES", [])
-
- # List of generated builds that will be written at a later stage
- self.builds = dict()
-
- # List of targets for which we have generated a build. This
- # allows us to take multiple Alias nodes as sources and to not
- # fail to build if they have overlapping targets.
- self.built = set()
-
- # SCons sets this variable to a function which knows how to do
- # shell quoting on whatever platform it's run on. Here we use it
- # to make the SCONS_INVOCATION variable properly quoted for things
- # like CCFLAGS
- escape = env.get("ESCAPE", lambda x: x)
-
- self.variables = {
- "COPY": "cmd.exe /c 1>NUL copy" if sys.platform == "win32" else "cp",
- "SCONS_INVOCATION": "{} {} __NINJA_NO=1 $out".format(
- sys.executable,
- " ".join(
- [escape(arg) for arg in sys.argv if arg not in COMMAND_LINE_TARGETS]
- ),
- ),
- "SCONS_INVOCATION_W_TARGETS": "{} {}".format(
- sys.executable, " ".join([escape(arg) for arg in sys.argv])
- ),
- # This must be set to a global default per:
- # https://ninja-build.org/manual.html
- #
- # (The deps section)
- "msvc_deps_prefix": "Note: including file:",
- }
-
- self.rules = {
- "CMD": {
- "command": "cmd /c $env$cmd" if sys.platform == "win32" else "$env$cmd",
- "description": "Building $out",
- "pool": "local_pool",
- },
- # We add the deps processing variables to this below. We
- # don't pipe these through cmd.exe on Windows because we
- # use this to generate a compile_commands.json database
- # which can't use the shell command as it's compile
- # command.
- "CC": {
- "command": "$env$CC @$out.rsp",
- "description": "Compiling $out",
- "rspfile": "$out.rsp",
- "rspfile_content": "$rspc",
- },
- "CXX": {
- "command": "$env$CXX @$out.rsp",
- "description": "Compiling $out",
- "rspfile": "$out.rsp",
- "rspfile_content": "$rspc",
- },
- "LINK": {
- "command": "$env$LINK @$out.rsp",
- "description": "Linking $out",
- "rspfile": "$out.rsp",
- "rspfile_content": "$rspc",
- "pool": "local_pool",
- },
- # Ninja does not automatically delete the archive before
- # invoking ar. The ar utility will append to an existing archive, which
- # can cause duplicate symbols if the symbols moved between object files.
- # Native SCons will perform this operation so we need to force ninja
- # to do the same. See related for more info:
- # https://jira.mongodb.org/browse/SERVER-49457
- "AR": {
- "command": "{}$env$AR @$out.rsp".format(
- '' if sys.platform == "win32" else "rm -f $out && "
- ),
- "description": "Archiving $out",
- "rspfile": "$out.rsp",
- "rspfile_content": "$rspc",
- "pool": "local_pool",
- },
- "SYMLINK": {
- "command": (
- "cmd /c mklink $out $in"
- if sys.platform == "win32"
- else "ln -s $in $out"
- ),
- "description": "Symlink $in -> $out",
- },
- "INSTALL": {
- "command": "$COPY $in $out",
- "description": "Install $out",
- "pool": "install_pool",
- # On Windows cmd.exe /c copy does not always correctly
- # update the timestamp on the output file. This leads
- # to a stuck constant timestamp in the Ninja database
- # and needless rebuilds.
- #
- # Adding restat here ensures that Ninja always checks
- # the copy updated the timestamp and that Ninja has
- # the correct information.
- "restat": 1,
- },
- "TEMPLATE": {
- "command": "$SCONS_INVOCATION $out",
- "description": "Rendering $out",
- "pool": "scons_pool",
- "restat": 1,
- },
- "SCONS": {
- "command": "$SCONS_INVOCATION $out",
- "description": "SCons $out",
- "pool": "scons_pool",
- # restat
- # if present, causes Ninja to re-stat the command's outputs
- # after execution of the command. Each output whose
- # modification time the command did not change will be
- # treated as though it had never needed to be built. This
- # may cause the output's reverse dependencies to be removed
- # from the list of pending build actions.
- #
- # We use restat any time we execute SCons because
- # SCons calls in Ninja typically create multiple
- # targets. But since SCons is doing it's own up to
- # date-ness checks it may only update say one of
- # them. Restat will find out which of the multiple
- # build targets did actually change then only rebuild
- # those targets which depend specifically on that
- # output.
- "restat": 1,
- },
- "REGENERATE": {
- "command": "$SCONS_INVOCATION_W_TARGETS",
- "description": "Regenerating $out",
- "generator": 1,
- "depfile": os.path.join(get_path(env['NINJA_BUILDDIR']), '$out.depfile'),
- # Console pool restricts to 1 job running at a time,
- # it additionally has some special handling about
- # passing stdin, stdout, etc to process in this pool
- # that we need for SCons to behave correctly when
- # regenerating Ninja
- "pool": "console",
- # Again we restat in case Ninja thought the
- # build.ninja should be regenerated but SCons knew
- # better.
- "restat": 1,
- },
- }
-
- self.pools = {
- "local_pool": self.env.GetOption("num_jobs"),
- "install_pool": self.env.GetOption("num_jobs") / 2,
- "scons_pool": 1,
- }
-
- for rule in ["CC", "CXX"]:
- if env["PLATFORM"] == "win32":
- self.rules[rule]["deps"] = "msvc"
- else:
- self.rules[rule]["deps"] = "gcc"
- self.rules[rule]["depfile"] = "$out.d"
-
- def add_build(self, node):
- if not node.has_builder():
- return False
-
- if isinstance(node, SCons.Node.Alias.Alias):
- build = alias_to_ninja_build(node)
- else:
- build = self.translator.action_to_ninja_build(node)
-
- # Some things are unbuild-able or need not be built in Ninja
- if build is None:
- return False
-
- node_string = str(node)
- if node_string in self.builds:
- raise Exception("Node {} added to ninja build state more than once".format(node_string))
- self.builds[node_string] = build
- self.built.update(build["outputs"])
- return True
-
- def is_generated_source(self, output):
- """Check if output ends with a known generated suffix."""
- _, suffix = splitext(output)
- return suffix in self.generated_suffixes
-
- def has_generated_sources(self, output):
- """
- Determine if output indicates this is a generated header file.
- """
- for generated in output:
- if self.is_generated_source(generated):
- return True
- return False
-
- # pylint: disable=too-many-branches,too-many-locals
- def generate(self, ninja_file):
- """
- Generate the build.ninja.
-
- This should only be called once for the lifetime of this object.
- """
- if self.__generated:
- return
-
- self.rules.update(self.env.get(NINJA_RULES, {}))
- self.pools.update(self.env.get(NINJA_POOLS, {}))
-
- content = io.StringIO()
- ninja = self.writer_class(content, width=100)
-
- ninja.comment("Generated by scons. DO NOT EDIT.")
-
- ninja.variable("builddir", get_path(self.env['NINJA_BUILDDIR']))
-
- for pool_name, size in self.pools.items():
- ninja.pool(pool_name, size)
-
- for var, val in self.variables.items():
- ninja.variable(var, val)
-
- for rule, kwargs in self.rules.items():
- ninja.rule(rule, **kwargs)
-
- generated_source_files = sorted({
- output
- # First find builds which have header files in their outputs.
- for build in self.builds.values()
- if self.has_generated_sources(build["outputs"])
- for output in build["outputs"]
- # Collect only the header files from the builds with them
- # in their output. We do this because is_generated_source
- # returns True if it finds a header in any of the outputs,
- # here we need to filter so we only have the headers and
- # not the other outputs.
- if self.is_generated_source(output)
- })
-
- if generated_source_files:
- ninja.build(
- outputs="_generated_sources",
- rule="phony",
- implicit=generated_source_files
- )
-
- template_builders = []
-
- for build in [self.builds[key] for key in sorted(self.builds.keys())]:
- if build["rule"] == "TEMPLATE":
- template_builders.append(build)
- continue
-
- if "implicit" in build:
- build["implicit"].sort()
-
- # Don't make generated sources depend on each other. We
- # have to check that none of the outputs are generated
- # sources and none of the direct implicit dependencies are
- # generated sources or else we will create a dependency
- # cycle.
- if (
- generated_source_files
- and not build["rule"] == "INSTALL"
- and set(build["outputs"]).isdisjoint(generated_source_files)
- and set(build.get("implicit", [])).isdisjoint(generated_source_files)
- ):
-
- # Make all non-generated source targets depend on
- # _generated_sources. We use order_only for generated
- # sources so that we don't rebuild the world if one
- # generated source was rebuilt. We just need to make
- # sure that all of these sources are generated before
- # other builds.
- order_only = build.get("order_only", [])
- order_only.append("_generated_sources")
- build["order_only"] = order_only
- if "order_only" in build:
- build["order_only"].sort()
-
- # When using a depfile Ninja can only have a single output
- # but SCons will usually have emitted an output for every
- # thing a command will create because it's caching is much
- # more complex than Ninja's. This includes things like DWO
- # files. Here we make sure that Ninja only ever sees one
- # target when using a depfile. It will still have a command
- # that will create all of the outputs but most targets don't
- # depend direclty on DWO files and so this assumption is safe
- # to make.
- rule = self.rules.get(build["rule"])
-
- # Some rules like 'phony' and other builtins we don't have
- # listed in self.rules so verify that we got a result
- # before trying to check if it has a deps key.
- #
- # Anything using deps or rspfile in Ninja can only have a single
- # output, but we may have a build which actually produces
- # multiple outputs which other targets can depend on. Here we
- # slice up the outputs so we have a single output which we will
- # use for the "real" builder and multiple phony targets that
- # match the file names of the remaining outputs. This way any
- # build can depend on any output from any build.
- #
- # We assume that the first listed output is the 'key'
- # output and is stably presented to us by SCons. For
- # instance if -gsplit-dwarf is in play and we are
- # producing foo.o and foo.dwo, we expect that outputs[0]
- # from SCons will be the foo.o file and not the dwo
- # file. If instead we just sorted the whole outputs array,
- # we would find that the dwo file becomes the
- # first_output, and this breaks, for instance, header
- # dependency scanning.
- if rule is not None and (rule.get("deps") or rule.get("rspfile")):
- first_output, remaining_outputs = (
- build["outputs"][0],
- build["outputs"][1:],
- )
-
- if remaining_outputs:
- ninja.build(
- outputs=sorted(remaining_outputs), rule="phony", implicit=first_output,
- )
-
- build["outputs"] = first_output
-
- # Optionally a rule can specify a depfile, and SCons can generate implicit
- # dependencies into the depfile. This allows for dependencies to come and go
- # without invalidating the ninja file. The depfile was created in ninja specifically
- # for dealing with header files appearing and disappearing across rebuilds, but it can
- # be repurposed for anything, as long as you have a way to regenerate the depfile.
- # More specific info can be found here: https://ninja-build.org/manual.html#_depfile
- if rule is not None and rule.get('depfile') and build.get('deps_files'):
- path = build['outputs'] if SCons.Util.is_List(build['outputs']) else [build['outputs']]
- generate_depfile(self.env, path[0], build.pop('deps_files', []))
-
- if "inputs" in build:
- build["inputs"].sort()
-
- ninja.build(**build)
-
- template_builds = dict()
- for template_builder in template_builders:
-
- # Special handling for outputs and implicit since we need to
- # aggregate not replace for each builder.
- for agg_key in ["outputs", "implicit", "inputs"]:
- new_val = template_builds.get(agg_key, [])
-
- # Use pop so the key is removed and so the update
- # below will not overwrite our aggregated values.
- cur_val = template_builder.pop(agg_key, [])
- if is_List(cur_val):
- new_val += cur_val
- else:
- new_val.append(cur_val)
- template_builds[agg_key] = new_val
-
- # Collect all other keys
- template_builds.update(template_builder)
-
- if template_builds.get("outputs", []):
- ninja.build(**template_builds)
-
- # We have to glob the SCons files here to teach the ninja file
- # how to regenerate itself. We'll never see ourselves in the
- # DAG walk so we can't rely on action_to_ninja_build to
- # generate this rule even though SCons should know we're
- # dependent on SCons files.
- #
- # The REGENERATE rule uses depfile, so we need to generate the depfile
- # in case any of the SConscripts have changed. The depfile needs to be
- # path with in the build and the passed ninja file is an abspath, so
- # we will use SCons to give us the path within the build. Normally
- # generate_depfile should not be called like this, but instead be called
- # through the use of custom rules, and filtered out in the normal
- # list of build generation about. However, because the generate rule
- # is hardcoded here, we need to do this generate_depfile call manually.
- ninja_file_path = self.env.File(ninja_file).path
- generate_depfile(
- self.env,
- ninja_file_path,
- self.env['NINJA_REGENERATE_DEPS']
- )
-
- ninja.build(
- ninja_file_path,
- rule="REGENERATE",
- implicit=[__file__],
- )
-
- # If we ever change the name/s of the rules that include
- # compile commands (i.e. something like CC) we will need to
- # update this build to reflect that complete list.
- ninja.build(
- "compile_commands.json",
- rule="CMD",
- pool="console",
- implicit=[ninja_file],
- variables={
- "cmd": "ninja -f {} -t compdb {}CC CXX > compile_commands.json".format(
- ninja_file, '-x ' if self.env.get('NINJA_COMPDB_EXPAND') else ''
- )
- },
- )
-
- ninja.build(
- "compiledb", rule="phony", implicit=["compile_commands.json"],
- )
-
- # Look in SCons's list of DEFAULT_TARGETS, find the ones that
- # we generated a ninja build rule for.
- scons_default_targets = [
- get_path(tgt)
- for tgt in SCons.Script.DEFAULT_TARGETS
- if get_path(tgt) in self.built
- ]
-
- # If we found an overlap between SCons's list of default
- # targets and the targets we created ninja builds for then use
- # those as ninja's default as well.
- if scons_default_targets:
- ninja.default(" ".join(scons_default_targets))
-
- with open(ninja_file, "w") as build_ninja:
- build_ninja.write(content.getvalue())
-
- self.__generated = True
-
-
-def get_path(node):
- """
- Return a fake path if necessary.
-
- As an example Aliases use this as their target name in Ninja.
- """
- if hasattr(node, "get_path"):
- return node.get_path()
- return str(node)
-
-
-def rfile(node):
- """
- Return the repository file for node if it has one. Otherwise return node
- """
- if hasattr(node, "rfile"):
- return node.rfile()
- return node
-
-
-def src_file(node):
- """Returns the src code file if it exists."""
- if hasattr(node, "srcnode"):
- src = node.srcnode()
- if src.stat() is not None:
- return src
- return get_path(node)
-
-
-def get_comstr(env, action, targets, sources):
- """Get the un-substituted string for action."""
- # Despite being having "list" in it's name this member is not
- # actually a list. It's the pre-subst'd string of the command. We
- # use it to determine if the command we're about to generate needs
- # to use a custom Ninja rule. By default this redirects CC, CXX,
- # AR, SHLINK, and LINK commands to their respective rules but the
- # user can inject custom Ninja rules and tie them to commands by
- # using their pre-subst'd string.
- if hasattr(action, "process"):
- return action.cmd_list
-
- return action.genstring(targets, sources, env)
-
-
-def get_command_env(env):
- """
- Return a string that sets the enrivonment for any environment variables that
- differ between the OS environment and the SCons command ENV.
-
- It will be compatible with the default shell of the operating system.
- """
- try:
- return env["NINJA_ENV_VAR_CACHE"]
- except KeyError:
- pass
-
- # Scan the ENV looking for any keys which do not exist in
- # os.environ or differ from it. We assume if it's a new or
- # differing key from the process environment then it's
- # important to pass down to commands in the Ninja file.
- ENV = get_default_ENV(env)
- scons_specified_env = {
- key: value
- for key, value in ENV.items()
- if key not in os.environ or os.environ.get(key, None) != value
- }
-
- windows = env["PLATFORM"] == "win32"
- command_env = ""
- for key, value in scons_specified_env.items():
- # Ensure that the ENV values are all strings:
- if is_List(value):
- # If the value is a list, then we assume it is a
- # path list, because that's a pretty common list-like
- # value to stick in an environment variable:
- value = flatten_sequence(value)
- value = joinpath(map(str, value))
- else:
- # If it isn't a string or a list, then we just coerce
- # it to a string, which is the proper way to handle
- # Dir and File instances and will produce something
- # reasonable for just about everything else:
- value = str(value)
-
- if windows:
- command_env += "set '{}={}' && ".format(key, value)
- else:
- # We address here *only* the specific case that a user might have
- # an environment variable which somehow gets included and has
- # spaces in the value. These are escapes that Ninja handles. This
- # doesn't make builds on paths with spaces (Ninja and SCons issues)
- # nor expanding response file paths with spaces (Ninja issue) work.
- value = value.replace(r' ', r'$ ')
- command_env += "{}='{}' ".format(key, value)
-
- env["NINJA_ENV_VAR_CACHE"] = command_env
- return command_env
-
-
-def gen_get_response_file_command(env, rule, tool, tool_is_dynamic=False):
- """Generate a response file command provider for rule name."""
-
- # If win32 using the environment with a response file command will cause
- # ninja to fail to create the response file. Additionally since these rules
- # generally are not piping through cmd.exe /c any environment variables will
- # make CreateProcess fail to start.
- #
- # On POSIX we can still set environment variables even for compile
- # commands so we do so.
- use_command_env = not env["PLATFORM"] == "win32"
- if "$" in tool:
- tool_is_dynamic = True
-
- def get_response_file_command(env, node, action, targets, sources, executor=None):
- if hasattr(action, "process"):
- cmd_list, _, _ = action.process(targets, sources, env, executor=executor)
- cmd_list = [str(c).replace("$", "$$") for c in cmd_list[0]]
- else:
- command = generate_command(
- env, node, action, targets, sources, executor=executor
- )
- cmd_list = shlex.split(command)
-
- if tool_is_dynamic:
- tool_command = env.subst(
- tool, target=targets, source=sources, executor=executor
- )
- else:
- tool_command = tool
-
- try:
- # Add 1 so we always keep the actual tool inside of cmd
- tool_idx = cmd_list.index(tool_command) + 1
- except ValueError:
- raise Exception(
- "Could not find tool {} in {} generated from {}".format(
- tool, cmd_list, get_comstr(env, action, targets, sources)
- )
- )
-
- cmd, rsp_content = cmd_list[:tool_idx], cmd_list[tool_idx:]
- rsp_content = " ".join(rsp_content)
-
- variables = {"rspc": rsp_content}
- variables[rule] = cmd
- if use_command_env:
- variables["env"] = get_command_env(env)
- return rule, variables, [tool_command]
-
- return get_response_file_command
-
-
-def generate_command(env, node, action, targets, sources, executor=None):
- # Actions like CommandAction have a method called process that is
- # used by SCons to generate the cmd_line they need to run. So
- # check if it's a thing like CommandAction and call it if we can.
- if hasattr(action, "process"):
- cmd_list, _, _ = action.process(targets, sources, env, executor=executor)
- cmd = _string_from_cmd_list(cmd_list[0])
- else:
- # Anything else works with genstring, this is most commonly hit by
- # ListActions which essentially call process on all of their
- # commands and concatenate it for us.
- genstring = action.genstring(targets, sources, env)
- if executor is not None:
- cmd = env.subst(genstring, executor=executor)
- else:
- cmd = env.subst(genstring, targets, sources)
-
- cmd = cmd.replace("\n", " && ").strip()
- if cmd.endswith("&&"):
- cmd = cmd[0:-2].strip()
-
- # Escape dollars as necessary
- return cmd.replace("$", "$$")
-
-
-def get_generic_shell_command(env, node, action, targets, sources, executor=None):
- return (
- "CMD",
- {
- "cmd": generate_command(env, node, action, targets, sources, executor=None),
- "env": get_command_env(env),
- },
- # Since this function is a rule mapping provider, it must return a list of dependencies,
- # and usually this would be the path to a tool, such as a compiler, used for this rule.
- # However this function is to generic to be able to reliably extract such deps
- # from the command, so we return a placeholder empty list. It should be noted that
- # generally this function will not be used soley and is more like a template to generate
- # the basics for a custom provider which may have more specific options for a provier
- # function for a custom NinjaRuleMapping.
- []
- )
-
-
-def get_command(env, node, action): # pylint: disable=too-many-branches
- """Get the command to execute for node."""
- if node.env:
- sub_env = node.env
- else:
- sub_env = env
-
- executor = node.get_executor()
- if executor is not None:
- tlist = executor.get_all_targets()
- slist = executor.get_all_sources()
- else:
- if hasattr(node, "target_peers"):
- tlist = node.target_peers
- else:
- tlist = [node]
- slist = node.sources
-
- # Retrieve the repository file for all sources
- slist = [rfile(s) for s in slist]
-
- # Generate a real CommandAction
- if isinstance(action, SCons.Action.CommandGeneratorAction):
- # pylint: disable=protected-access
- action = action._generate(tlist, slist, sub_env, 1, executor=executor)
-
- variables = {}
-
- comstr = get_comstr(sub_env, action, tlist, slist)
- if not comstr:
- return None
-
- provider = __NINJA_RULE_MAPPING.get(comstr, get_generic_shell_command)
- rule, variables, provider_deps = provider(sub_env, node, action, tlist, slist, executor=executor)
-
- # Get the dependencies for all targets
- implicit = list({dep for tgt in tlist for dep in get_dependencies(tgt)})
-
- # Now add in the other dependencies related to the command,
- # e.g. the compiler binary. The ninja rule can be user provided so
- # we must do some validation to resolve the dependency path for ninja.
- for provider_dep in provider_deps:
-
- provider_dep = sub_env.subst(provider_dep)
- if not provider_dep:
- continue
-
- # If the tool is a node, then SCons will resolve the path later, if its not
- # a node then we assume it generated from build and make sure it is existing.
- if isinstance(provider_dep, SCons.Node.Node) or os.path.exists(provider_dep):
- implicit.append(provider_dep)
- continue
-
- # in some case the tool could be in the local directory and be suppled without the ext
- # such as in windows, so append the executable suffix and check.
- prog_suffix = sub_env.get('PROGSUFFIX', '')
- provider_dep_ext = provider_dep if provider_dep.endswith(prog_suffix) else provider_dep + prog_suffix
- if os.path.exists(provider_dep_ext):
- implicit.append(provider_dep_ext)
- continue
-
- # Many commands will assume the binary is in the path, so
- # we accept this as a possible input from a given command.
-
- provider_dep_abspath = sub_env.WhereIs(provider_dep) or sub_env.WhereIs(provider_dep, path=os.environ["PATH"])
- if provider_dep_abspath:
- implicit.append(provider_dep_abspath)
- continue
-
- # Possibly these could be ignore and the build would still work, however it may not always
- # rebuild correctly, so we hard stop, and force the user to fix the issue with the provided
- # ninja rule.
- raise Exception(f"Could not resolve path for {provider_dep} dependency on node '{node}'")
-
- ninja_build = {
- "order_only": get_order_only(node),
- "outputs": get_outputs(node),
- "inputs": get_inputs(node),
- "implicit": implicit,
- "rule": rule,
- "variables": variables,
- }
-
- # Don't use sub_env here because we require that NINJA_POOL be set
- # on a per-builder call basis to prevent accidental strange
- # behavior like env['NINJA_POOL'] = 'console' and sub_env can be
- # the global Environment object if node.env is None.
- # Example:
- #
- # Allowed:
- #
- # env.Command("ls", NINJA_POOL="ls_pool")
- #
- # Not allowed and ignored:
- #
- # env["NINJA_POOL"] = "ls_pool"
- # env.Command("ls")
- #
- if node.env and node.env.get("NINJA_POOL", None) is not None:
- ninja_build["pool"] = node.env["NINJA_POOL"]
-
- return ninja_build
-
-
-def ninja_builder(env, target, source):
- """Generate a build.ninja for source."""
- if not isinstance(source, list):
- source = [source]
- if not isinstance(target, list):
- target = [target]
-
- # We have no COMSTR equivalent so print that we're generating
- # here.
- print("Generating:", str(target[0]))
-
- generated_build_ninja = target[0].get_abspath()
- NINJA_STATE.generate(generated_build_ninja)
-
- return 0
-
-
-# pylint: disable=too-few-public-methods
-class AlwaysExecAction(SCons.Action.FunctionAction):
- """Override FunctionAction.__call__ to always execute."""
-
- def __call__(self, *args, **kwargs):
- kwargs["execute"] = 1
- return super().__call__(*args, **kwargs)
-
-
-def register_custom_handler(env, name, handler):
- """Register a custom handler for SCons function actions."""
- env[NINJA_CUSTOM_HANDLERS][name] = handler
-
-
-def register_custom_rule_mapping(env, pre_subst_string, rule):
- """Register a function to call for a given rule."""
- global __NINJA_RULE_MAPPING
- __NINJA_RULE_MAPPING[pre_subst_string] = rule
-
-
-def register_custom_rule(env, rule, command, description="", deps=None, pool=None, use_depfile=False, use_response_file=False, response_file_content="$rspc"):
- """Allows specification of Ninja rules from inside SCons files."""
- rule_obj = {
- "command": command,
- "description": description if description else "{} $out".format(rule),
- }
-
- if use_depfile:
- rule_obj["depfile"] = os.path.join(get_path(env['NINJA_BUILDDIR']), '$out.depfile')
-
- if deps is not None:
- rule_obj["deps"] = deps
-
- if pool is not None:
- rule_obj["pool"] = pool
-
- if use_response_file:
- rule_obj["rspfile"] = "$out.rsp"
- rule_obj["rspfile_content"] = response_file_content
-
- env[NINJA_RULES][rule] = rule_obj
-
-
-def register_custom_pool(env, pool, size):
- """Allows the creation of custom Ninja pools"""
- env[NINJA_POOLS][pool] = size
-
-def set_build_node_callback(env, node, callback):
- if 'conftest' not in str(node):
- setattr(node.attributes, "ninja_build_callback", callback)
-
-def ninja_csig(original):
- """Return a dummy csig"""
-
- def wrapper(self):
- name = str(self)
- if "SConscript" in name or "SConstruct" in name:
- return original(self)
- return "dummy_ninja_csig"
-
- return wrapper
-
-
-def ninja_contents(original):
- """Return a dummy content without doing IO"""
-
- def wrapper(self):
- name = str(self)
- if "SConscript" in name or "SConstruct" in name:
- return original(self)
- return bytes("dummy_ninja_contents", encoding="utf-8")
-
- return wrapper
-
-def CheckNinjaCompdbExpand(env, context):
- """ Configure check testing if ninja's compdb can expand response files"""
-
- context.Message('Checking if ninja compdb can expand response files... ')
- ret, output = context.TryAction(
- action='ninja -f $SOURCE -t compdb -x CMD_RSP > $TARGET',
- extension='.ninja',
- text=textwrap.dedent("""
- rule CMD_RSP
- command = $cmd @$out.rsp > fake_output.txt
- description = Building $out
- rspfile = $out.rsp
- rspfile_content = $rspc
- build fake_output.txt: CMD_RSP fake_input.txt
- cmd = echo
- pool = console
- rspc = "test"
- """))
- result = '@fake_output.txt.rsp' not in output
- context.Result(result)
- return result
-
-def ninja_stat(_self, path):
- """
- Eternally memoized stat call.
-
- SCons is very aggressive about clearing out cached values. For our
- purposes everything should only ever call stat once since we're
- running in a no_exec build the file system state should not
- change. For these reasons we patch SCons.Node.FS.LocalFS.stat to
- use our eternal memoized dictionary.
- """
- global NINJA_STAT_MEMO
-
- try:
- return NINJA_STAT_MEMO[path]
- except KeyError:
- try:
- result = os.stat(path)
- except os.error:
- result = None
-
- NINJA_STAT_MEMO[path] = result
- return result
-
-
-def ninja_noop(*_args, **_kwargs):
- """
- A general purpose no-op function.
-
- There are many things that happen in SCons that we don't need and
- also don't return anything. We use this to disable those functions
- instead of creating multiple definitions of the same thing.
- """
- return None
-
-
-def ninja_whereis(thing, *_args, **_kwargs):
- """Replace env.WhereIs with a much faster version"""
- global NINJA_WHEREIS_MEMO
-
- # Optimize for success, this gets called significantly more often
- # when the value is already memoized than when it's not.
- try:
- return NINJA_WHEREIS_MEMO[thing]
- except KeyError:
- # We do not honor any env['ENV'] or env[*] variables in the
- # generated ninja ile. Ninja passes your raw shell environment
- # down to it's subprocess so the only sane option is to do the
- # same during generation. At some point, if and when we try to
- # upstream this, I'm sure a sticking point will be respecting
- # env['ENV'] variables and such but it's actually quite
- # complicated. I have a naive version but making it always work
- # with shell quoting is nigh impossible. So I've decided to
- # cross that bridge when it's absolutely required.
- path = shutil.which(thing)
- NINJA_WHEREIS_MEMO[thing] = path
- return path
-
-
-def ninja_always_serial(self, num, taskmaster):
- """Replacement for SCons.Job.Jobs constructor which always uses the Serial Job class."""
- # We still set self.num_jobs to num even though it's a lie. The
- # only consumer of this attribute is the Parallel Job class AND
- # the Main.py function which instantiates a Jobs class. It checks
- # if Jobs.num_jobs is equal to options.num_jobs, so if the user
- # provides -j12 but we set self.num_jobs = 1 they get an incorrect
- # warning about this version of Python not supporting parallel
- # builds. So here we lie so the Main.py will not give a false
- # warning to users.
- self.num_jobs = num
- self.job = SCons.Job.Serial(taskmaster)
-
-
-class NinjaNoResponseFiles(SCons.Platform.TempFileMunge):
- """Overwrite the __call__ method of SCons' TempFileMunge to not delete."""
-
- def __call__(self, target, source, env, for_signature):
- return self.cmd
-
- def _print_cmd_str(*_args, **_kwargs):
- """Disable this method"""
- pass
-
-
-def exists(env):
- """Enable if called."""
-
- # This variable disables the tool when storing the SCons command in the
- # generated ninja file to ensure that the ninja tool is not loaded when
- # SCons should do actual work as a subprocess of a ninja build. The ninja
- # tool is very invasive into the internals of SCons and so should never be
- # enabled when SCons needs to build a target.
- if env.get("__NINJA_NO", "0") == "1":
- return False
-
- return True
-
-
-def generate(env):
- """Generate the NINJA builders."""
- env[NINJA_SYNTAX] = env.get(NINJA_SYNTAX, "ninja_syntax.py")
-
- # Add the Ninja builder.
- always_exec_ninja_action = AlwaysExecAction(ninja_builder, {})
- ninja_builder_obj = SCons.Builder.Builder(action=always_exec_ninja_action)
- env.Append(BUILDERS={"Ninja": ninja_builder_obj})
-
- env["NINJA_PREFIX"] = env.get("NINJA_PREFIX", "build")
- env["NINJA_SUFFIX"] = env.get("NINJA_SUFFIX", "ninja")
- env["NINJA_ALIAS_NAME"] = env.get("NINJA_ALIAS_NAME", "generate-ninja")
- env['NINJA_BUILDDIR'] = env.get("NINJA_BUILDDIR", env.Dir(".ninja").path)
- ninja_file_name = env.subst("${NINJA_PREFIX}.${NINJA_SUFFIX}")
- ninja_file = env.Ninja(target=ninja_file_name, source=[])
- env.AlwaysBuild(ninja_file)
- env.Alias("$NINJA_ALIAS_NAME", ninja_file)
-
- # TODO: API for getting the SConscripts programmatically
- # exists upstream: https://github.com/SCons/scons/issues/3625
- def ninja_generate_deps(env):
- return sorted([env.File("#SConstruct").path] + glob("**/SConscript", recursive=True))
- env['_NINJA_REGENERATE_DEPS_FUNC'] = ninja_generate_deps
-
- env['NINJA_REGENERATE_DEPS'] = env.get('NINJA_REGENERATE_DEPS', '${_NINJA_REGENERATE_DEPS_FUNC(__env__)}')
-
- # This adds the required flags such that the generated compile
- # commands will create depfiles as appropriate in the Ninja file.
- if env["PLATFORM"] == "win32":
- env.Append(CCFLAGS=["/showIncludes"])
- else:
- env.Append(CCFLAGS=["-MMD", "-MF", "${TARGET}.d"])
-
- env.AddMethod(CheckNinjaCompdbExpand, "CheckNinjaCompdbExpand")
-
- # Provide a way for custom rule authors to easily access command
- # generation.
- env.AddMethod(get_generic_shell_command, "NinjaGetGenericShellCommand")
- env.AddMethod(get_command, "NinjaGetCommand")
- env.AddMethod(gen_get_response_file_command, "NinjaGenResponseFileProvider")
- env.AddMethod(set_build_node_callback, "NinjaSetBuildNodeCallback")
-
- # Provides a way for users to handle custom FunctionActions they
- # want to translate to Ninja.
- env[NINJA_CUSTOM_HANDLERS] = {}
- env.AddMethod(register_custom_handler, "NinjaRegisterFunctionHandler")
-
- # Provides a mechanism for inject custom Ninja rules which can
- # then be mapped using NinjaRuleMapping.
- env[NINJA_RULES] = {}
- env.AddMethod(register_custom_rule, "NinjaRule")
-
- # Provides a mechanism for inject custom Ninja pools which can
- # be used by providing the NINJA_POOL="name" as an
- # OverrideEnvironment variable in a builder call.
- env[NINJA_POOLS] = {}
- env.AddMethod(register_custom_pool, "NinjaPool")
-
- # Add the ability to register custom NinjaRuleMappings for Command
- # builders. We don't store this dictionary in the env to prevent
- # accidental deletion of the CC/XXCOM mappings. You can still
- # overwrite them if you really want to but you have to explicit
- # about it this way. The reason is that if they were accidentally
- # deleted you would get a very subtly incorrect Ninja file and
- # might not catch it.
- env.AddMethod(register_custom_rule_mapping, "NinjaRuleMapping")
-
- # TODO: change LINKCOM and SHLINKCOM to handle embedding manifest exe checks
- # without relying on the SCons hacks that SCons uses by default.
- if env["PLATFORM"] == "win32":
- from SCons.Tool.mslink import compositeLinkAction
-
- if env["LINKCOM"] == compositeLinkAction:
- env[
- "LINKCOM"
- ] = '${TEMPFILE("$LINK $LINKFLAGS /OUT:$TARGET.windows $_LIBDIRFLAGS $_LIBFLAGS $_PDB $SOURCES.windows", "$LINKCOMSTR")}'
- env[
- "SHLINKCOM"
- ] = '${TEMPFILE("$SHLINK $SHLINKFLAGS $_SHLINK_TARGETS $_LIBDIRFLAGS $_LIBFLAGS $_PDB $_SHLINK_SOURCES", "$SHLINKCOMSTR")}'
-
- # Normally in SCons actions for the Program and *Library builders
- # will return "${*COM}" as their pre-subst'd command line. However
- # if a user in a SConscript overwrites those values via key access
- # like env["LINKCOM"] = "$( $ICERUN $)" + env["LINKCOM"] then
- # those actions no longer return the "bracketted" string and
- # instead return something that looks more expanded. So to
- # continue working even if a user has done this we map both the
- # "bracketted" and semi-expanded versions.
- def robust_rule_mapping(var, rule, tool):
- provider = gen_get_response_file_command(env, rule, tool)
- env.NinjaRuleMapping("${" + var + "}", provider)
- env.NinjaRuleMapping(env[var], provider)
-
- robust_rule_mapping("CCCOM", "CC", env["CC"])
- robust_rule_mapping("SHCCCOM", "CC", env["CC"])
- robust_rule_mapping("CXXCOM", "CXX", env["CXX"])
- robust_rule_mapping("SHCXXCOM", "CXX", env["CXX"])
- robust_rule_mapping("LINKCOM", "LINK", "$LINK")
- robust_rule_mapping("SHLINKCOM", "LINK", "$SHLINK")
- robust_rule_mapping("ARCOM", "AR", env["AR"])
-
- # Make SCons node walk faster by preventing unnecessary work
- env.Decider("timestamp-match")
-
- # Used to determine if a build generates a source file. Ninja
- # requires that all generated sources are added as order_only
- # dependencies to any builds that *might* use them.
- env["NINJA_GENERATED_SOURCE_SUFFIXES"] = [".h", ".hpp"]
-
- if env["PLATFORM"] != "win32" and env.get("RANLIBCOM"):
- # There is no way to translate the ranlib list action into
- # Ninja so add the s flag and disable ranlib.
- #
- # This is equivalent to Meson.
- # https://github.com/mesonbuild/meson/blob/master/mesonbuild/linkers.py#L143
- old_arflags = str(env["ARFLAGS"])
- if "s" not in old_arflags:
- old_arflags += "s"
-
- env["ARFLAGS"] = SCons.Util.CLVar([old_arflags])
-
- # Disable running ranlib, since we added 's' above
- env["RANLIBCOM"] = ""
-
- # This is the point of no return, anything after this comment
- # makes changes to SCons that are irreversible and incompatible
- # with a normal SCons build. We return early if __NINJA_NO=1 has
- # been given on the command line (i.e. by us in the generated
- # ninja file) here to prevent these modifications from happening
- # when we want SCons to do work. Everything before this was
- # necessary to setup the builder and other functions so that the
- # tool can be unconditionally used in the users's SCons files.
-
- if not exists(env):
- return
-
- # Set a known variable that other tools can query so they can
- # behave correctly during ninja generation.
- env["GENERATING_NINJA"] = True
-
- # These methods are no-op'd because they do not work during ninja
- # generation, expected to do no work, or simply fail. All of which
- # are slow in SCons. So we overwrite them with no logic.
- SCons.Node.FS.File.make_ready = ninja_noop
- SCons.Node.FS.File.prepare = ninja_noop
- SCons.Node.FS.File.push_to_cache = ninja_noop
- SCons.Executor.Executor.prepare = ninja_noop
- SCons.Taskmaster.Task.prepare = ninja_noop
- SCons.Node.FS.File.built = ninja_noop
- SCons.Node.Node.visited = ninja_noop
-
- # We make lstat a no-op because it is only used for SONAME
- # symlinks which we're not producing.
- SCons.Node.FS.LocalFS.lstat = ninja_noop
-
- # This is a slow method that isn't memoized. We make it a noop
- # since during our generation we will never use the results of
- # this or change the results.
- SCons.Node.FS.is_up_to_date = ninja_noop
-
- # We overwrite stat and WhereIs with eternally memoized
- # implementations. See the docstring of ninja_stat and
- # ninja_whereis for detailed explanations.
- SCons.Node.FS.LocalFS.stat = ninja_stat
- SCons.Util.WhereIs = ninja_whereis
-
- # Monkey patch get_csig and get_contents for some classes. It
- # slows down the build significantly and we don't need contents or
- # content signatures calculated when generating a ninja file since
- # we're not doing any SCons caching or building.
- SCons.Executor.Executor.get_contents = ninja_contents(
- SCons.Executor.Executor.get_contents
- )
- SCons.Node.Alias.Alias.get_contents = ninja_contents(
- SCons.Node.Alias.Alias.get_contents
- )
- SCons.Node.FS.File.get_contents = ninja_contents(SCons.Node.FS.File.get_contents)
- SCons.Node.FS.File.get_csig = ninja_csig(SCons.Node.FS.File.get_csig)
- SCons.Node.FS.Dir.get_csig = ninja_csig(SCons.Node.FS.Dir.get_csig)
- SCons.Node.Alias.Alias.get_csig = ninja_csig(SCons.Node.Alias.Alias.get_csig)
-
- # Ignore CHANGED_SOURCES and CHANGED_TARGETS. We don't want those
- # to have effect in a generation pass because the generator
- # shouldn't generate differently depending on the current local
- # state. Without this, when generating on Windows, if you already
- # had a foo.obj, you would omit foo.cpp from the response file. Do the same for UNCHANGED.
- SCons.Executor.Executor._get_changed_sources = SCons.Executor.Executor._get_sources
- SCons.Executor.Executor._get_changed_targets = SCons.Executor.Executor._get_targets
- SCons.Executor.Executor._get_unchanged_sources = SCons.Executor.Executor._get_sources
- SCons.Executor.Executor._get_unchanged_targets = SCons.Executor.Executor._get_targets
-
- # Replace false action messages with nothing.
- env["PRINT_CMD_LINE_FUNC"] = ninja_noop
-
- # This reduces unnecessary subst_list calls to add the compiler to
- # the implicit dependencies of targets. Since we encode full paths
- # in our generated commands we do not need these slow subst calls
- # as executing the command will fail if the file is not found
- # where we expect it.
- env["IMPLICIT_COMMAND_DEPENDENCIES"] = False
-
- # This makes SCons more aggressively cache MD5 signatures in the
- # SConsign file.
- env.SetOption("max_drift", 1)
-
- # The Serial job class is SIGNIFICANTLY (almost twice as) faster
- # than the Parallel job class for generating Ninja files. So we
- # monkey the Jobs constructor to only use the Serial Job class.
- SCons.Job.Jobs.__init__ = ninja_always_serial
-
- # The environment variable NINJA_SYNTAX points to the
- # ninja_syntax.py module from the ninja sources found here:
- # https://github.com/ninja-build/ninja/blob/master/misc/ninja_syntax.py
- #
- # This should be vendored into the build sources and it's location
- # set in NINJA_SYNTAX. This code block loads the location from
- # that variable, gets the absolute path to the vendored file, gets
- # it's parent directory then uses importlib to import the module
- # dynamically.
- ninja_syntax_file = env[NINJA_SYNTAX]
- if isinstance(ninja_syntax_file, str):
- ninja_syntax_file = env.File(ninja_syntax_file).get_abspath()
- ninja_syntax_mod_dir = os.path.dirname(ninja_syntax_file)
- sys.path.append(ninja_syntax_mod_dir)
- ninja_syntax_mod_name = os.path.basename(ninja_syntax_file)
- ninja_syntax = importlib.import_module(ninja_syntax_mod_name.replace(".py", ""))
-
- global NINJA_STATE
- NINJA_STATE = NinjaState(env, ninja_syntax.Writer)
-
- # Here we will force every builder to use an emitter which makes the ninja
- # file depend on it's target. This forces the ninja file to the bottom of
- # the DAG which is required so that we walk every target, and therefore add
- # it to the global NINJA_STATE, before we try to write the ninja file.
- def ninja_file_depends_on_all(target, source, env):
- if not any("conftest" in str(t) for t in target):
- env.Depends(ninja_file, target)
- return target, source
-
- # The "Alias Builder" isn't in the BUILDERS map so we have to
- # modify it directly.
- SCons.Environment.AliasBuilder.emitter = ninja_file_depends_on_all
-
- for _, builder in env["BUILDERS"].items():
- try:
- emitter = builder.emitter
- if emitter is not None:
- builder.emitter = SCons.Builder.ListEmitter(
- [emitter, ninja_file_depends_on_all]
- )
- else:
- builder.emitter = ninja_file_depends_on_all
- # Users can inject whatever they want into the BUILDERS
- # dictionary so if the thing doesn't have an emitter we'll
- # just ignore it.
- except AttributeError:
- pass
-
- # Here we monkey patch the Task.execute method to not do a bunch of
- # unnecessary work. If a build is a regular builder (i.e not a conftest and
- # not our own Ninja builder) then we add it to the NINJA_STATE. Otherwise we
- # build it like normal. This skips all of the caching work that this method
- # would normally do since we aren't pulling any of these targets from the
- # cache.
- #
- # In the future we may be able to use this to actually cache the build.ninja
- # file once we have the upstream support for referencing SConscripts as File
- # nodes.
- def ninja_execute(self):
- global NINJA_STATE
-
- target = self.targets[0]
- target_name = str(target)
- if target_name != ninja_file_name and "conftest" not in target_name:
- NINJA_STATE.add_build(target)
- else:
- target.build()
-
- SCons.Taskmaster.Task.execute = ninja_execute
-
- # Make needs_execute always return true instead of determining out of
- # date-ness.
- SCons.Script.Main.BuildTask.needs_execute = lambda x: True
-
- # We will eventually need to overwrite TempFileMunge to make it
- # handle persistent tempfiles or get an upstreamed change to add
- # some configurability to it's behavior in regards to tempfiles.
- #
- # Set all three environment variables that Python's
- # tempfile.mkstemp looks at as it behaves differently on different
- # platforms and versions of Python.
- os.environ["TMPDIR"] = env.Dir("$BUILD_DIR/response_files").get_abspath()
- os.environ["TEMP"] = os.environ["TMPDIR"]
- os.environ["TMP"] = os.environ["TMPDIR"]
- if not os.path.isdir(os.environ["TMPDIR"]):
- env.Execute(SCons.Defaults.Mkdir(os.environ["TMPDIR"]))
-
- env["TEMPFILE"] = NinjaNoResponseFiles
diff --git a/site_scons/site_tools/ninja.py b/site_scons/site_tools/ninja.py
index 31aacf81534..acb85d42da4 100644
--- a/site_scons/site_tools/ninja.py
+++ b/site_scons/site_tools/ninja.py
@@ -266,6 +266,7 @@ class SConsToNinjaTranslator:
return None
build = {}
+ env = node.env if node.env else self.env
# Ideally this should never happen, and we do try to filter
# Ninja builders out of being sources of ninja builders but I
@@ -277,18 +278,23 @@ class SConsToNinjaTranslator:
build = self.handle_func_action(node, action)
elif isinstance(action, SCons.Action.LazyAction):
# pylint: disable=protected-access
- action = action._generate_cache(node.env if node.env else self.env)
+ action = action._generate_cache(env)
build = self.action_to_ninja_build(node, action=action)
elif isinstance(action, SCons.Action.ListAction):
build = self.handle_list_action(node, action)
elif isinstance(action, COMMAND_TYPES):
- build = get_command(node.env if node.env else self.env, node, action)
+ build = get_command(env, node, action)
else:
raise Exception("Got an unbuildable ListAction for: {}".format(str(node)))
if build is not None:
build["order_only"] = get_order_only(node)
+ if 'conftest' not in str(node):
+ node_callback = getattr(node.attributes, "ninja_build_callback", None)
+ if callable(node_callback):
+ node_callback(env, node, build)
+
return build
def handle_func_action(self, node, action):
@@ -1088,9 +1094,18 @@ def get_command(env, node, action): # pylint: disable=too-many-branches
implicit.append(provider_dep)
continue
+ # in some case the tool could be in the local directory and be suppled without the ext
+ # such as in windows, so append the executable suffix and check.
+ prog_suffix = sub_env.get('PROGSUFFIX', '')
+ provider_dep_ext = provider_dep if provider_dep.endswith(prog_suffix) else provider_dep + prog_suffix
+ if os.path.exists(provider_dep_ext):
+ implicit.append(provider_dep_ext)
+ continue
+
# Many commands will assume the binary is in the path, so
# we accept this as a possible input from a given command.
- provider_dep_abspath = sub_env.WhereIs(provider_dep)
+
+ provider_dep_abspath = sub_env.WhereIs(provider_dep) or sub_env.WhereIs(provider_dep, path=os.environ["PATH"])
if provider_dep_abspath:
implicit.append(provider_dep_abspath)
continue
@@ -1167,7 +1182,7 @@ def register_custom_rule_mapping(env, pre_subst_string, rule):
__NINJA_RULE_MAPPING[pre_subst_string] = rule
-def register_custom_rule(env, rule, command, description="", deps=None, pool=None, use_depfile=False):
+def register_custom_rule(env, rule, command, description="", deps=None, pool=None, use_depfile=False, use_response_file=False, response_file_content="$rspc"):
"""Allows specification of Ninja rules from inside SCons files."""
rule_obj = {
"command": command,
@@ -1183,6 +1198,10 @@ def register_custom_rule(env, rule, command, description="", deps=None, pool=Non
if pool is not None:
rule_obj["pool"] = pool
+ if use_response_file:
+ rule_obj["rspfile"] = "$out.rsp"
+ rule_obj["rspfile_content"] = response_file_content
+
env[NINJA_RULES][rule] = rule_obj
@@ -1190,6 +1209,9 @@ def register_custom_pool(env, pool, size):
"""Allows the creation of custom Ninja pools"""
env[NINJA_POOLS][pool] = size
+def set_build_node_callback(env, node, callback):
+ if 'conftest' not in str(node):
+ setattr(node.attributes, "ninja_build_callback", callback)
def ninja_csig(original):
"""Return a dummy csig"""
@@ -1371,7 +1393,9 @@ def generate(env):
# Provide a way for custom rule authors to easily access command
# generation.
env.AddMethod(get_generic_shell_command, "NinjaGetGenericShellCommand")
+ env.AddMethod(get_command, "NinjaGetCommand")
env.AddMethod(gen_get_response_file_command, "NinjaGenResponseFileProvider")
+ env.AddMethod(set_build_node_callback, "NinjaSetBuildNodeCallback")
# Provides a way for users to handle custom FunctionActions they
# want to translate to Ninja.