author     Mathew Robinson <chasinglogic@gmail.com>          2020-01-31 17:25:08 -0500
committer  Evergreen Agent <no-reply@evergreen.mongodb.com>  2020-01-31 22:37:45 +0000
commit     e1774c067b3074f9c28f284061d9de0820f942c7 (patch)
tree       6824b3a4cbef40adb9d891ea30f031132ca47a9b
parent     d9bd7a1be843ddb38473506ea2aa4802be6d9993 (diff)
download   mongo-e1774c067b3074f9c28f284061d9de0820f942c7.tar.gz

Revert "SERVER-45302 Only write Ninja file if the contents have changed"

This reverts commit 66c7d6485f6e6f921b5ce455ec1d3f90dd0e1d37.
-rw-r--r--  SConstruct                      | 16
-rw-r--r--  site_scons/site_tools/ninja.py  | 50
2 files changed, 19 insertions(+), 47 deletions(-)
diff --git a/SConstruct b/SConstruct
index 394385ecf70..4ed8cbce671 100644
--- a/SConstruct
+++ b/SConstruct
@@ -4265,18 +4265,10 @@ env.Alias("distsrc", "distsrc-tgz")
 #
 # psutil.cpu_count returns None when it can't determine the number. This always
 # fails on BSD's for example.
-cpu_count = psutil.cpu_count()
-
-# If using icecream try to set the number of jobs higher than the
-# cpu_count since the cluster will have more concurrency. We want to
-# avoid doing this 8x scaling when ninja is enabled with Icecream
-# however since the ninja tool reads num_jobs to build local pools for
-# execution. So only do the scaling if Scons is driving the icecream
-# build.
-if cpu_count is not None and 'ICECC' in env and get_option('ninja') != 'true':
-    env.SetOption('num_jobs', 8 * cpu_count)
-elif cpu_count is not None:
-    env.SetOption('num_jobs', cpu_count)
+if psutil.cpu_count() is not None and 'ICECC' not in env:
+    env.SetOption('num_jobs', psutil.cpu_count())
+elif psutil.cpu_count() and 'ICECC' in env:
+    env.SetOption('num_jobs', 8 * psutil.cpu_count())

 # Do this as close to last as possible before reading SConscripts, so
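
For context, the removed branch above implements the scheduling rule described in its comment. A minimal standalone sketch of that rule, assuming only that `env` behaves like a mapping of build variables and that `ninja_enabled` reflects `get_option('ninja')`:

    # Sketch only: psutil is the real dependency; env and ninja_enabled stand
    # in for the SCons construction environment and the --ninja option.
    import psutil

    def pick_num_jobs(env, ninja_enabled):
        """Return the -j value to pass to SetOption, or None to keep the default."""
        cpu_count = psutil.cpu_count()  # may be None, e.g. on some BSDs
        if cpu_count is None:
            return None
        # An icecream cluster offers far more concurrency than the local
        # machine, so scale jobs 8x, but only when SCons drives the build,
        # because the ninja tool sizes its local pools from num_jobs.
        if 'ICECC' in env and not ninja_enabled:
            return 8 * cpu_count
        return cpu_count

The restored (`+`) lines instead apply the 8x factor whenever ICECC is set, without checking whether ninja is enabled.
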
diff --git a/site_scons/site_tools/ninja.py b/site_scons/site_tools/ninja.py
index 52917ff180d..b4af805a304 100644
--- a/site_scons/site_tools/ninja.py
+++ b/site_scons/site_tools/ninja.py
@@ -53,7 +53,7 @@ def _install_action_function(_env, node):
"outputs": get_outputs(node),
"rule": "INSTALL",
"pool": "install_pool",
- "inputs": sorted([get_path(src_file(s)) for s in node.sources]),
+ "inputs": [get_path(src_file(s)) for s in node.sources],
"implicit": get_dependencies(node),
}
@@ -65,8 +65,8 @@ def _lib_symlink_action_function(_env, node):
     if not symlinks or symlinks is None:
         return None

-    outputs = sorted([link.get_dir().rel_path(linktgt) for link, linktgt in symlinks])
-    inputs = sorted([link.get_path() for link, _ in symlinks])
+    outputs = [link.get_dir().rel_path(linktgt) for link, linktgt in symlinks]
+    inputs = [link.get_path() for link, _ in symlinks]

     return {
         "outputs": outputs,
@@ -96,15 +96,15 @@ def alias_to_ninja_build(node):
     return {
         "outputs": get_outputs(node),
         "rule": "phony",
-        "implicit": sorted([
+        "implicit": [
             get_path(n) for n in node.children() if is_valid_dependent_node(n)
-        ]),
+        ],
     }


 def get_dependencies(node):
     """Return a list of dependencies for node."""
-    return sorted([get_path(src_file(child)) for child in node.children()])
+    return [get_path(src_file(child)) for child in node.children()]


 def get_inputs(node):
@@ -115,7 +115,7 @@ def get_inputs(node):
     else:
         inputs = node.sources

-    inputs = sorted([get_path(src_file(o)) for o in inputs])
+    inputs = [get_path(src_file(o)) for o in inputs]

     return inputs
@@ -130,7 +130,7 @@ def get_outputs(node):
     else:
         outputs = [node]

-    outputs = sorted([get_path(o) for o in outputs])
+    outputs = [get_path(o) for o in outputs]

     return outputs
@@ -241,7 +241,7 @@ class SConsToNinjaTranslator:
         if len(results) == 1:
             return results[0]

-        all_outputs = sorted(list({output for build in results for output in build["outputs"]}))
+        all_outputs = list({output for build in results for output in build["outputs"]})
         # If we have no outputs we're done
         if not all_outputs:
             return None
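
All of the hunks above drop sorted() wrappers introduced by the reverted change. The likely reason they existed: several of these lists are built from sets or from SCons node traversals, and with Python's hash randomization a set of strings can iterate in a different order on every interpreter run, so sorting is what kept the generated ninja text byte-identical between regenerations. A tiny illustration (the file names are hypothetical):

    # Model of the de-duplicated outputs collected for a phony build edge.
    outputs = {"b.o", "a.o", "c.o"}

    unstable = list(outputs)   # order can differ across interpreter runs
    stable = sorted(outputs)   # always ["a.o", "b.o", "c.o"]

    line = "build _all: phony " + " ".join(stable)
    assert line == "build _all: phony a.o b.o c.o"
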
@@ -586,12 +586,12 @@ class NinjaState:
         ninja.build(
             outputs="_generated_sources",
             rule="phony",
-            implicit=sorted(list(generated_source_files)),
+            implicit=list(generated_source_files),
         )

         template_builders = []

-        for build in sorted(self.builds, key=lambda x: x["outputs"][0]):
+        for build in self.builds:
             if build["rule"] == "TEMPLATE":
                 template_builders.append(build)
                 continue
@@ -678,7 +678,6 @@ class NinjaState:
             template_builds.update(template_builder)

         if template_builds.get("outputs", []):
-            template_builds["outputs"] = sorted(template_builds["outputs"])
             ninja.build(**template_builds)

         # We have to glob the SCons files here to teach the ninja file
@@ -695,11 +694,11 @@ class NinjaState:
         ninja.build(
             ninja_file,
             rule="REGENERATE",
-            implicit=sorted([
+            implicit=[
                 self.env.File("#SConstruct").get_abspath(),
                 os.path.abspath(__file__),
             ]
-            + glob("src/**/SConscript", recursive=True)),
+            + glob("src/**/SConscript", recursive=True),
         )

         ninja.build(
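
The edge above is what lets the generated build.ninja rebuild itself: SConstruct, the ninja tool module, and every SConscript are listed as implicit inputs of the REGENERATE rule. A minimal standalone sketch of the same edge, written against the ninja_syntax writer from the ninja PyPI package (assumed here to behave like the writer the tool wraps; the command string and paths are placeholders):

    import io
    from ninja import ninja_syntax

    buf = io.StringIO()
    writer = ninja_syntax.Writer(buf)
    # generator=True tells ninja this rule re-creates the build file itself.
    writer.rule("REGENERATE", command="scons --ninja build.ninja", generator=True)
    writer.build(
        "build.ninja",
        rule="REGENERATE",
        implicit=["SConstruct", "site_scons/site_tools/ninja.py"],
    )
    print(buf.getvalue())
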
@@ -746,27 +745,8 @@ class NinjaState:
         elif fallback_default_target is not None:
             ninja.default(fallback_default_target)

-        # Grab the contents of our buffer, we're going to compare it
-        # to the existing ninja file (if there is one). This way we
-        # don't unnecessarily update the ninja file which would cause
-        # a full rebuild of the tree. Since we have the restat
-        # variable set to true on the REGENERATE rule if we don't
-        # write the file Ninja will re-determine after we complete if
-        # the file actually changed and remove targets from the build
-        # queue if they didn't need to be rebuilt.
-        new_build_ninja = content.getvalue()
-
-        if os.path.isfile(ninja_file):
-            with open(ninja_file, "r") as build_ninja:
-                old_build_ninja = build_ninja.read()
-        else:
-            old_build_ninja = ""
-
-        if new_build_ninja != old_build_ninja:
-            with open(ninja_file, "w") as build_ninja:
-                build_ninja.write(new_build_ninja)
-        else:
-            print(ninja_file, "is already up to date.")
+        with open(ninja_file, "w") as build_ninja:
+            build_ninja.write(content.getvalue())

         self.__generated = True
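
The block removed above is the heart of the reverted change: buffer the ninja file in memory, compare it against what is on disk, and rewrite it only when the contents differ, so an unchanged build.ninja does not force a full rebuild of the tree (the restat setting on the REGENERATE rule covers the rest). A standalone sketch of that pattern, with write_if_changed as a hypothetical helper name:

    import os

    def write_if_changed(path, new_text):
        """Write new_text to path only when it differs from the file on disk."""
        if os.path.isfile(path):
            with open(path, "r") as f:
                old_text = f.read()
        else:
            old_text = ""

        if new_text != old_text:
            with open(path, "w") as f:
                f.write(new_text)
            return True

        print(path, "is already up to date.")
        return False

The revert replaces this with an unconditional write, so every SCons regeneration rewrites build.ninja even when nothing changed.
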