summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMathew Robinson <chasinglogic@gmail.com>2020-01-06 15:47:18 -0500
committerEvergreen Agent <no-reply@evergreen.mongodb.com>2020-02-13 15:21:41 +0000
commit12b63497b80c46f31cb1ece3a23887e50a129504 (patch)
tree449a7f9bea146eadeceb6a00c2a39eda27577fa1
parentf31bc89f66632b2d521be2d076dc23f94ff663eb (diff)
downloadmongo-12b63497b80c46f31cb1ece3a23887e50a129504.tar.gz
SERVER-45301 Not all expected aliases are present in the ninja generator output
-rw-r--r--SConstruct93
-rw-r--r--buildscripts/resmokelib/testing/testcases/sdam_json_test.py13
-rw-r--r--etc/evergreen.yml89
-rw-r--r--site_scons/site_tools/auto_archive.py273
-rw-r--r--site_scons/site_tools/auto_install_binaries.py762
-rw-r--r--site_scons/site_tools/mongo_benchmark.py8
-rw-r--r--site_scons/site_tools/mongo_integrationtest.py8
-rw-r--r--site_scons/site_tools/mongo_test_list.py10
-rw-r--r--site_scons/site_tools/mongo_unittest.py21
-rw-r--r--src/mongo/SConscript9
-rw-r--r--src/mongo/client/sdam/SConscript1
-rw-r--r--src/mongo/embedded/mongo_embedded/SConscript8
-rw-r--r--src/mongo/embedded/mongoc_embedded/SConscript8
-rw-r--r--src/mongo/embedded/stitch_support/SConscript9
-rw-r--r--src/mongo/logger/SConscript1
-rw-r--r--src/mongo/shell/utils.js7
-rw-r--r--src/mongo/stdx/SConscript40
-rw-r--r--src/mongo/tools/SConscript3
-rw-r--r--src/third_party/IntelRDFPMathLib20U1/SConscript25
-rw-r--r--src/third_party/wiredtiger/SConscript2
20 files changed, 727 insertions, 663 deletions
diff --git a/SConstruct b/SConstruct
index 7331ef04e42..6d00e2799a5 100644
--- a/SConstruct
+++ b/SConstruct
@@ -3755,33 +3755,6 @@ env["NINJA_SYNTAX"] = "#site_scons/third_party/ninja_syntax.py"
env.Tool('ccache')
env.Tool('icecream')
-resmoke_config = env.Substfile(
- target="#resmoke.ini",
- source="buildscripts/resmoke.ini.in",
- SUBST_DICT={
- "@install_dir@": "$PREFIX_BINDIR" if get_option("install-mode") == "hygienic" else env.Dir("#").abspath,
- }
-)
-
-# Substfile does a poor job of detecting if it needs to rebuild a
-# file. Since this file is cheap to generate we make it always build
-# because it's better to spend the < 1 second generating it than
-# having a developer waste time debugging why they're running the
-# wrong binaries from Resmoke.
-#
-# This doesn't make this file always generate, it only generates if
-# the below Depends wiring have changed. Basically it skips SCons'
-# Deciders
-env.AlwaysBuild(resmoke_config)
-if get_option("install-mode") == "hygienic":
- # We only need to change the config if PREFIX_BINDIR has changed
- # since Resmoke's installDir flag points here.
- env.Depends(env.Dir("$PREFIX_BINDIR"), resmoke_config)
-else:
- # This depends really isn't true but it's the only reliable way
- # for non-hygienic builds to make sure this file is made.
- env.Depends(env.Dir("$BUILD_DIR"), resmoke_config)
-
if get_option('ninja') == 'true':
ninja_builder = Tool("ninja")
ninja_builder.generate(env)
@@ -3804,7 +3777,6 @@ if get_option('ninja') == 'true':
source=[
env.Alias("install-all-meta"),
env.Alias("test-execution-aliases"),
- resmoke_config,
],
)
else:
@@ -3813,7 +3785,6 @@ if get_option('ninja') == 'true':
source=[
env.Alias("all"),
env.Alias("test-execution-aliases"),
- resmoke_config,
],
)
@@ -3862,7 +3833,7 @@ if get_option('ninja') == 'true':
def ninja_test_list_builder(env, node):
- test_files = env["MONGO_TEST_REGISTRY"][node.path]
+ test_files = [test_file.path for test_file in env["MONGO_TEST_REGISTRY"][node.path]]
files = "\\n".join(test_files)
return {
"outputs": node.get_path(),
@@ -3889,8 +3860,14 @@ if get_option('install-mode') == 'hygienic':
if get_option('separate-debug') == "on" or env.TargetOSIs("windows"):
env.Tool('separate_debug')
- env["AIB_TARBALL_SUFFIX"] = "tgz"
+ env["AUTO_ARCHIVE_TARBALL_SUFFIX"] = "tgz"
+
+ env["AIB_META_COMPONENT"] = "all"
+ env["AIB_BASE_COMPONENT"] = "common"
+ env["AIB_DEFAULT_COMPONENT"] = "mongodb"
+
env.Tool('auto_install_binaries')
+ env.Tool('auto_archive')
env.DeclareRoles(
roles=[
@@ -3922,7 +3899,6 @@ if get_option('install-mode') == 'hygienic':
# runtime package.
"debug" if env.TargetOSIs('windows') else None,
],
- transitive=True,
silent=True,
),
],
@@ -3946,39 +3922,29 @@ if get_option('install-mode') == 'hygienic':
env.AddSuffixMapping({
"$PROGSUFFIX": env.SuffixMap(
directory="$PREFIX_BINDIR",
- default_roles=[
- "runtime",
- ]
+ default_role="runtime",
),
"$SHLIBSUFFIX": env.SuffixMap(
directory="$PREFIX_BINDIR" \
if mongo_platform.get_running_os_name() == "windows" \
else "$PREFIX_LIBDIR",
- default_roles=[
- "runtime",
- ]
+ default_role="runtime",
),
".debug": env.SuffixMap(
directory="$PREFIX_DEBUGDIR",
- default_roles=[
- "debug",
- ]
+ default_role="debug",
),
".dSYM": env.SuffixMap(
directory="$PREFIX_DEBUGDIR",
- default_roles=[
- "debug"
- ]
+ default_role="debug",
),
".pdb": env.SuffixMap(
directory="$PREFIX_DEBUGDIR",
- default_roles=[
- "debug"
- ]
+ default_role="debug",
),
})
@@ -4436,6 +4402,39 @@ if has_option("cache"):
addNoCacheEmitter(env['BUILDERS']['LoadableModule'])
+resmoke_install_dir = env.subst("$PREFIX_BINDIR") if get_option("install-mode") == "hygienic" else env.Dir("#").abspath
+resmoke_install_dir = os.path.normpath(resmoke_install_dir).replace("\\", r"\\")
+resmoke_config = env.Substfile(
+ target="#resmoke.ini",
+ source="#buildscripts/resmoke.ini.in",
+ SUBST_DICT={
+ "@install_dir@": resmoke_install_dir,
+ }
+)
+
+# Self-testable installs (PM-1691) will make this unnecessary because we will be
+# installing resmoke (i.e. it will have it's own component and role) and it will
+# be able to detect where the binaries are. For now we need to generate this
+# config file to tell resmoke how to find mongod.
+def resmoke_config_scanner(old_scanner):
+ cfg_file = resmoke_config[0]
+
+ def new_scanner(node, env, path=()):
+ result = old_scanner.function(node, env, path)
+ result.append(cfg_file)
+ return result
+
+ return new_scanner
+
+
+program_builder = env["BUILDERS"]["Program"]
+program_builder.target_scanner = SCons.Scanner.Scanner(
+ function=resmoke_config_scanner(program_builder.target_scanner),
+ path_function=program_builder.target_scanner.path_function,
+)
+
+
+
env.SConscript(
dirs=[
'src',
diff --git a/buildscripts/resmokelib/testing/testcases/sdam_json_test.py b/buildscripts/resmokelib/testing/testcases/sdam_json_test.py
index adf9ce1a0f6..19d364c15d5 100644
--- a/buildscripts/resmokelib/testing/testcases/sdam_json_test.py
+++ b/buildscripts/resmokelib/testing/testcases/sdam_json_test.py
@@ -13,10 +13,7 @@ class SDAMJsonTestCase(interface.ProcessTestCase):
"""Server Discovery and Monitoring JSON test case."""
REGISTERED_NAME = "sdam_json_test"
- if config.INSTALL_DIR is not None:
- EXECUTABLE_BUILD_PATH = os.path.join(config.INSTALL_DIR, "sdam_json_test")
- else:
- EXECUTABLE_BUILD_PATH = "build/**/mongo/client/sdam/sdam_json_test"
+ EXECUTABLE_BUILD_PATH = "build/**/mongo/client/sdam/sdam_json_test"
TEST_DIR = os.path.normpath("src/mongo/client/sdam/json_tests")
def __init__(self, logger, json_test_file, program_options=None):
@@ -28,6 +25,14 @@ class SDAMJsonTestCase(interface.ProcessTestCase):
self.program_options = utils.default_if_none(program_options, {}).copy()
def _find_executable(self):
+ if config.INSTALL_DIR is not None:
+ binary = os.path.join(config.INSTALL_DIR, "sdam_json_test")
+ if os.name == "nt":
+ binary += ".exe"
+
+ if os.path.isfile(binary):
+ return binary
+
execs = globstar.glob(self.EXECUTABLE_BUILD_PATH + '.exe')
if not execs:
execs = globstar.glob(self.EXECUTABLE_BUILD_PATH)
diff --git a/etc/evergreen.yml b/etc/evergreen.yml
index 64efa8ebcba..8ec4ebb293a 100644
--- a/etc/evergreen.yml
+++ b/etc/evergreen.yml
@@ -3484,7 +3484,6 @@ functions:
optional: true
"save unstripped dbtest":
- - *detect_failed_dbtest
- *archive_dbtest
- *archive_dbtest_debug
@@ -3921,7 +3920,7 @@ tasks:
commands:
- func: "scons compile"
vars:
- targets: install-all-meta
+ targets: install-all
compiling_for_test: true
task_compile_flags: >-
--detect-odr-violations
@@ -3964,7 +3963,7 @@ tasks:
commands:
- func: "scons compile"
vars:
- targets: install-unittests install-unittests-debug
+ targets: install-unittests
task_compile_flags: >-
--detect-odr-violations
--separate-debug
@@ -7550,6 +7549,7 @@ tasks:
depends_on:
- name: package
commands:
+ - func: "git get project"
- func: "fetch packages"
- func: "set up remote credentials"
vars:
@@ -8320,9 +8320,6 @@ task_groups:
name: compile_TG
tasks:
- compile
-- <<: *compile_task_group_template
- name: compile_packages_TG
- tasks:
- package
- <<: *compile_task_group_template
name: compile_core_tools_TG
@@ -8364,6 +8361,7 @@ task_groups:
- unittests
- dbtest
- compile_all
+ - package
- name: clang_tidy_TG
setup_group_can_fail_task: true
@@ -8618,7 +8616,6 @@ buildvariants:
- name: watchdog_wiredtiger
- name: .ssl
- name: .stitch
- - name: compile_packages_TG
- name: test_packages
distros:
- ubuntu1604-packer
@@ -8673,7 +8670,6 @@ buildvariants:
- name: sharding_auth_gen
- name: snmp
- name: .watchdog
- - name: compile_packages_TG
- name: test_packages
distros:
- ubuntu1604-packer
@@ -8736,7 +8732,6 @@ buildvariants:
- name: .sharding .txns
- name: .stitch
- name: .ssl
- - name: compile_packages_TG
- name: test_packages
distros:
- ubuntu1604-packer
@@ -8785,7 +8780,6 @@ buildvariants:
- name: sharding_jscore_passthrough
- name: .ssl
- name: .stitch
- - name: compile_packages_TG
- name: .publish
distros:
- ubuntu1604-test
@@ -8833,7 +8827,6 @@ buildvariants:
- name: sharding_jscore_passthrough
- name: .ssl
- name: .stitch
- - name: compile_packages_TG
- name: .publish
distros:
- ubuntu1804-test
@@ -8863,7 +8856,6 @@ buildvariants:
- name: free_monitoring
- name: jsCore
- name: replica_sets_jscore_passthrough
- - name: compile_packages_TG
- name: .publish
distros:
- ubuntu1804-test
@@ -8918,7 +8910,6 @@ buildvariants:
- name: sharding_jscore_passthrough
- name: .ssl
- name: .stitch
- - name: compile_packages_TG
- name: .publish
distros:
- ubuntu1804-test
@@ -8949,7 +8940,6 @@ buildvariants:
- name: jsCore
- name: replica_sets_jscore_passthrough
- name: ssl_gen
- - name: compile_packages_TG
- name: .publish
distros:
- ubuntu1804-test
@@ -9007,7 +8997,6 @@ buildvariants:
- name: .sharding .common !.multiversion
- name: .sharding .txns
- name: snmp
- - name: compile_packages_TG
- name: .publish
distros:
- ubuntu1804-test
@@ -9059,7 +9048,6 @@ buildvariants:
- name: slow1_gen
- name: snmp
- name: .stitch
- - name: compile_packages_TG
- name: test_packages
distros:
- ubuntu1604-packer
@@ -9110,7 +9098,6 @@ buildvariants:
- name: .sharding .txns
- name: .ssl
- name: .stitch
- - name: compile_packages_TG
- name: test_packages
distros:
- ubuntu1604-packer
@@ -9166,7 +9153,6 @@ buildvariants:
- name: slow1_gen
- name: snmp
- name: .stitch
- - name: compile_packages_TG
- name: test_packages
distros:
- ubuntu1604-packer
@@ -9221,7 +9207,6 @@ buildvariants:
- name: .sharding .txns
- name: .ssl
- name: .stitch
- - name: compile_packages_TG
- name: test_packages
distros:
- ubuntu1604-packer
@@ -9241,45 +9226,6 @@ buildvariants:
# Windows buildvariants #
###########################################
-- name: enterprise-windows-required
- display_name: "! Enterprise Windows"
- batchtime: 60 # 1 hour
- modules:
- - enterprise
- run_on:
- - windows-64-vs2017-test
- expansions: &enterprise_windows_expansions
- additional_targets: archive-mongocryptd archive-mongocryptd-debug
- msi_target: msi
- burn_in_tests_build_variant: enterprise-windows
- exe: ".exe"
- mh_target: archive-mh archive-mh-debug
- content_type: application/zip
- compile_flags: --ssl MONGO_DISTMOD=windows CPPPATH="c:/sasl/include c:/snmp/include" LIBPATH="c:/sasl/lib c:/snmp/lib" -j$(( $(grep -c ^processor /proc/cpuinfo) / 2 )) --win-version-min=win10
- # We invoke SCons using --jobs = (# of CPUs / 4) to avoid causing out of memory errors due to
- # spawning a large number of linker processes.
- num_scons_link_jobs_available: $(( $(grep -c ^processor /proc/cpuinfo) / 4 ))
- python: '/cygdrive/c/python/python37/python.exe'
- ext: zip
- scons_cache_scope: shared
- multiversion_platform: windows
- multiversion_edition: enterprise
- jstestfuzz_num_generated_files: 35
- target_resmoke_time: 20
- large_distro_name: windows-64-vs2017-compile
- tasks:
- - name: compile_TG
- distros:
- - windows-64-vs2017-compile
- - name: burn_in_tests_gen
- - name: buildscripts_test
- - name: unittest_shell_hang_analyzer_gen
- - name: server_discovery_and_monitoring_json_test_TG
- - name: dbtest_TG
- distros:
- - windows-64-vs2017-compile
- - name: noPassthrough_gen
-
- name: windows-debug
display_name: "* Windows DEBUG"
batchtime: 60 # 1 hour
@@ -9356,6 +9302,7 @@ buildvariants:
expansions:
exe: ".exe"
msi_target: msi
+ additional_targets: archive-mongocryptd archive-mongocryptd-debug
mh_target: archive-mh archive-mh-debug
content_type: application/zip
compile_flags: --ssl MONGO_DISTMOD=windows CPPPATH="c:/sasl/include c:/snmp/include" LIBPATH="c:/sasl/lib c:/snmp/lib" -j$(( $(grep -c ^processor /proc/cpuinfo) / 2 )) --win-version-min=win10
@@ -9411,7 +9358,6 @@ buildvariants:
- name: sharding_ese_gen
- name: snmp
- name: unittest_shell_hang_analyzer_gen
- - name: compile_packages_TG
- name: push
distros:
- rhel70-small
@@ -9571,7 +9517,6 @@ buildvariants:
- name: .ssl
- name: .stitch
- name: .updatefuzzer
- - name: compile_packages_TG
- name: push
distros:
- rhel70-small
@@ -9649,7 +9594,6 @@ buildvariants:
- name: .sharding .txns
- name: .ssl
- name: .stitch
- - name: compile_packages_TG
- name: push
distros:
- rhel70-small
@@ -9707,7 +9651,6 @@ buildvariants:
- name: replica_sets_auth_gen
- name: replica_sets_jscore_passthrough
- name: sasl
- - name: compile_packages_TG
- name: push
distros:
- rhel70-small
@@ -9872,7 +9815,6 @@ buildvariants:
- name: .stitch
- name: .updatefuzzer
- name: secondary_reads_passthrough_gen
- - name: compile_packages_TG
- name: test_packages
distros:
- ubuntu1604-packer
@@ -10323,7 +10265,6 @@ buildvariants:
- name: sharding_auth_gen
- name: snmp
- name: .stitch
- - name: compile_packages_TG
- name: test_packages
distros:
- ubuntu1604-packer
@@ -10381,7 +10322,6 @@ buildvariants:
- name: sharding_auth_gen
- name: snmp
- name: .stitch
- - name: compile_packages_TG
- name: test_packages
distros:
- ubuntu1604-packer
@@ -10541,7 +10481,6 @@ buildvariants:
- name: .sharding .txns
- name: .sharding .common !.op_query !.csrs
- name: .ssl
- - name: compile_packages_TG
- name: test_packages
distros:
- ubuntu1604-packer
@@ -10591,7 +10530,6 @@ buildvariants:
- name: .sharding .common !.op_query !.csrs
- name: .ssl
- name: .stitch
- - name: compile_packages_TG
- name: test_packages
distros:
- ubuntu1604-packer
@@ -10643,7 +10581,6 @@ buildvariants:
- name: .sharding .common !.op_query !.csrs
- name: .ssl
- name: .stitch
- - name: compile_packages_TG
- name: test_packages
distros:
- ubuntu1604-packer
@@ -10717,7 +10654,6 @@ buildvariants:
- name: .sharding .common !.multiversion
- name: snmp
- name: .stitch
- - name: compile_packages_TG
- name: .publish
distros:
- rhel70
@@ -10775,7 +10711,6 @@ buildvariants:
- name: .sharding .common !.multiversion
- name: snmp
- name: .stitch
- - name: compile_packages_TG
- name: .publish
distros:
- rhel70
@@ -10827,7 +10762,6 @@ buildvariants:
- name: snmp
- name: .stitch
- name: secondary_reads_passthrough_gen
- - name: compile_packages_TG
- name: .publish
distros:
- rhel62-large
@@ -10854,7 +10788,6 @@ buildvariants:
- name: jsCore
- name: replica_sets_jscore_passthrough
- name: ssl_gen
- - name: compile_packages_TG
- name: .publish
distros:
- rhel70
@@ -10881,7 +10814,6 @@ buildvariants:
- name: jsCore
- name: replica_sets_jscore_passthrough
- name: ssl_gen
- - name: compile_packages_TG
- name: .publish
distros:
- rhel62-large
@@ -10934,7 +10866,6 @@ buildvariants:
- name: snmp
- name: .stitch
- name: .watchdog
- - name: compile_packages_TG
- name: test_packages
distros:
- ubuntu1604-packer
@@ -11042,7 +10973,6 @@ buildvariants:
- name: sharding_auth_gen
- name: snmp
- name: .stitch
- - name: compile_packages_TG
- name: test_packages
distros:
- ubuntu1604-packer
@@ -11099,7 +11029,6 @@ buildvariants:
- name: snmp
- name: .stitch
- name: secondary_reads_passthrough_gen
- - name: compile_packages_TG
- name: .publish
distros:
- suse12-test
@@ -11131,7 +11060,6 @@ buildvariants:
- name: jsCore
- name: replica_sets_jscore_passthrough
- name: ssl_gen
- - name: compile_packages_TG
- name: .publish
distros:
- suse12-test
@@ -11180,7 +11108,6 @@ buildvariants:
- name: .sharding .common !.op_query !.csrs
- name: .ssl
- name: .stitch
- - name: compile_packages_TG
- name: .publish
- name: enterprise-suse15-64
@@ -11219,7 +11146,6 @@ buildvariants:
- name: sharding_auth_gen
- name: snmp
- name: .stitch
- - name: compile_packages_TG
- name: .publish
- name: suse15
@@ -11265,7 +11191,6 @@ buildvariants:
- name: .sharding .common !.op_query !.csrs !.multiversion
- name: .ssl
- name: .stitch
- - name: compile_packages_TG
- name: .publish
###########################################
@@ -11314,7 +11239,6 @@ buildvariants:
- name: sharding_auth_gen
- name: snmp
- name: .stitch
- - name: compile_packages_TG
- name: test_packages
distros:
- ubuntu1604-packer
@@ -11367,7 +11291,6 @@ buildvariants:
- name: .sharding .common !.op_query !.csrs
- name: .ssl
- name: .stitch
- - name: compile_packages_TG
- name: test_packages
distros:
- ubuntu1604-packer
@@ -11415,7 +11338,6 @@ buildvariants:
- name: sharding_auth_gen
- name: snmp
- name: .stitch
- - name: compile_packages_TG
- name: test_packages
distros:
- ubuntu1604-packer
@@ -11468,7 +11390,6 @@ buildvariants:
- name: .sharding .common !.op_query !.csrs
- name: .ssl
- name: .stitch
- - name: compile_packages_TG
- name: test_packages
distros:
- ubuntu1604-packer
diff --git a/site_scons/site_tools/auto_archive.py b/site_scons/site_tools/auto_archive.py
new file mode 100644
index 00000000000..fcd1ef2b469
--- /dev/null
+++ b/site_scons/site_tools/auto_archive.py
@@ -0,0 +1,273 @@
+import sys
+import os
+
+import SCons
+
+PACKAGE_ALIAS_MAP = "AIB_PACKAGE_ALIAS_MAP"
+AUTO_ARCHIVE_MAKE_ARCHIVE_CONTENT = """
+import os
+import sys
+
+USAGE = '''
+Usage: {} ARCHIVE_TYPE ARCHIVE_NAME ROOT_DIRECTORY FILES...
+
+FILES should be absolute paths or relative to ROOT_DIRECTORY.
+
+ARCHIVE_TYPE is one of zip or tar.
+'''
+
+if __name__ == "__main__":
+ if len(sys.argv) < 4:
+ print(sys.argv[0], "takes at minimum four arguments.")
+ print(USAGE.format(sys.argv[0]))
+ sys.exit(1)
+
+ archive_type = sys.argv[1]
+ archive_name = sys.argv[2]
+ root_dir = sys.argv[3]
+ files = sys.argv[4:]
+
+ if archive_type not in ("zip", "tar"):
+ print("unsupported archive_type", archive_type)
+ print(USAGE.format(sys.argv[0]))
+ sys.exit(1)
+
+ if archive_type == "zip":
+ import zipfile
+ archive = zipfile.ZipFile(archive_name, mode='w', compression=zipfile.ZIP_DEFLATED)
+ add_file = archive.write
+ else:
+ import tarfile
+ archive = tarfile.open(archive_name, mode='w:gz')
+ add_file = archive.add
+
+ os.chdir(root_dir)
+
+ for filename in files:
+ add_file(filename)
+
+ archive.close()
+"""
+
+
+def add_package_name_alias(env, component, role, name):
+ """Add a package name mapping for the combination of component and role."""
+ # Verify we didn't get a None or empty string for any argument
+ if not name:
+ raise Exception(
+ "when setting a package name alias must provide a name parameter"
+ )
+ if not component:
+ raise Exception("No component provided for package name alias")
+ if not role:
+ raise Exception("No role provided for package name alias")
+ env[PACKAGE_ALIAS_MAP][(component, role)] = name
+
+
+def get_package_name(env, component, role):
+ """Return the package file name for the component and role combination."""
+ basename = env[PACKAGE_ALIAS_MAP].get(
+ (component, role), "{component}-{role}".format(component=component, role=role)
+ )
+
+ return basename
+
+
+def collect_transitive_files(env, entry, cache=None):
+ """
+ Collect all installed and transitively installed files for entry.
+ """
+ if not cache:
+ cache = set()
+
+ files = set()
+ stack = [entry]
+
+ # Find all the files directly contained in the component DAG for entry and
+ # it's dependencies.
+ while stack:
+ s = stack.pop()
+ if s in cache:
+ continue
+ cache.add(s)
+
+ stack.extend(s.dependencies)
+ files.update(s.files)
+
+ # Now we will call the scanner to find the transtive files of any files that
+ # we found from the component DAG.
+ non_transitive_files = files.copy()
+ for f in non_transitive_files:
+ # scan_for_transitive_install is memoized so it's safe to call it in
+ # this loop. If it hasn't already run for a file we need to run it
+ # anyway.
+ transitive_files = set(env.GetTransitivelyInstalledFiles(f))
+ files.update(transitive_files)
+
+ return list(files)
+
+
+def auto_archive_gen(first_env, make_archive_script, pkg_fmt):
+ """Generate an archive task function for pkg_fmt where pkg_fmt is one of zip, tar, or auto."""
+
+ if pkg_fmt == "auto":
+ if first_env["PLATFORM"] == "win32":
+ pkg_fmt = "zip"
+ else:
+ pkg_fmt = "tar"
+
+ def auto_archive(env, component, role):
+ pkg_name = get_package_name(env, component, role)
+ install_alias = "install-{component}{role}".format(
+ component=component,
+ role="" if env.GetRoleDeclaration(role).silent else "-" + role,
+ )
+
+ if pkg_fmt == "zip":
+ pkg_suffix = "$AUTO_ARCHIVE_ZIP_SUFFIX"
+ else:
+ pkg_suffix = "$AUTO_ARCHIVE_TARBALL_SUFFIX"
+
+ archive = env.AutoArchive(
+ target="#{}.{}".format(pkg_name, pkg_suffix),
+ source=[make_archive_script] + env.Alias(install_alias),
+ __AUTO_ARCHIVE_TYPE=pkg_fmt,
+ AIB_COMPONENT=component,
+ AIB_ROLE=role,
+ )
+
+ # TODO: perhaps caching of packages / tarballs should be
+ # configurable? It's possible someone would want to do it.
+ env.NoCache(archive)
+ return archive
+
+ return auto_archive
+
+
+def archive_builder(source, target, env, for_signature):
+ """Build archives of the AutoInstall'd sources."""
+ if not source:
+ return []
+
+ source = env.Flatten([source])
+ common_ancestor = None
+
+ # Get the path elements that make up both DESTDIR and PREFIX. Then
+ # iterate the dest_dir_elems with the prefix path elements
+ # stripped off the end of the path converting them to strings for
+ # joining to make the common_ancestor.
+ #
+ # We pass the common_ancestor to tar via -C so that $PREFIX is
+ # preserved in the tarball.
+ dest_dir_elems = env.Dir("$DESTDIR").get_abspath()
+ prefix_elems = env.subst("$PREFIX")
+
+ # In python slicing a string with [:-0] gives an empty string. So
+ # make sure we have a prefix to slice off before trying it.
+ if prefix_elems:
+ common_ancestor = dest_dir_elems[: -len(prefix_elems)]
+ else:
+ common_ancestor = dest_dir_elems
+
+ archive_type = env["__AUTO_ARCHIVE_TYPE"]
+ make_archive_script = source[0].get_abspath()
+ tar_cmd = env.WhereIs("tar")
+ if archive_type == "tar" and tar_cmd:
+ command_prefix = "{tar} -C {common_ancestor} -czf {archive_name}"
+ else:
+ command_prefix = "{python} {make_archive_script} {archive_type} {archive_name} {common_ancestor}"
+
+ archive_name = env.File(target[0]).get_abspath()
+ command_prefix = command_prefix.format(
+ tar=tar_cmd,
+ python=sys.executable,
+ archive_type=archive_type,
+ archive_name=archive_name,
+ make_archive_script=make_archive_script,
+ common_ancestor=common_ancestor,
+ )
+
+ # If we are just being invoked for our signature, we can omit the indirect dependencies
+ # found by expanding the transitive dependencies, since we really only have a hard dependency
+ # on our direct dependencies.
+ if for_signature:
+ return command_prefix
+
+ component = env["AIB_COMPONENT"]
+ role = env["AIB_ROLE"]
+ entry = env["AIB_ALIAS_MAP"][component][role]
+
+ # Pre-process what should be in the archive. We need to pass the
+ # set of known installed files along to the transitive dependency
+ # walk so we can filter out files that aren't in the install
+ # directory.
+ installed = set(env.FindInstalledFiles())
+
+ # Collect all the installed files for our entry. This is doing a pure DAG
+ # walk idea of what should be. So we filter out any that are not in the
+ # installed set.
+ transitive_files = [
+ f for f in
+ collect_transitive_files(env, entry)
+ if f in installed
+ ]
+ if not transitive_files:
+ return []
+
+ # The env["ESCAPE"] function is used by scons to make arguments
+ # valid for the platform that we're running on. For instance it
+ # will properly quote paths that have spaces in them on Posix
+ # platforms and handle \ / on Windows.
+ escape_func = env.get("ESCAPE", lambda x: x)
+
+ # TODO: relpath is costly, and we do it for every file in the archive here.
+ # We should find a way to avoid the repeated relpath invocation, probably by
+ # bucketing by directory.
+ relative_files = [
+ escape_func(os.path.relpath(file.get_abspath(), common_ancestor))
+ for file in transitive_files
+ ]
+
+ return "{prefix} {files}".format(
+ prefix=command_prefix,
+ files=" ".join(relative_files)
+ )
+
+
+def exists(env):
+ return True
+
+
+def generate(env):
+ if not env.get("AUTO_INSTALL_ENABLED"):
+ env.Tool("auto_install_binaries")
+
+ bld = SCons.Builder.Builder(
+ action=SCons.Action.CommandGeneratorAction(
+ archive_builder,
+ {"cmdstr": "Building package ${TARGETS[0]} from ${SOURCES[1:]}"},
+ )
+ )
+ env.Append(BUILDERS={"AutoArchive": bld})
+ env["AUTO_ARCHIVE_TARBALL_SUFFIX"] = env.get(
+ "AUTO_ARCHIVE_TARBALL_SUFFIX", "tar.gz"
+ )
+ env["AUTO_ARCHIVE_ZIP_SUFFIX"] = env.get("AUTO_ARCHIVE_ZIP_SUFFIX", "zip")
+ env[PACKAGE_ALIAS_MAP] = {}
+
+ env.AddMethod(add_package_name_alias, "AddPackageNameAlias")
+
+ # TODO: $BUILD_ROOT should be $VARIANT_DIR after we fix our dir
+ # setup later on.
+ make_archive_script = env.Textfile(
+ target="$BUILD_ROOT/aib_make_archive.py",
+ source=[AUTO_ARCHIVE_MAKE_ARCHIVE_CONTENT],
+ )
+
+ env.AppendUnique(
+ AIB_TASKS={
+ "tar": (auto_archive_gen(env, make_archive_script, "tar"), False),
+ "zip": (auto_archive_gen(env, make_archive_script, "zip"), False),
+ "archive": (auto_archive_gen(env, make_archive_script, "auto"), False),
+ }
+ )
diff --git a/site_scons/site_tools/auto_install_binaries.py b/site_scons/site_tools/auto_install_binaries.py
index d92ba63d241..2e9ad489413 100644
--- a/site_scons/site_tools/auto_install_binaries.py
+++ b/site_scons/site_tools/auto_install_binaries.py
@@ -12,102 +12,53 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-# TODO: Versioned libraries
-# TODO: library dependency chaining for windows dynamic builds, static dev packages
-# TODO: Injectible component dependencies (jscore -> resmoke, etc.)
# TODO: Handle chmod state
-# TODO: Installing resmoke and configurations
-# TODO: package decomposition
-# TODO: Install/package target help text
-# TODO: implement sdk_headers
-
-import os
-import sys
-import shlex
-import itertools
+
from collections import defaultdict, namedtuple
import SCons
from SCons.Tool import install
ALIAS_MAP = "AIB_ALIAS_MAP"
+BASE_COMPONENT = "AIB_BASE_COMPONENT"
BASE_ROLE = "AIB_BASE_ROLE"
-COMPONENTS = "AIB_COMPONENTS_EXTRA"
-INSTALL_ACTIONS = "AIB_INSTALL_ACTIONS"
+COMPONENT = "AIB_COMPONENT"
+REVERSE_COMPONENT_DEPENDENCIES = "AIB_COMPONENTS_EXTRA"
+DEFAULT_COMPONENT = "AIB_DEFAULT_COMPONENT"
+INSTALLED_FILES = "AIB_INSTALLED_FILES"
+META_COMPONENT = "AIB_META_COMPONENT"
META_ROLE = "AIB_META_ROLE"
-PACKAGE_ALIAS_MAP = "AIB_PACKAGE_ALIAS_MAP"
-PRIMARY_COMPONENT = "AIB_COMPONENT"
-PRIMARY_ROLE = "AIB_ROLE"
-ROLES = "AIB_ROLES"
+ROLE = "AIB_ROLE"
ROLE_DECLARATIONS = "AIB_ROLE_DECLARATIONS"
SUFFIX_MAP = "AIB_SUFFIX_MAP"
+TASKS = "AIB_TASKS"
-AIB_MAKE_ARCHIVE_CONTENT = """
-import os
-import sys
-from shutil import which
-
-USAGE = '''
-Usage: {} ARCHIVE_TYPE ARCHIVE_NAME ROOT_DIRECTORY FILES...
-
-FILES should be absolute paths or relative to ROOT_DIRECTORY.
-
-ARCHIVE_TYPE is one of zip or tar.
-'''
-
-if __name__ == "__main__":
- if len(sys.argv) < 4:
- print(sys.argv[0], "takes at minimum four arguments.")
- print(USAGE.format(sys.argv[0]))
- sys.exit(1)
-
- archive_type = sys.argv[1]
- archive_name = sys.argv[2]
- root_dir = sys.argv[3]
- files = sys.argv[4:]
-
- if archive_type not in ("zip", "tar"):
- print("unsupported archive_type", archive_type)
- print(USAGE.format(sys.argv[0]))
- sys.exit(1)
-
- if archive_type == "tar" and which("tar") is not None:
- import subprocess
- import shlex
- tar = which("tar")
- tar_cmd = "{tar} -C {root_dir} -czf {archive_name} {files}".format(
- tar=tar,
- root_dir=root_dir,
- archive_name=archive_name,
- files=" ".join(files),
- )
- subprocess.run(shlex.split(tar_cmd))
- sys.exit(0)
- if archive_type == "zip":
- import zipfile
- archive = zipfile.ZipFile(archive_name, mode='w', compression=zipfile.ZIP_DEFLATED)
- add_file = archive.write
- else:
- print("WARNING: tar not found in $PATH, install the tar utility to greatly improve archive creation speed.")
- import tarfile
- archive = tarfile.open(archive_name, mode='w:gz')
- add_file = archive.add
+SuffixMap = namedtuple("SuffixMap", ["directory", "default_role"],)
+
- os.chdir(root_dir)
+class RoleInfo:
+ """A component/role union Node."""
- for filename in files:
- add_file(filename)
+ def __init__(self, component, role, files=None, dependencies=None):
+ self.id = "{}-{}".format(component, role)
+ self.component = component
+ self.role = role
+ if files is None:
+ self.files = set()
+ else:
+ self.files = set(files)
- archive.close()
-"""
+ if dependencies is None:
+ self.dependencies = set()
+ else:
+ self.dependencies = set(dependencies)
-RoleInfo = namedtuple(
- "RoleInfo",
- ["alias_name", "alias", "components", "roles", "actions", "dependencies"],
-)
+ def __str__(self):
+ return "RoleInfo({})".format(self.id)
-SuffixMap = namedtuple("SuffixMap", ["directory", "default_roles",],)
+ def __repr__(self):
+ return self.__str__()
class DeclaredRole:
@@ -119,7 +70,6 @@ class DeclaredRole:
else:
self.dependencies = {dep for dep in dependencies if dep is not None}
- self.transitive = transitive
self.silent = silent
@@ -130,7 +80,6 @@ def declare_role(env, **kwargs):
def declare_roles(env, roles, base_role=None, meta_role=None):
"""Given a list of role declarations, validate them and store them in the environment"""
-
role_names = [role.name for role in roles]
if len(role_names) != len(set(role_names)):
raise Exception("Cannot declare duplicate roles")
@@ -159,7 +108,7 @@ def declare_roles(env, roles, base_role=None, meta_role=None):
"The base_role argument must be a string name of a role or a role object"
)
else:
- # Set it to something falsy
+ # Set it to something falsey
base_role = str()
if isinstance(meta_role, str):
@@ -207,10 +156,10 @@ def declare_roles(env, roles, base_role=None, meta_role=None):
env[ROLE_DECLARATIONS] = roles
-def generate_alias(env, component, role, target="install"):
+def generate_alias_name(env, component, role, task):
"""Generate a scons alias for the component and role combination"""
- return "{target}-{component}{role}".format(
- target=target,
+ return "{task}-{component}{role}".format(
+ task=task,
component=component,
role="" if env[ROLE_DECLARATIONS][role].silent else "-" + role,
)
@@ -218,368 +167,274 @@ def generate_alias(env, component, role, target="install"):
def get_alias_map_entry(env, component, role):
c_entry = env[ALIAS_MAP][component]
+
try:
return c_entry[role]
except KeyError:
- alias_name = generate_alias(env, component, role)
- r_entry = RoleInfo(
- alias_name=alias_name,
- alias=[],
- components=set(),
- roles=set(),
- actions=[],
- dependencies=[],
- )
+ r_entry = RoleInfo(component=component, role=role)
c_entry[role] = r_entry
- return r_entry
-
-def get_package_name(env, component, role):
- """Return the package file name for the component and role combination."""
- basename = env[PACKAGE_ALIAS_MAP].get(
- (component, role), "{component}-{role}".format(component=component, role=role)
- )
+ declaration = env[ROLE_DECLARATIONS].get(role)
+ for dep in declaration.dependencies:
+ dep_entry = get_alias_map_entry(env, component, dep)
+ r_entry.dependencies.add(dep_entry)
+
+ meta_component = env.get(META_COMPONENT)
+ if meta_component and component != meta_component:
+ meta_c_entry = get_alias_map_entry(env, meta_component, role)
+ meta_c_entry.dependencies.add(r_entry)
+
+ base_component = env.get(BASE_COMPONENT)
+ if base_component and component != base_component:
+ base_c_entry = get_alias_map_entry(env, base_component, role)
+ r_entry.dependencies.add(base_c_entry)
+
+ meta_role = env.get(META_ROLE)
+ if (
+ meta_role
+ and role != meta_role
+ and meta_component
+ and component != meta_component
+ ):
+ meta_r_entry = get_alias_map_entry(env, component, meta_role)
+ meta_c_r_entry = get_alias_map_entry(env, meta_component, meta_role)
+ meta_c_r_entry.dependencies.add(meta_r_entry)
- return basename
+ return r_entry
-def get_dependent_actions(
- env, components, roles, non_transitive_roles, node, cb=None,
-):
- """
- Check if node is a transitive dependency of components and roles
+def get_component(node):
+ return getattr(node.attributes, COMPONENT, None)
- If cb is not None and is callable then it will be called with all
- the arguments that get_dependent_actions was called with (except
- for cb itself) as well as the results of node_roles and the
- aib_install_actions that this function would have returned. The
- return of cb should be the dependent actions. This allows cb to
- access the results of scanning and modify the returned results via
- additional filtering.
- Returns the dependent actions.
- """
- actions = getattr(node.attributes, INSTALL_ACTIONS, None)
- if not actions:
- return []
+def get_role(node):
+ return getattr(node.attributes, ROLE, None)
- # Determine if the roles have any overlap with non_transitive_roles
- #
- # If they are overlapping then that means we can't transition to a
- # new role during scanning.
- if env[BASE_ROLE] not in roles:
- can_transfer = non_transitive_roles and roles.isdisjoint(non_transitive_roles)
- else:
- can_transfer = True
- node_roles = {
- role
- for role in getattr(node.attributes, ROLES, set())
- if role != env[META_ROLE]
- }
- if (
- # TODO: make the "always transitive" roles configurable
- env[BASE_ROLE] not in node_roles
- # If we are not transferrable
- and not can_transfer
- # Checks if we are actually crossing a boundry
- and node_roles.isdisjoint(roles)
- ):
+def scan_for_transitive_install(node, env, _path):
+ """Walk the children of node finding all installed dependencies of it."""
+ component = get_component(node.sources[0])
+ role = get_role(node.sources[0])
+ if component is None:
return []
- if cb is not None and callable(cb):
- return cb(components, roles, non_transitive_roles, node, node_roles, actions,)
- return actions
-
-
-def scan_for_transitive_install(node, env, cb=None):
- """Walk the children of node finding all installed dependencies of it."""
+ scanned = getattr(node.attributes, "AIB_SCANNED", [])
+ if scanned:
+ return scanned
+
+ # Access directly by keys because we don't want to accidentally
+ # create a new entry via get_alias_map_entry and instead should
+ # throw a KeyError if we got here without valid components and
+ # roles
+ alias_map = env[ALIAS_MAP]
+ entry = alias_map[component][role]
+ role_deps = env[ROLE_DECLARATIONS].get(role).dependencies
results = []
- install_sources = node.sources
- # Filter out all
- components = {
- component
- for component in getattr(node.sources[0].attributes, COMPONENTS, set())
- if component != "all"
- }
- roles = {
- role
- for role in getattr(node.sources[0].attributes, ROLES, set())
- if role != env[META_ROLE]
- }
- non_transitive_roles = {
- role for role in roles if env[ROLE_DECLARATIONS][role].transitive
- }
- for install_source in install_sources:
- install_executor = install_source.get_executor()
- if not install_executor:
- continue
- install_targets = install_executor.get_all_targets()
- if not install_targets:
+ # We have to explicitly look at the various BASE files here since it's not
+ # guaranteed they'll be pulled in anywhere in our grandchildren but we need
+    # to always depend upon them. For example, if env.AutoInstall installs 'foo'
+ # tagged as common base but it's never used as a source for the
+ # AutoInstalled file we're looking at or the children of our children (and
+ # so on) then 'foo' would never get scanned in here without this explicit
+ # dependency adding.
+ base_component = env.get(BASE_COMPONENT)
+ if base_component and component != base_component:
+ base_role_entry = alias_map[base_component][role]
+ if base_role_entry.files:
+ results.extend(base_role_entry.files)
+
+ base_role = env.get(BASE_ROLE)
+ if base_role and role != base_role:
+ component_base_entry = alias_map[component][base_role]
+ if component_base_entry.files:
+ results.extend(component_base_entry.files)
+
+ if (
+ base_role
+ and base_component
+ and component != base_component
+ and role != base_role
+ ):
+ base_base_entry = alias_map[base_component][base_role]
+ if base_base_entry.files:
+ results.extend(base_base_entry.files)
+
+ installed_children = [
+ grandchild
+ for child in node.children()
+ for grandchild in child.children()
+ if grandchild.has_builder()
+ ]
+
+ for child in installed_children:
+ auto_installed_files = get_auto_installed_files(env, child)
+ if not auto_installed_files:
continue
- for install_target in install_targets:
- grandchildren = install_target.children()
- for grandchild in grandchildren:
- results.extend(
- get_dependent_actions(
- env, components, roles, non_transitive_roles, grandchild, cb=cb,
- )
- )
+
+ child_role = get_role(child)
+ if child_role == role or child_role in role_deps:
+ child_component = get_component(child)
+ child_entry = get_alias_map_entry(env, child_component, child_role)
+
+ # This is where component inheritance happens. We need a default
+ # component for everything so we can store it but if during
+ # transitive scanning we see a child with the default component here
+ # we will move that file to our component. This prevents
+ # over-stepping the DAG bounds since the default component is likely
+ # to be large and an explicitly tagged file is unlikely to depend on
+ # everything in it.
+ if child_component == env.get(DEFAULT_COMPONENT):
+ setattr(node.attributes, COMPONENT, component)
+ for f in auto_installed_files:
+ child_entry.files.discard(f)
+ entry.files.update(auto_installed_files)
+ elif component != child_component:
+ entry.dependencies.add(child_entry)
+
+ results.extend(auto_installed_files)
# Produce deterministic output for caching purposes
results = sorted(results, key=str)
+ setattr(node.attributes, "AIB_SCANNED", results)
return results
-def collect_transitive_files(env, source, installed, cache=None):
- """Collect all installed transitive files for source where source is a list of either Alias or File nodes."""
-
- if not cache:
- cache = set()
+def scan_for_transitive_install_pseudobuilder(env, node):
+ return scan_for_transitive_install(node, env, None)
- files = []
-
- for s in source:
- cache.add(s)
-
- if isinstance(s, SCons.Node.FS.File):
- if s not in installed:
- continue
- files.append(s)
-
- children_to_collect = []
- for child in s.children():
- if child in cache:
- continue
- if isinstance(child, SCons.Node.FS.File) and child not in installed:
- continue
- children_to_collect.append(child)
+def tag_components(env, target, **kwargs):
+ """Create component and role dependency objects"""
+ target = env.Flatten([target])
+ component = kwargs.get(COMPONENT)
+ role = kwargs.get(ROLE)
+ if component is not None and (not isinstance(component, str) or " " in component):
+ raise Exception("AIB_COMPONENT must be a string and contain no whitespace.")
- if children_to_collect:
- files.extend(
- collect_transitive_files(env, children_to_collect, installed, cache)
+ if component is None:
+ raise Exception(
+ "AIB_COMPONENT must be provided; untagged targets: {}".format(
+ [t.path for t in target]
)
+ )
- return files
-
-
-def archive_builder(source, target, env, for_signature):
- """Build archives of the AutoInstall'd sources."""
- if not source:
- return
-
- source = env.Flatten([source])
- make_archive_script = source[0].get_abspath()
-
- # We expect this to be a list of aliases, but really they could be
- # any sort of node.
- aliases = source[1:]
-
- common_ancestor = None
- archive_type = env["__AIB_ARCHIVE_TYPE"]
-
- # Get the path elements that make up both DESTDIR and PREFIX. Then
- # iterate the dest_dir_elems with the prefix path elements
- # stripped off the end of the path converting them to strings for
- # joining to make the common_ancestor.
- #
- # We pass the common_ancestor to tar via -C so that $PREFIX is
- # preserved in the tarball.
- dest_dir_elems = env.Dir("$DESTDIR").get_abspath()
- prefix_elems = env.subst("$PREFIX")
- # In python slicing a string with [:-0] gives an empty string. So
- # make sure we have a prefix to slice off before trying it.
- if prefix_elems:
- common_ancestor = dest_dir_elems[: -len(prefix_elems)]
- else:
- common_ancestor = dest_dir_elems
-
- archive_name = env.File(target[0]).get_abspath()
-
- command_prefix = "{python} {make_archive_script} {archive_type} {archive_name} {common_ancestor}".format(
- python=sys.executable,
- archive_type=archive_type,
- archive_name=archive_name,
- make_archive_script=make_archive_script,
- common_ancestor=common_ancestor,
- )
+ if role is None:
+ raise Exception("AIB_ROLE was not provided.")
- # If we are just being invoked for our signature, we can omit the indirect dependencies
- # found by expanding the transitive dependencies, since we really only have a hard dependency
- # on our direct depenedencies.
- if for_signature:
- return command_prefix
-
- # Pre-process what should be in the archive. We need to pass the
- # set of known installed files along to the transitive dependency
- # walk so we can filter out files that aren't in the install
- # directory.
- installed = env.get("__AIB_INSTALLED_SET", set())
- transitive_files = collect_transitive_files(env, aliases, installed)
- paths = {file.get_abspath() for file in transitive_files}
-
- # The env["ESCAPE"] function is used by scons to make arguments
- # valid for the platform that we're running on. For instance it
- # will properly quote paths that have spaces in them on Posix
- # platforms and handle \ / on Windows.
- escape_func = env.get("ESCAPE", lambda x: x)
-
- # TODO: relpath is costly, and we do it for every file in the archive here. We should
- # find a way to avoid the repeated relpath invocation, probably by bucketing by directory.
- relative_files = " ".join(
- [escape_func(os.path.relpath(path, common_ancestor)) for path in paths]
- )
+ for t in target:
+ t.attributes.keep_targetinfo = 1
+ setattr(t.attributes, COMPONENT, component)
+ setattr(t.attributes, ROLE, role)
- return " ".join([command_prefix, relative_files])
+ entry = get_alias_map_entry(env, component, role)
+ # We cannot wire back dependencies to any combination of meta role, meta
+ # component or base component. These cause dependency cycles because
+ # get_alias_map_entry will do that wiring for us then we will try to
+ # map them back on themselves in our loop.
+ if (
+ component != env.get(BASE_COMPONENT)
+ and role != env.get(META_ROLE)
+ and component != env.get(META_COMPONENT)
+ ):
+ for component in kwargs.get(REVERSE_COMPONENT_DEPENDENCIES, []):
+ component_dep = get_alias_map_entry(env, component, role)
+ component_dep.dependencies.add(entry)
-def auto_install(env, target, source, **kwargs):
- """Auto install builder."""
- source = [env.Entry(s) for s in env.Flatten([source])]
- roles = {
- kwargs.get(PRIMARY_ROLE),
- }
+ return entry
- if env[META_ROLE]:
- roles.add(env[META_ROLE])
- if kwargs.get(ROLES) is not None:
- roles = roles.union(set(kwargs[ROLES]))
+def auto_install_task(env, component, role):
+ """Auto install task."""
+ entry = get_alias_map_entry(env, component, role)
+ return list(entry.files)
- component = kwargs.get(PRIMARY_COMPONENT)
- if component is not None and (not isinstance(component, str) or " " in component):
- raise Exception("AIB_COMPONENT must be a string and contain no whitespace.")
- components = {
- component,
- # The 'all' tag is implicitly attached as a component
- "all",
- }
- # Some tools will need to create multiple components so we add
- # this "hidden" argument that accepts a set or list.
- #
- # Use get here to check for existence because it is rarely
- # ommitted as a kwarg (because it is set by the default emitter
- # for all common builders), but is often set to None.
- if kwargs.get(COMPONENTS) is not None:
- components = components.union(set(kwargs[COMPONENTS]))
+def auto_install_pseudobuilder(env, target, source, **kwargs):
+ """Auto install pseudo-builder."""
+ source = env.Flatten([source])
+ source = [env.File(s) for s in source]
+ entry = env.TagComponents(source, **kwargs)
- # Remove false values such as None or ""
- roles = {role for role in roles if role}
- components = {component for component in components if component}
+ installed_files = []
+ for s in source:
+ if not target:
+ auto_install_mapping = env[SUFFIX_MAP].get(s.get_suffix())
+ if not auto_install_mapping:
+ raise Exception(
+ "No target provided and no auto install mapping found for:", str(s)
+ )
- actions = []
+ target = auto_install_mapping.directory
- for s in source:
- s.attributes.keep_targetinfo = 1
- setattr(s.attributes, COMPONENTS, components)
- setattr(s.attributes, ROLES, roles)
+ # We've already auto installed this file and it may have belonged to a
+ # different role since it wouldn't get retagged above. So we just skip
+        # these files since SCons will already wire the dependency since s is a
+ # source and so the file will get installed. A common error here is
+ # adding debug files to the runtime component file if we do not skip
+ # this.
+ existing_installed_files = get_auto_installed_files(env, s)
+ if existing_installed_files:
+ continue
- # We must do an early subst here so that the _aib_debugdir
+        # We must do an early subst here so that the _aib_debugdir
# generator has a chance to run while seeing 'source'.
#
# TODO: Find a way to not need this early subst.
- target = env.Dir(env.subst(target, source=source))
+ target = env.Dir(env.subst(target, source=s))
+ new_installed_files = env.Install(target=target, source=s)
+ setattr(s.attributes, INSTALLED_FILES, new_installed_files)
- action = env.Install(target=target, source=s,)
-
- setattr(
- s.attributes,
- INSTALL_ACTIONS,
- action if isinstance(action, (list, set)) else [action],
- )
- actions.append(action)
+ installed_files.extend(new_installed_files)
- actions = env.Flatten(actions)
- for component, role in itertools.product(components, roles):
-
- entry = get_alias_map_entry(env, component, role)
- entry.components.update(components)
- entry.roles.update(roles)
- entry.actions.extend(actions)
-
- # TODO: this hard codes behavior that should be done configurably
- if component != "common":
- dentry = get_alias_map_entry(env, "common", role)
- entry.dependencies.append(dentry)
-
- return actions
+ entry.files.update(installed_files)
+ return installed_files
def finalize_install_dependencies(env):
- """Generates package aliases and wires install dependencies."""
-
- installed = set(env.FindInstalledFiles())
-
- for component, rolemap in env[ALIAS_MAP].items():
- for role, info in rolemap.items():
- info.alias.extend(env.Alias(info.alias_name, info.actions))
- setattr(info.alias[0].attributes, COMPONENTS, info.components)
- setattr(info.alias[0].attributes, ROLES, info.roles)
- env.Depends(info.alias, [d.alias for d in info.dependencies])
-
- common_rolemap = env[ALIAS_MAP].get("common")
- default_rolemap = env[ALIAS_MAP].get("default")
-
- if default_rolemap and "runtime" in default_rolemap:
- env.Alias("install", "install-default")
- env.Default("install")
-
- # TODO: $BUILD_ROOT should be $VARIANT_DIR after we fix our dir
- # setup later on.
- make_archive_script = env.Textfile(
- target="$BUILD_ROOT/aib_make_archive.py", source=[AIB_MAKE_ARCHIVE_CONTENT],
- )
-
- for component, rolemap in env[ALIAS_MAP].items():
- for role, info in rolemap.items():
-
- if common_rolemap and component != "common" and role in common_rolemap:
- env.Depends(info.alias, common_rolemap[role].alias)
-
- role_decl = env[ROLE_DECLARATIONS].get(role)
- for dependency in role_decl.dependencies:
- dependency_info = rolemap.get(dependency, [])
- if dependency_info:
- env.Depends(info.alias, dependency_info.alias)
-
- pkg_name = get_package_name(env, component, role)
-
- for fmt in ("zip", "tar"):
- if fmt == "zip":
- pkg_suffix = "$AIB_ZIP_SUFFIX"
- else:
- pkg_suffix = "$AIB_TARBALL_SUFFIX"
-
- archive = env.__AibArchive(
- target="$PKGDIR/{}.{}".format(pkg_name, pkg_suffix),
- source=[make_archive_script] + info.alias,
- __AIB_ARCHIVE_TYPE=fmt,
- __AIB_INSTALLED_SET=installed,
- AIB_COMPONENT=component,
- AIB_ROLE=role,
- )
-
- if not env.get("AIB_CACHE_ARCHIVES", False):
- env.NoCache(archive)
-
- compression_alias = generate_alias(env, component, role, target=fmt)
- env.Alias(compression_alias, archive)
-
- default_fmt = "zip" if env["PLATFORM"] == "win32" else "tar"
- archive_alias = generate_alias(env, component, role, target="archive")
- default_compression_alias = generate_alias(env, component, role, target=default_fmt)
- env.Alias(archive_alias, default_compression_alias)
-
+ """Generates task aliases and wires install dependencies."""
+
+ # Wire up component dependencies and generate task aliases
+ for task, func in env[TASKS].items():
+ generate_dependent_aliases = True
+
+ # The task map is a map of string task names (i.e. "install" by default)
+ # to either a tuple or function. If it's a function we assume that we
+ # generate dependent aliases for that task, otherwise if it's a tuple we
+ # deconstruct it here to get the function (the first element) and a
+ # boolean indicating whether or not to generate dependent aliases for
+ # that task. For example the "archive" task added by the auto_archive
+ # tool disables them because tarballs do not track dependencies so you
+ # do not want archive-foo to build archive-bar as well if foo depends on
+ # bar.
+ if isinstance(func, tuple):
+ func, generate_dependent_aliases = func
+
+ for component, rolemap in env[ALIAS_MAP].items():
+ for role, info in rolemap.items():
+ alias_name = generate_alias_name(env, component, role, task)
+ alias = env.Alias(alias_name, func(env, component, role))
+ if generate_dependent_aliases:
+ dependent_aliases = env.Flatten(
+ [
+ env.Alias(
+ generate_alias_name(env, d.component, d.role, task)
+ )
+ for d in info.dependencies
+ ]
+ )
+ env.Alias(alias, dependent_aliases)
def auto_install_emitter(target, source, env):
"""When attached to a builder adds an appropriate AutoInstall to that Builder."""
+
for t in target:
- entry = env.Entry(t)
- suffix = entry.get_suffix()
+ if isinstance(t, str):
+ t = env.File(t)
+
+ suffix = t.get_suffix()
if env.get("AIB_IGNORE", False):
continue
@@ -589,18 +444,17 @@ def auto_install_emitter(target, source, env):
# way available to us.
#
# We're working with upstream to expose this information.
- if "conftest" in str(entry):
+ if "conftest" in str(t):
continue
auto_install_mapping = env[SUFFIX_MAP].get(suffix)
if auto_install_mapping is not None:
env.AutoInstall(
auto_install_mapping.directory,
- entry,
- AIB_COMPONENT=env.get(PRIMARY_COMPONENT),
- AIB_ROLE=env.get(PRIMARY_ROLE),
- AIB_ROLES=auto_install_mapping.default_roles,
- AIB_COMPONENTS_EXTRA=env.get(COMPONENTS),
+ t,
+ AIB_COMPONENT=env.get(COMPONENT, env.get(DEFAULT_COMPONENT, None)),
+ AIB_ROLE=env.get(ROLE, auto_install_mapping.default_role),
+ AIB_COMPONENTS_EXTRA=env.get(REVERSE_COMPONENT_DEPENDENCIES, []),
)
return (target, source)
@@ -611,7 +465,7 @@ def add_suffix_mapping(env, suffix, role=None):
if isinstance(suffix, str):
if role not in env[ROLE_DECLARATIONS]:
raise Exception(
- "target {} is not a known role. Available roles are {}".format(
+                "target {} is not a known role. Available roles are {}".format(
role, env[ROLE_DECLARATIONS].keys()
)
)
@@ -621,34 +475,20 @@ def add_suffix_mapping(env, suffix, role=None):
raise Exception("source must be a dictionary or a string")
for _, mapping in suffix.items():
- for role in mapping.default_roles:
- if role not in env[ROLE_DECLARATIONS]:
- raise Exception(
- "target {} is not a known role. Available roles are {}".format(
- target, env[ROLE_DECLARATIONS].keys()
- )
+ role = mapping.default_role
+ if role not in env[ROLE_DECLARATIONS]:
+ raise Exception(
+ "target {} is not a known role. Available roles are {}".format(
+ target, env[ROLE_DECLARATIONS].keys()
)
+ )
env[SUFFIX_MAP].update({env.subst(key): value for key, value in suffix.items()})
-def add_package_name_alias(env, component, role, name):
- """Add a package name mapping for the combination of component and role."""
- # Verify we didn't get a None or empty string for any argument
- if not name:
- raise Exception(
- "when setting a package name alias must provide a name parameter"
- )
- if not component:
- raise Exception("No component provided for package name alias")
- if not role:
- raise Exception("No role provided for package name alias")
- env[PACKAGE_ALIAS_MAP][(component, role)] = name
-
-
-def suffix_mapping(env, directory=False, default_roles=False):
+def suffix_mapping(env, directory=False, default_role=False):
"""Generate a SuffixMap object from source and target."""
- return SuffixMap(directory=directory, default_roles=default_roles,)
+ return SuffixMap(directory=directory, default_role=default_role)
def dest_dir_generator(initial_value=None):
@@ -684,37 +524,51 @@ def dest_dir_generator(initial_value=None):
return generator
-def exists(_env):
- """Always activate this tool."""
- return True
+def get_auto_installed_files(env, node):
+ return getattr(node.attributes, INSTALLED_FILES, [])
def list_components(env, **kwargs):
"""List registered components for env."""
print("Known AIB components:")
- for key in sorted(env[ALIAS_MAP]):
+ for key in env[ALIAS_MAP]:
print("\t", key)
-def list_targets(env, **kwargs):
- """List AIB generated targets for env."""
- print("Generated AIB targets:")
- for _, rolemap in env[ALIAS_MAP].items():
- for _, info in rolemap.items():
- print("\t", info.alias[0].name)
+def list_recursive(mapping, counter=0):
+ if counter == 0:
+ print(" " * counter, mapping.id)
+ counter += 1
+ for dep in mapping.dependencies:
+ print(" " * counter, dep.id)
+ list_recursive(dep, counter=counter)
+
+
+def list_targets(dag_mode=False):
+ def target_lister(env, **kwargs):
+ if dag_mode:
+ installed_files = set(env.FindInstalledFiles())
+ for f in installed_files:
+ scan_for_transitive_install(f, env, None)
+
+ mapping = env[ALIAS_MAP][env[META_COMPONENT]][env[META_ROLE]]
+ list_recursive(mapping)
+
+ return target_lister
+
+
+def get_role_declaration(env, role):
+ return env[ROLE_DECLARATIONS][role]
+
+
+def exists(_env):
+ """Always activate this tool."""
+ return True
def generate(env): # pylint: disable=too-many-statements
"""Generate the auto install builders."""
- bld = SCons.Builder.Builder(
- action=SCons.Action.CommandGeneratorAction(
- archive_builder,
- {"cmdstr": "Building package ${TARGETS[0]} from ${SOURCES[1:]}"},
- )
- )
- env.Append(BUILDERS={"__AibArchive": bld})
- env["AIB_TARBALL_SUFFIX"] = env.get("AIB_TARBALL_SUFFIX", "tar.gz")
- env["AIB_ZIP_SUFFIX"] = env.get("AIB_ZIP_SUFFIX", "zip")
+ env["AUTO_INSTALL_ENABLED"] = True
# Matches the autoconf documentation:
# https://www.gnu.org/prep/standards/html_node/Directory-Variables.html
@@ -724,18 +578,25 @@ def generate(env): # pylint: disable=too-many-statements
env["PREFIX_SHAREDIR"] = env.get("PREFIX_SHAREDIR", "$DESTDIR/share")
env["PREFIX_DOCDIR"] = env.get("PREFIX_DOCDIR", "$PREFIX_SHAREDIR/doc")
env["PREFIX_INCLUDEDIR"] = env.get("PREFIX_INCLUDEDIR", "$DESTDIR/include")
- env["PKGDIR"] = env.get("PKGDIR", "$VARIANT_DIR/pkgs")
env[SUFFIX_MAP] = {}
- env[PACKAGE_ALIAS_MAP] = {}
env[ALIAS_MAP] = defaultdict(dict)
- env.AddMethod(suffix_mapping, "SuffixMap")
+ env[TASKS] = {
+ "install": auto_install_task,
+ }
+
+ env.AddMethod(
+ scan_for_transitive_install_pseudobuilder, "GetTransitivelyInstalledFiles"
+ )
+ env.AddMethod(get_role_declaration, "GetRoleDeclaration")
+ env.AddMethod(get_auto_installed_files, "GetAutoInstalledFiles")
+ env.AddMethod(tag_components, "TagComponents")
+ env.AddMethod(auto_install_pseudobuilder, "AutoInstall")
env.AddMethod(add_suffix_mapping, "AddSuffixMapping")
- env.AddMethod(add_package_name_alias, "AddPackageNameAlias")
- env.AddMethod(auto_install, "AutoInstall")
- env.AddMethod(finalize_install_dependencies, "FinalizeInstallDependencies")
env.AddMethod(declare_role, "Role")
env.AddMethod(declare_roles, "DeclareRoles")
+ env.AddMethod(finalize_install_dependencies, "FinalizeInstallDependencies")
+ env.AddMethod(suffix_mapping, "SuffixMap")
env.Tool("install")
# TODO: we should probably expose these as PseudoBuilders and let
@@ -743,15 +604,18 @@ def generate(env): # pylint: disable=too-many-statements
env.Alias("list-aib-components", [], [list_components])
env.AlwaysBuild("list-aib-components")
- env.Alias("list-aib-targets", [], [list_targets])
+ env.Alias("list-aib-targets", [], [list_targets(dag_mode=False)])
env.AlwaysBuild("list-aib-targets")
+ env.Alias("list-aib-dag", [], [list_targets(dag_mode=True)])
+ env.AlwaysBuild("list-aib-dag")
+
for builder in ["Program", "SharedLibrary", "LoadableModule", "StaticLibrary"]:
builder = env["BUILDERS"][builder]
base_emitter = builder.emitter
# TODO: investigate if using a ListEmitter here can cause
# problems if AIB is not loaded last
- new_emitter = SCons.Builder.ListEmitter([base_emitter, auto_install_emitter,])
+ new_emitter = SCons.Builder.ListEmitter([base_emitter, auto_install_emitter])
builder.emitter = new_emitter
base_install_builder = install.BaseInstallBuilder
diff --git a/site_scons/site_tools/mongo_benchmark.py b/site_scons/site_tools/mongo_benchmark.py
index 22139bf6d25..af72458dacc 100644
--- a/site_scons/site_tools/mongo_benchmark.py
+++ b/site_scons/site_tools/mongo_benchmark.py
@@ -35,8 +35,14 @@ def build_benchmark(env, target, source, **kwargs):
kwargs["LIBDEPS"] = libdeps
benchmark_test_components = {"tests", "benchmarks"}
- if "AIB_COMPONENT" in kwargs and not kwargs["AIB_COMPONENT"].endswith("-benchmark"):
+ primary_component = kwargs.get("AIB_COMPONENT", env.get("AIB_COMPONENT", ""))
+ if primary_component and not primary_component.endswith("-benchmark"):
kwargs["AIB_COMPONENT"] += "-benchmark"
+ elif primary_component:
+ kwargs["AIB_COMPONENT"] = primary_component
+ else:
+ kwargs["AIB_COMPONENT"] = "benchmarks"
+ benchmark_test_components = {"tests"}
if "AIB_COMPONENTS_EXTRA" in kwargs:
benchmark_test_components = set(kwargs["AIB_COMPONENTS_EXTRA"]).union(
diff --git a/site_scons/site_tools/mongo_integrationtest.py b/site_scons/site_tools/mongo_integrationtest.py
index 4cc89fc57bd..469d629ade1 100644
--- a/site_scons/site_tools/mongo_integrationtest.py
+++ b/site_scons/site_tools/mongo_integrationtest.py
@@ -15,8 +15,14 @@ def build_cpp_integration_test(env, target, source, **kwargs):
kwargs["LIBDEPS"] = libdeps
integration_test_components = {"tests", "integration-tests"}
- if "AIB_COMPONENT" in kwargs and not kwargs["AIB_COMPONENT"].endswith("-test"):
+ primary_component = kwargs.get("AIB_COMPONENT", env.get("AIB_COMPONENT", ""))
+ if primary_component and not primary_component.endswith("-test"):
kwargs["AIB_COMPONENT"] += "-test"
+ elif primary_component:
+ kwargs["AIB_COMPONENT"] = primary_component
+ else:
+ kwargs["AIB_COMPONENT"] = "integration-tests"
+ integration_test_components = {"tests"}
if "AIB_COMPONENTS_EXTRA" in kwargs:
kwargs["AIB_COMPONENTS_EXTRA"] = set(kwargs["AIB_COMPONENTS_EXTRA"]).union(
diff --git a/site_scons/site_tools/mongo_test_list.py b/site_scons/site_tools/mongo_test_list.py
index 163bfe08293..564f4652625 100644
--- a/site_scons/site_tools/mongo_test_list.py
+++ b/site_scons/site_tools/mongo_test_list.py
@@ -21,14 +21,14 @@ TEST_REGISTRY = defaultdict(list)
def register_test(env, file, test):
"""Register test into the dictionary of tests for file_name"""
- test_path = test.path
- if getattr(test.attributes, "AIB_INSTALL_ACTIONS", []):
- test_path = getattr(test.attributes, "AIB_INSTALL_ACTIONS")[0].path
+ test_path = test
+ if env.get("AUTO_INSTALL_ENABLED", False) and env.GetAutoInstalledFiles(test):
+ test_path = env.GetAutoInstalledFiles(test)[0]
if SCons.Util.is_String(file):
file = env.File(file)
- env.Depends(file, test)
+ env.Depends(file, test_path)
file_name = file.path
TEST_REGISTRY[file_name].append(test_path)
env.GenerateTestExecutionAliases(test)
@@ -41,7 +41,7 @@ def test_list_builder_action(env, target, source):
else:
filename = target[0].path
- source = [env.subst(s) if SCons.Util.is_String(s) else s.path for s in source]
+ source = [env.File(s).path if SCons.Util.is_String(s) else s.path for s in source]
with open(filename, "w") as ofile:
tests = TEST_REGISTRY[filename]
diff --git a/site_scons/site_tools/mongo_unittest.py b/site_scons/site_tools/mongo_unittest.py
index 27a0c9a7b5b..43fddca9234 100644
--- a/site_scons/site_tools/mongo_unittest.py
+++ b/site_scons/site_tools/mongo_unittest.py
@@ -18,15 +18,12 @@ from SCons.Script import Action
def register_unit_test(env, test):
"""
- Kept around for compatibility.
-
- Some SConscripts called RegisterUnitTest directly.
+ Kept around for compatibility with non-hygienic builds. The only callers of
+ this should be the intel_readtest_wrapper SConscript. All original callers
+ have been updated to use UNITTEST_HAS_CUSTOM_MAINLINE.
"""
env.RegisterTest("$UNITTEST_LIST", test)
- aib_install_actions = getattr(test.attributes, "AIB_INSTALL_ACTIONS", [])
- if aib_install_actions:
- env.Alias("$UNITTEST_ALIAS", aib_install_actions)
- else:
+ if not env.get("AUTO_INSTALL_ENABLED", False):
env.Alias("$UNITTEST_ALIAS", test)
@@ -35,16 +32,20 @@ def exists(env):
def build_cpp_unit_test(env, target, source, **kwargs):
- libdeps = kwargs.get("LIBDEPS", [])
- libdeps.append("$BUILD_DIR/mongo/unittest/unittest_main")
+ if not kwargs.get("UNITTEST_HAS_CUSTOM_MAINLINE", False):
+ libdeps = kwargs.get("LIBDEPS", [])
+ libdeps.append("$BUILD_DIR/mongo/unittest/unittest_main")
+ kwargs["LIBDEPS"] = libdeps
- kwargs["LIBDEPS"] = libdeps
unit_test_components = {"tests", "unittests"}
primary_component = kwargs.get("AIB_COMPONENT", env.get("AIB_COMPONENT", ""))
if primary_component and not primary_component.endswith("-test"):
kwargs["AIB_COMPONENT"] = primary_component + "-test"
elif primary_component:
kwargs["AIB_COMPONENT"] = primary_component
+ else:
+ kwargs["AIB_COMPONENT"] = "unittests"
+ unit_test_components = {"tests"}
if "AIB_COMPONENTS_EXTRA" in kwargs:
kwargs["AIB_COMPONENTS_EXTRA"] = set(kwargs["AIB_COMPONENTS_EXTRA"]).union(
diff --git a/src/mongo/SConscript b/src/mongo/SConscript
index 64a84eae124..f12d196834b 100644
--- a/src/mongo/SConscript
+++ b/src/mongo/SConscript
@@ -484,6 +484,7 @@ mongod = env.Program(
"dist",
"dist-test",
"servers",
+ "integration-tests",
],
)
@@ -589,7 +590,8 @@ mongos = env.Program(
"core",
"dist",
"dist-test",
- "servers"
+ "servers",
+ "integration-tests",
]
)
@@ -687,6 +689,7 @@ if not has_option('noshell') and usemozjs:
"dist",
"dist-test",
"shell",
+ "integration-tests",
],
)
@@ -787,7 +790,6 @@ if hygienic:
distsrc.File('MPL-2'),
],
AIB_COMPONENT='common',
- AIB_COMPONENTS_EXTRA=['dist'],
AIB_ROLE='base',
)
else:
@@ -866,8 +868,7 @@ else:
source=[
compass_installer,
],
- AIB_COMPONENT='tools',
- AIB_COMPONENTS_EXTRA=['dist'],
+ AIB_COMPONENT='dist',
AIB_ROLE='runtime',
)
diff --git a/src/mongo/client/sdam/SConscript b/src/mongo/client/sdam/SConscript
index ce12275e3e9..0a10d22332f 100644
--- a/src/mongo/client/sdam/SConscript
+++ b/src/mongo/client/sdam/SConscript
@@ -36,7 +36,6 @@ sdam_json_test = env.Program(
'$BUILD_DIR/mongo/client/connection_string',
'$BUILD_DIR/mongo/util/options_parser/options_parser'],
)[0]
-env.RegisterUnitTest(sdam_json_test)
env.Library(
target='sdam_test',
diff --git a/src/mongo/embedded/mongo_embedded/SConscript b/src/mongo/embedded/mongo_embedded/SConscript
index 44f7c02ebec..99bf61ad99c 100644
--- a/src/mongo/embedded/mongo_embedded/SConscript
+++ b/src/mongo/embedded/mongo_embedded/SConscript
@@ -86,7 +86,7 @@ yamlEnv = env.Clone()
yamlEnv.InjectThirdParty(libraries=['yaml'])
if get_option('link-model') != 'dynamic-sdk':
- mongoEmbeddedTest = yamlEnv.Program(
+ mongoEmbeddedTest = yamlEnv.CppUnitTest(
target='mongo_embedded_test',
source=[
'mongo_embedded_test.cpp',
@@ -102,14 +102,10 @@ if get_option('link-model') != 'dynamic-sdk':
'$BUILD_DIR/mongo/util/options_parser/options_parser',
'mongo_embedded',
],
+ UNITTEST_HAS_CUSTOM_MAINLINE=True,
AIB_COMPONENT='embedded-test',
- AIB_COMPONENTS_EXTRA=[
- 'tests',
- ],
)
- env.RegisterUnitTest(mongoEmbeddedTest[0])
-
# Frameworkization craziness begins here. Honestly, we should do this
# better in the future in some re-usable way, but we need to get this
# thing out the door, so here goes.
diff --git a/src/mongo/embedded/mongoc_embedded/SConscript b/src/mongo/embedded/mongoc_embedded/SConscript
index d7db9c1cb07..d4bca37472c 100644
--- a/src/mongo/embedded/mongoc_embedded/SConscript
+++ b/src/mongo/embedded/mongoc_embedded/SConscript
@@ -87,7 +87,7 @@ yamlEnv.InjectThirdParty(libraries=['yaml'])
if get_option('link-model') != 'dynamic-sdk':
mongocEmbeddedTestEnv = create_mongoc_env(yamlEnv)
- clientTest = mongocEmbeddedTestEnv.Program(
+ clientTest = mongocEmbeddedTestEnv.CppUnitTest(
target='mongoc_embedded_test',
source=[
'mongoc_embedded_test.cpp',
@@ -100,14 +100,10 @@ if get_option('link-model') != 'dynamic-sdk':
'$BUILD_DIR/mongo/util/options_parser/options_parser',
'mongoc_embedded',
],
+ UNITTEST_HAS_CUSTOM_MAINLINE=True,
AIB_COMPONENT='embedded-test',
- AIB_COMPONENTS_EXTRA=[
- 'tests',
- ],
)
- env.RegisterUnitTest(clientTest[0]);
-
# Frameworkization craziness begins here. Honestly, we should do this
# better in the future in some re-usable way, but we need to get this
# thing out the door, so here goes.
diff --git a/src/mongo/embedded/stitch_support/SConscript b/src/mongo/embedded/stitch_support/SConscript
index b1090b97371..6252a77f889 100644
--- a/src/mongo/embedded/stitch_support/SConscript
+++ b/src/mongo/embedded/stitch_support/SConscript
@@ -59,7 +59,7 @@ if get_option('install-mode') == 'hygienic':
if get_option('link-model') != 'dynamic-sdk':
stitchSupportTestEnv = env.Clone()
- unitTest = stitchSupportTestEnv.Program(
+ unitTest = stitchSupportTestEnv.CppUnitTest(
target="stitch_support_test",
source=[
"stitch_support_test.cpp",
@@ -68,11 +68,6 @@ if get_option('link-model') != 'dynamic-sdk':
'$BUILD_DIR/mongo/unittest/unittest',
'stitch_support',
],
+ UNITTEST_HAS_CUSTOM_MAINLINE=True,
AIB_COMPONENT='stitch-support-test',
- AIB_COMPONENTS_EXTRA=[
- 'tests',
- 'unittests',
- ],
)
-
- env.RegisterUnitTest(unitTest[0]);
diff --git a/src/mongo/logger/SConscript b/src/mongo/logger/SConscript
index c0761bdac92..7be4a25268f 100644
--- a/src/mongo/logger/SConscript
+++ b/src/mongo/logger/SConscript
@@ -20,6 +20,7 @@ env.Program(
'$BUILD_DIR/mongo/base',
'$BUILD_DIR/mongo/unittest/unittest_main'
],
+ AIB_COMPONENT="standalone-console-test",
)
env.CppUnitTest(
diff --git a/src/mongo/shell/utils.js b/src/mongo/shell/utils.js
index f1a406336a9..05c61f2a2eb 100644
--- a/src/mongo/shell/utils.js
+++ b/src/mongo/shell/utils.js
@@ -259,7 +259,7 @@ jsTestName = function() {
return "__unknown_name__";
};
-var _jsTestOptions = {enableTestCommands: true}; // Test commands should be enabled by default
+var _jsTestOptions = {};
jsTestOptions = function() {
if (TestData) {
@@ -364,10 +364,6 @@ jsTestOptions = function() {
return _jsTestOptions;
};
-setJsTestOption = function(name, value) {
- _jsTestOptions[name] = value;
-};
-
jsTestLog = function(msg) {
if (typeof msg === "object") {
msg = tojson(msg);
@@ -381,7 +377,6 @@ jsTest = {};
jsTest.name = jsTestName;
jsTest.options = jsTestOptions;
-jsTest.setOption = setJsTestOption;
jsTest.log = jsTestLog;
jsTest.readOnlyUserRoles = ["read"];
jsTest.basicUserRoles = ["dbOwner"];
diff --git a/src/mongo/stdx/SConscript b/src/mongo/stdx/SConscript
index a93289ff543..02af02ec6f3 100644
--- a/src/mongo/stdx/SConscript
+++ b/src/mongo/stdx/SConscript
@@ -35,17 +35,19 @@ env.CppUnitTest(
],
)
-# Not a CppUnitTest because it needs low-level control of thread creation and signals,
-# so it shouldn't use unittest_main and typical mongo startup routines.
-env.RegisterUnitTest(env.Program(
+# Specify UNITTEST_HAS_CUSTOM_MAINLINE because it needs low-level control of
+# thread creation and signals, so it shouldn't use unittest_main and typical
+# mongo startup routines.
+env.CppUnitTest(
target='sigaltstack_location_test',
source=[
'sigaltstack_location_test.cpp',
],
LIBDEPS=[
'stdx',
- ]
-)[0])
+ ],
+ UNITTEST_HAS_CUSTOM_MAINLINE=True,
+)
# The tests for `stdx::set_terminate` need to run outside of the mongo unittest harneses.
# The tests require altering the global `set_terminate` handler, which our unittest framework
@@ -56,45 +58,49 @@ env.RegisterUnitTest(env.Program(
# robust testing of this mechanism.
# Needs to be a different test -- It has to have direct control over the `main()` entry point.
-env.RegisterUnitTest(env.Program(
+env.CppUnitTest(
target='set_terminate_dispatch_test',
source=[
'set_terminate_dispatch_test.cpp',
],
LIBDEPS=[
'stdx',
- ]
-)[0])
+ ],
+ UNITTEST_HAS_CUSTOM_MAINLINE=True,
+)
# Needs to be a different test -- It has to have direct control over the `main()` entry point.
-env.RegisterUnitTest(env.Program(
+env.CppUnitTest(
target='set_terminate_from_main_die_in_thread_test',
source=[
'set_terminate_from_main_die_in_thread_test.cpp',
],
LIBDEPS=[
'stdx',
- ]
-)[0])
+ ],
+ UNITTEST_HAS_CUSTOM_MAINLINE=True,
+)
# Needs to be a different test -- It has to have direct control over the `main()` entry point.
-env.RegisterUnitTest(env.Program(
+env.CppUnitTest(
target='set_terminate_from_thread_die_in_main_test',
source=[
'set_terminate_from_thread_die_in_main_test.cpp',
],
LIBDEPS=[
'stdx',
- ]
-)[0])
+ ],
+ UNITTEST_HAS_CUSTOM_MAINLINE=True,
+)
# Needs to be a different test -- It has to have direct control over the `main()` entry point.
-env.RegisterUnitTest(env.Program(
+env.CppUnitTest(
target='set_terminate_from_thread_die_in_thread_test',
source=[
'set_terminate_from_thread_die_in_thread_test.cpp',
],
LIBDEPS=[
'stdx',
- ]
-)[0])
+ ],
+ UNITTEST_HAS_CUSTOM_MAINLINE=True,
+)
diff --git a/src/mongo/tools/SConscript b/src/mongo/tools/SConscript
index 7a500c2631b..840000a4217 100644
--- a/src/mongo/tools/SConscript
+++ b/src/mongo/tools/SConscript
@@ -29,6 +29,5 @@ mongobridge = env.Program(
'$BUILD_DIR/mongo/util/options_parser/options_parser_init',
'$BUILD_DIR/mongo/util/signal_handlers',
],
- AIB_COMPONENT='tools',
- AIB_COMPONENTS_EXTRA=["dist-test"],
+ AIB_COMPONENT='dist-test',
)
diff --git a/src/third_party/IntelRDFPMathLib20U1/SConscript b/src/third_party/IntelRDFPMathLib20U1/SConscript
index e5ba80446d6..58e1b7ba653 100644
--- a/src/third_party/IntelRDFPMathLib20U1/SConscript
+++ b/src/third_party/IntelRDFPMathLib20U1/SConscript
@@ -406,6 +406,17 @@ readtest_wrapper = env.Substfile(
SUBST_DICT=readtest_dict,
)
env.Depends(readtest_wrapper, readtest_input)
+if get_option("install-mode") == "hygienic":
+ readtest_wrapper_install = env.AutoInstall(
+ target="$PREFIX_BINDIR",
+ source=readtest_wrapper,
+ AIB_ROLE="runtime",
+ AIB_COMPONENT="intel-test",
+ AIB_COMPONENTS_EXTRA=[
+ "unittests",
+ "tests",
+ ],
+ )
if env.TargetOSIs('windows'):
readtest_wrapper_bat = env.Substfile(
@@ -426,23 +437,13 @@ if env.TargetOSIs('windows'):
"tests",
],
)
- env.RegisterUnitTest(readtest_wrapper_bat_install[0])
+ env.RegisterTest("$UNITTEST_LIST", readtest_wrapper_bat_install[0])
else:
env.RegisterUnitTest(readtest_wrapper_bat[0])
else:
if get_option("install-mode") == "hygienic":
- readtest_wrapper_install = env.AutoInstall(
- target="$PREFIX_BINDIR",
- source=readtest_wrapper,
- AIB_ROLE="runtime",
- AIB_COMPONENT="intel-test",
- AIB_COMPONENTS_EXTRA=[
- "unittests",
- "tests",
- ],
- )
- env.RegisterUnitTest(readtest_wrapper_install[0])
+ env.RegisterTest("$UNITTEST_LIST", readtest_wrapper_install[0])
else:
env.RegisterUnitTest(readtest_wrapper[0])
diff --git a/src/third_party/wiredtiger/SConscript b/src/third_party/wiredtiger/SConscript
index 8247432fc4e..d6bd665e23e 100644
--- a/src/third_party/wiredtiger/SConscript
+++ b/src/third_party/wiredtiger/SConscript
@@ -228,7 +228,7 @@ wtbin = wtbinEnv.Program(
# however, we must link with snappy, etc. as C++. The smart_link() function isn't used by
# default on Windows, so we leave the value unchanged on other platforms.
LINK="$CXX" if wtbinEnv["LINK"] == "$SMARTLINK" else wtbinEnv["LINK"],
- AIB_COMPONENT="tools",
+ AIB_COMPONENT="dist-test",
)
hygienic = get_option('install-mode') == 'hygienic'