-rw-r--r--   .gitignore                                                      9
-rwxr-xr-x   buildscripts/evergreen_gen_multiversion_tests.py                3
-rwxr-xr-x   buildscripts/evergreen_generate_resmoke_tasks.py               51
-rw-r--r--   buildscripts/patch_builds/selected_tests_service.py            27
-rw-r--r--   buildscripts/selected_tests.py                                 270
-rw-r--r--   buildscripts/tests/test_evergreen_generate_resmoke_tasks.py     7
-rw-r--r--   buildscripts/tests/test_selected_tests.py                      333
-rw-r--r--   etc/evergreen.yml                                               76
8 files changed, 571 insertions(+), 205 deletions(-)
diff --git a/.gitignore b/.gitignore
index 66aaf48b32d..4aa101ca973 100644
--- a/.gitignore
+++ b/.gitignore
@@ -20,10 +20,10 @@
*.tar.gz
*.suo
*.ncb
-*.idb
-*.obj
-*.opt
-*.pch
+*.idb
+*.obj
+*.opt
+*.pch
*.jsh
*.jsall
*.pyc
@@ -178,6 +178,7 @@ local.properties
compile_commands.json
generated_resmoke_config
+selected_tests_config
# Code review tool config
codereview.rc
diff --git a/buildscripts/evergreen_gen_multiversion_tests.py b/buildscripts/evergreen_gen_multiversion_tests.py
index ae77fafcbfc..cd077662ce4 100755
--- a/buildscripts/evergreen_gen_multiversion_tests.py
+++ b/buildscripts/evergreen_gen_multiversion_tests.py
@@ -277,10 +277,9 @@ class EvergreenConfigGenerator(object):
start_date = end_date - datetime.timedelta(days=generate_resmoke.LOOKBACK_DURATION_DAYS)
suites = gen_suites.calculate_suites(start_date, end_date)
# Render the given suites into yml files that can be used by resmoke.py.
- should_create_misc_suite = not bool(self.options.selected_tests_to_run)
config_file_dict = generate_resmoke.render_suite_files(suites, self.options.suite,
gen_suites.test_list, TEST_SUITE_DIR,
- should_create_misc_suite)
+ self.options.create_misc_suite)
generate_resmoke.write_file_dict(CONFIG_DIR, config_file_dict)
if burn_in_test is not None:
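
Aside: the change above replaces the locally computed should_create_misc_suite flag with a create_misc_suite option on the config object. A minimal sketch of how that option is expected to resolve, using simplified stand-in classes (the real ConfigOptions takes more arguments; the actual property definitions and tests appear later in this diff):

    # Illustrative only -- simplified stand-ins for the real ConfigOptions classes.
    class ConfigOptions(object):
        def __init__(self, config):
            self.config = config

        @property
        def create_misc_suite(self):
            # Default generator behaviour: always build the _misc suite.
            return True

    class SelectedTestsConfigOptions(ConfigOptions):
        @property
        def create_misc_suite(self):
            # Only build a _misc suite when no explicit test list was selected.
            return not self.config.get("selected_tests_to_run")

    print(ConfigOptions({}).create_misc_suite)  # True
    print(SelectedTestsConfigOptions({"selected_tests_to_run": {"auth3.js"}}).create_misc_suite)  # False
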
diff --git a/buildscripts/evergreen_generate_resmoke_tasks.py b/buildscripts/evergreen_generate_resmoke_tasks.py
index 190955b6c80..8dd1eaeb4dd 100755
--- a/buildscripts/evergreen_generate_resmoke_tasks.py
+++ b/buildscripts/evergreen_generate_resmoke_tasks.py
@@ -179,11 +179,12 @@ class ConfigOptions(object):
return config.get(item, None)
- def generate_display_task(self, task_names: List[str]):
+ def generate_display_task(self, task_names: List[str]) -> DisplayTaskDefinition:
"""
Generate a display task with execution tasks.
:param task_names: The names of the execution tasks to include under the display task.
+ :return: Display task definition for the generated display task.
"""
return DisplayTaskDefinition(self.task) \
.execution_tasks(task_names) \
@@ -776,7 +777,7 @@ class GenerateSubSuites(object):
Filter relevant tests.
:param tests_runtimes: List of tuples containing test names and test runtimes.
- return: Filtered TestRuntime objects indicating tests to be run.
+ :return: Filtered TestRuntime objects indicating tests to be run.
"""
tests_runtimes = self.filter_existing_tests(tests_runtimes)
if self.config_options.selected_tests_to_run:
@@ -807,37 +808,35 @@ class GenerateSubSuites(object):
"""List the test files that are part of the suite being split."""
return suitesconfig.get_suite(self.config_options.suite).tests
- def render_evergreen_config(self, shrub_config: Configuration, suites: List[Suite]) -> str:
+ def generate_task_config(self, shrub_config: Configuration, suites: List[Suite]):
"""
Generate the evergreen configuration for the new suite.
:param shrub_config: Shrub configuration the generated Evergreen config will be added to.
:param suites: The suite the generated Evergreen config will be generated for.
- :return: The generated Evergreen config string in json format.
"""
- evg_config_gen = EvergreenConfigGenerator(shrub_config, suites, self.config_options,
- self.evergreen_api)
- evg_config = evg_config_gen.generate_config()
- return evg_config.to_json()
+ EvergreenConfigGenerator(shrub_config, suites, self.config_options,
+ self.evergreen_api).generate_config()
- def generate_task_config_and_suites(self, shrub_config: Configuration) -> Tuple[dict, str]:
+    def generate_suites_config(self, suites: List[Suite]) -> dict:
+        """
+        Generate the suite files for the generated task.
+
+        :param suites: Suites to generate file contents for.
+        :return: Dict of suite file names to file contents.
+        """
+ return render_suite_files(suites, self.config_options.suite, self.test_list,
+ self.config_options.test_suites_dir,
+ self.config_options.create_misc_suite)
+
+ def get_suites(self) -> List[Suite]:
"""
Generate the suites files and evergreen configuration for the generated task.
- :param shrub_config: Shrub configuration the generated Evergreen config will be added to.
:return: The suites files and evergreen configuration for the generated task.
"""
end_date = datetime.datetime.utcnow().replace(microsecond=0)
start_date = end_date - datetime.timedelta(days=LOOKBACK_DURATION_DAYS)
- suites = self.calculate_suites(start_date, end_date)
-
- LOGGER.debug("Creating suites", num_suites=len(suites), task=self.config_options.task,
- dir=self.config_options.generated_config_dir)
- suite_files_dict = render_suite_files(suites, self.config_options.suite, self.test_list,
- self.config_options.test_suites_dir,
- self.config_options.create_misc_suite)
- shrub_task_config = self.render_evergreen_config(shrub_config, suites)
- return suite_files_dict, shrub_task_config
+ return self.calculate_suites(start_date, end_date)
def run(self):
"""Generate resmoke suites that run within a target execution time and write to disk."""
@@ -846,11 +845,17 @@ class GenerateSubSuites(object):
LOGGER.info("Not generating configuration due to previous successful generation.")
return
+ suites = self.get_suites()
+ LOGGER.debug("Creating suites", num_suites=len(suites), task=self.config_options.task,
+ dir=self.config_options.generated_config_dir)
+
+ config_dict_of_suites = self.generate_suites_config(suites)
+
shrub_config = Configuration()
- suite_files_dict, shrub_task_config = self.generate_task_config_and_suites(shrub_config)
+ self.generate_task_config(shrub_config, suites)
- suite_files_dict[self.config_options.task + ".json"] = shrub_task_config
- write_file_dict(self.config_options.generated_config_dir, suite_files_dict)
+ config_dict_of_suites[self.config_options.task + ".json"] = shrub_config.to_json()
+ write_file_dict(self.config_options.generated_config_dir, config_dict_of_suites)
def filter_specified_tests(specified_tests: Set[str], tests_runtimes: List[teststats.TestRuntime]):
@@ -859,7 +864,7 @@ def filter_specified_tests(specified_tests: Set[str], tests_runtimes: List[tests
:param specified_tests: List of test files that should be run.
:param tests_runtimes: List of tuples containing test names and test runtimes.
- return: List of TestRuntime tuples that match specified_tests.
+ :return: List of TestRuntime tuples that match specified_tests.
"""
return [info for info in tests_runtimes if info.test_name in specified_tests]
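
Aside: generate_task_config_and_suites() is split into get_suites(), generate_suites_config() and generate_task_config() so the selected-tests generator can reuse the pieces individually. A rough sketch of how run() composes them after this patch (simplified, assuming the shrub package already used by these buildscripts):

    from shrub.config import Configuration

    def run_generation(task_generator, config_options):
        """Compose the three new steps the same way run() does."""
        suites = task_generator.get_suites()                          # split by runtime stats
        config_dict = task_generator.generate_suites_config(suites)   # resmoke suite yml contents

        shrub_config = Configuration()
        task_generator.generate_task_config(shrub_config, suites)     # evergreen task definitions

        # The shrub JSON lands next to the suite files, keyed by task name.
        config_dict[config_options.task + ".json"] = shrub_config.to_json()
        return config_dict
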
diff --git a/buildscripts/patch_builds/selected_tests_service.py b/buildscripts/patch_builds/selected_tests_service.py
index 4cd7cd33013..72292601c6f 100644
--- a/buildscripts/patch_builds/selected_tests_service.py
+++ b/buildscripts/patch_builds/selected_tests_service.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python3
"""Selected Tests service."""
-from typing import Set
+from typing import Any, Dict, Set
import requests
import yaml
@@ -45,13 +45,13 @@ class SelectedTestsService(object):
return None
- def get_test_mappings(self, threshold: float, changed_files: Set[str]):
+ def get_test_mappings(self, threshold: float, changed_files: Set[str]) -> Dict[str, Any]:
"""
- Request related test files from selected-tests service and filter them.
+ Request related test files from selected-tests service.
:param threshold: Threshold for test file correlation.
:param changed_files: Set of changed_files.
- return: Set of related test files returned by selected-tests service.
+ :return: Related test files returned by selected-tests service.
"""
payload = {"threshold": threshold, "changed_files": ",".join(changed_files)}
response = requests.get(
@@ -63,3 +63,22 @@ class SelectedTestsService(object):
response.raise_for_status()
return response.json()["test_mappings"]
+
+ def get_task_mappings(self, threshold: float, changed_files: Set[str]) -> Dict[str, Any]:
+ """
+ Request related tasks from selected-tests service.
+
+ :param threshold: Threshold for test file correlation.
+ :param changed_files: Set of changed_files.
+ :return: Related tasks returned by selected-tests service.
+ """
+ payload = {"threshold": threshold, "changed_files": ",".join(changed_files)}
+ response = requests.get(
+ self.url + f"/projects/{self.project}/task-mappings",
+ params=payload,
+ headers=self.headers,
+ cookies=self.cookies,
+ )
+ response.raise_for_status()
+
+ return response.json()["task_mappings"]
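
Aside: the new get_task_mappings() call mirrors get_test_mappings() but hits the /task-mappings endpoint. A standalone sketch of the request with a placeholder host (the real base URL, headers and cookies come from the selected-tests config file):

    import requests

    def fetch_task_mappings(base_url, project, changed_files, threshold=0.1):
        """Return task mappings for the changed files (auth headers omitted here)."""
        payload = {"threshold": threshold, "changed_files": ",".join(changed_files)}
        response = requests.get(f"{base_url}/projects/{project}/task-mappings", params=payload)
        response.raise_for_status()
        return response.json()["task_mappings"]

    # Placeholder host; the real service URL lives in the selected-tests config file.
    # mappings = fetch_task_mappings("https://selected-tests.example.com",
    #                                "mongodb-mongo-master", {"src/mongo/db/commands.cpp"})
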
diff --git a/buildscripts/selected_tests.py b/buildscripts/selected_tests.py
index 39c940ad071..3a37fb98ea8 100644
--- a/buildscripts/selected_tests.py
+++ b/buildscripts/selected_tests.py
@@ -3,6 +3,7 @@
import logging
import os
+import re
import sys
from typing import Any, Dict, List, Optional, Set, Tuple
@@ -12,7 +13,7 @@ from structlog.stdlib import LoggerFactory
from evergreen.api import EvergreenApi, RetryingEvergreenApi
from git import Repo
from shrub.config import Configuration
-from shrub.variant import DisplayTaskDefinition
+from shrub.variant import DisplayTaskDefinition, Variant
# Get relative imports to work when the package is not installed on the PYTHONPATH.
if __name__ == "__main__" and __package__ is None:
@@ -25,6 +26,7 @@ from buildscripts.burn_in_tests import create_task_list_for_tests, is_file_a_tes
from buildscripts.ciconfig.evergreen import (
EvergreenProjectConfig,
ResmokeArgs,
+ Task,
parse_evergreen_file,
)
from buildscripts.evergreen_generate_resmoke_tasks import (
@@ -49,8 +51,37 @@ EXTERNAL_LOGGERS = {
"git",
"urllib3",
}
-SELECTED_TESTS_CONFIG_DIR = "generated_resmoke_config"
-THRESHOLD_FOR_RELATED_TESTS = 0.1
+SELECTED_TESTS_CONFIG_DIR = "selected_tests_config"
+RELATION_THRESHOLD = 0.1
+
+COMPILE_TASK_PATTERN = re.compile(".*compile.*")
+CONCURRENCY_TASK_PATTERN = re.compile("concurrency.*")
+INTEGRATION_TASK_PATTERN = re.compile("integration.*")
+FUZZER_TASK_PATTERN = re.compile(".*fuzz.*")
+GENERATE_TASK_PATTERN = re.compile("burn_in.*")
+LINT_TASK_PATTERN = re.compile("lint.*")
+STITCH_TASK_PATTERN = re.compile("stitch.*")
+EXCLUDE_TASK_PATTERNS = [
+ COMPILE_TASK_PATTERN, CONCURRENCY_TASK_PATTERN, INTEGRATION_TASK_PATTERN, FUZZER_TASK_PATTERN,
+ GENERATE_TASK_PATTERN, LINT_TASK_PATTERN, STITCH_TASK_PATTERN
+]
+
+CPP_TASK_NAMES = [
+ "dbtest",
+ "idl_tests",
+ "unittests",
+]
+PUBLISH_TASK_NAMES = [
+ "package",
+ "publish_packages",
+ "push",
+]
+PYTHON_TESTS = ["buildscripts_test"]
+EXCLUDE_TASK_LIST = [
+ *CPP_TASK_NAMES,
+ *PYTHON_TESTS,
+ *PUBLISH_TASK_NAMES,
+]
class SelectedTestsConfigOptions(ConfigOptions):
@@ -58,11 +89,13 @@ class SelectedTestsConfigOptions(ConfigOptions):
@classmethod
# pylint: disable=too-many-arguments,W0221
- def from_file(cls, filepath: str, overwrites: Dict[str, Any], required_keys: Set[str],
- defaults: Dict[str, Any], formats: Dict[str, type]):
+ def from_file(cls, origin_variant_expansions: Dict[str, str], filepath: str,
+ overwrites: Dict[str, Any], required_keys: Set[str], defaults: Dict[str, Any],
+ formats: Dict[str, type]):
"""
Create an instance of SelectedTestsConfigOptions based on the given config file.
+ :param origin_variant_expansions: Expansions of the origin build variant.
:param filepath: Path to file containing configuration.
:param overwrites: Dict of configuration values to overwrite those listed in filepath.
:param required_keys: Set of keys required by this config.
@@ -71,7 +104,8 @@ class SelectedTestsConfigOptions(ConfigOptions):
:return: Instance of SelectedTestsConfigOptions.
"""
config_from_file = read_config.read_config_file(filepath)
- return cls({**config_from_file, **overwrites}, required_keys, defaults, formats)
+ return cls({**origin_variant_expansions, **config_from_file, **overwrites}, required_keys,
+ defaults, formats)
@property
def run_tests_task(self):
@@ -91,13 +125,14 @@ class SelectedTestsConfigOptions(ConfigOptions):
@property
def create_misc_suite(self):
"""Whether or not a _misc suite file should be created."""
- return False
+ return not self.selected_tests_to_run
- def generate_display_task(self, task_names: List[str]):
+ def generate_display_task(self, task_names: List[str]) -> DisplayTaskDefinition:
"""
Generate a display task with execution tasks.
:param task_names: The names of the execution tasks to include under the display task.
+ :return: Display task definition for the generated display task.
"""
return DisplayTaskDefinition(f"{self.task}_{self.variant}").execution_tasks(task_names)
@@ -118,19 +153,18 @@ def _configure_logging(verbose: bool):
logging.getLogger(log_name).setLevel(logging.WARNING)
-def _find_related_test_files(
+def _find_selected_test_files(
selected_tests_service: SelectedTestsService,
changed_files: Set[str],
) -> Set[str]:
"""
- Request related test files from selected-tests service.
+ Request related test files from selected-tests service and filter invalid files.
:param selected_tests_service: Selected-tests service.
:param changed_files: Set of changed_files.
- return: Set of test files returned by selected-tests service that are valid test files.
+ :return: Set of test files returned by selected-tests service that are valid test files.
"""
- test_mappings = selected_tests_service.get_test_mappings(THRESHOLD_FOR_RELATED_TESTS,
- changed_files)
+ test_mappings = selected_tests_service.get_test_mappings(RELATION_THRESHOLD, changed_files)
return {
test_file["name"]
for test_mapping in test_mappings for test_file in test_mapping["test_files"]
@@ -138,12 +172,54 @@ def _find_related_test_files(
}
-def _get_selected_tests_task_configuration(expansion_file):
+def _find_selected_tasks(selected_tests_service: SelectedTestsService, changed_files: Set[str],
+ build_variant_config: Variant) -> Set[str]:
+ """
+ Request tasks from selected-tests and filter out tasks that don't exist or should be excluded.
+
+ :param selected_tests_service: Selected-tests service.
+ :param changed_files: Set of changed_files.
+ :param build_variant_config: Config of build variant to collect task info from.
+ :return: Set of tasks returned by selected-tests service that should not be excluded.
+ """
+ task_mappings = selected_tests_service.get_task_mappings(RELATION_THRESHOLD, changed_files)
+ returned_task_names = {
+ task["name"]
+ for task_mapping in task_mappings for task in task_mapping["tasks"]
+ }
+ existing_task_names = set()
+ for task_name in returned_task_names:
+ task = _find_task(build_variant_config, task_name)
+ if task:
+ if task.name in EXCLUDE_TASK_LIST or any(
+ regex.match(task.name) for regex in EXCLUDE_TASK_PATTERNS):
+ LOGGER.debug("Excluding task from analysis because it is not a jstest",
+ task=task_name)
+ continue
+ existing_task_names.add(task.name)
+ return existing_task_names
+
+
+def _find_task(build_variant_config: Variant, task_name: str) -> Task:
+ """
+ Look up shrub config for task.
+
+ :param build_variant_config: Config of build variant to collect task info from.
+ :param task_name: Name of task to get info for.
+ :return: Task configuration.
+ """
+ task = build_variant_config.get_task(task_name)
+ if not task:
+ task = build_variant_config.get_task(task_name + "_gen")
+ return task
+
+
+def _get_selected_tests_task_config(expansion_file: str) -> Dict[str, str]:
"""
Look up task config of the selected tests task.
:param expansion_file: Configuration file.
- return: Task configuration values.
+ :return: Task configuration values.
"""
expansions = read_config.read_config_file(expansion_file)
return {
@@ -153,23 +229,20 @@ def _get_selected_tests_task_configuration(expansion_file):
}
-def _get_evg_task_configuration(
- evg_conf: EvergreenProjectConfig,
- build_variant: str,
+def _get_evg_task_config(
+ expansion_file: str,
task_name: str,
- test_list_info: dict,
-):
+ build_variant_config: Variant,
+) -> Dict[str, Any]:
"""
Look up task config of the task to be generated.
- :param evg_conf: Evergreen configuration.
- :param build_variant: Build variant to collect task info from.
- :param task_name: Name of task to get info for.
- :param test_list_info: The value for a given task_name in the tests_by_task dict.
- return: Task configuration values.
+ :param expansion_file: Configuration file.
+ :param task_name: Task to get info for.
+ :param build_variant_config: Config of build variant to collect task info from.
+ :return: Task configuration values.
"""
- evg_build_variant = evg_conf.get_variant(build_variant)
- task = evg_build_variant.get_task(task_name)
+ task = build_variant_config.get_task(task_name)
if task.is_generate_resmoke_task:
task_vars = task.generate_resmoke_tasks_command["vars"]
else:
@@ -180,49 +253,136 @@ def _get_evg_task_configuration(
if suite_name:
task_vars.update({"suite": suite_name})
+ # the suites argument will run all tests in a suite even when individual
+ # tests are specified in resmoke_args, so we remove it
resmoke_args_without_suites = ResmokeArgs.remove_arg(task_vars["resmoke_args"], "suites")
task_vars["resmoke_args"] = resmoke_args_without_suites
+ selected_tests_task_config = _get_selected_tests_task_config(expansion_file)
+
return {
- "task_name": task_name, "build_variant": build_variant,
- "selected_tests_to_run": set(test_list_info["tests"]), **task_vars
+ "task_name": task.name, "build_variant": build_variant_config.name, **task_vars,
+ **selected_tests_task_config
}
-def _generate_shrub_config(evg_api: EvergreenApi, evg_conf: EvergreenProjectConfig,
- expansion_file: str, tests_by_task: dict, build_variant: str):
+def _update_config_with_task(evg_api: EvergreenApi, shrub_config: Configuration,
+ config_options: SelectedTestsConfigOptions,
+ config_dict_of_suites_and_tasks: Dict[str, str]):
"""
- Generate a dict containing file names and contents for the generated configs.
+ Generate the suites config and the task shrub config for a given task config.
:param evg_api: Evergreen API object.
- :param evg_conf: Evergreen configuration.
+ :param shrub_config: Shrub configuration for task.
+ :param config_options: Task configuration options.
+ :param config_dict_of_suites_and_tasks: Dict of shrub configs and suite file contents.
+ """
+ task_generator = GenerateSubSuites(evg_api, config_options)
+ suites = task_generator.get_suites()
+
+ config_dict_of_suites = task_generator.generate_suites_config(suites)
+ config_dict_of_suites_and_tasks.update(config_dict_of_suites)
+
+ task_generator.generate_task_config(shrub_config, suites)
+
+
+def _get_task_configs_for_test_mappings(expansion_file: str, tests_by_task: Dict[str, Any],
+ build_variant_config: Variant) -> Dict[str, dict]:
+ """
+ For test mappings, generate a dict containing task names and their config settings.
+
:param expansion_file: Configuration file.
:param tests_by_task: Dictionary of tests and tasks to run.
+ :param build_variant_config: Config of build variant to collect task info from.
+ :return: Dict of task names and their config settings.
+ """
+ evg_task_configs = {}
+ for task_name, test_list_info in tests_by_task.items():
+ evg_task_config = _get_evg_task_config(expansion_file, task_name, build_variant_config)
+ evg_task_config.update({"selected_tests_to_run": set(test_list_info["tests"])})
+ LOGGER.debug("Calculated evg_task_config values", evg_task_config=evg_task_config)
+ evg_task_configs[task_name] = evg_task_config
+
+ return evg_task_configs
+
+
+def _get_task_configs_for_task_mappings(expansion_file: str, related_tasks: List[str],
+ build_variant_config: Variant) -> Dict[str, dict]:
+ """
+ For task mappings, generate a dict containing task names and their config settings.
+
+ :param expansion_file: Configuration file.
+ :param related_tasks: List of tasks to run.
+ :param build_variant_config: Config of build variant to collect task info from.
+ :return: Dict of task names and their config settings.
+ """
+ evg_task_configs = {}
+ for task_name in related_tasks:
+ evg_task_config = _get_evg_task_config(expansion_file, task_name, build_variant_config)
+ LOGGER.debug("Calculated evg_task_config values", evg_task_config=evg_task_config)
+ evg_task_configs[task_name] = evg_task_config
+
+ return evg_task_configs
+
+
+# pylint: disable=too-many-arguments, too-many-locals
+def run(evg_api: EvergreenApi, evg_conf: EvergreenProjectConfig, expansion_file: str,
+ selected_tests_service: SelectedTestsService, changed_files: Set[str],
+ build_variant: str) -> Dict[str, dict]:
+ """
+ Run code to select tasks to run based on test mappings and task mappings.
+
+ :param evg_api: Evergreen API object.
+ :param evg_conf: Evergreen configuration.
+ :param expansion_file: Configuration file.
+    :param selected_tests_service: Selected-tests service client built from the selected-tests config file.
+ :param changed_files: Set of changed_files.
:param build_variant: Build variant to collect task info from.
- return: Dict of files and file contents for generated tasks.
+ :return: Dict of files and file contents for generated tasks.
"""
shrub_config = Configuration()
- shrub_task_config = None
- config_dict_of_generated_tasks = {}
- for task_name, test_list_info in tests_by_task.items():
- evg_task_config = _get_evg_task_configuration(evg_conf, build_variant, task_name,
- test_list_info)
- selected_tests_task_config = _get_selected_tests_task_configuration(expansion_file)
- evg_task_config.update(selected_tests_task_config)
- LOGGER.debug("Calculated overwrite_values", overwrite_values=evg_task_config)
+ config_dict_of_suites_and_tasks = {}
+
+ task_configs = {}
+ build_variant_config = evg_conf.get_variant(build_variant)
+
+ related_test_files = _find_selected_test_files(selected_tests_service, changed_files)
+ LOGGER.debug("related test files found", related_test_files=related_test_files)
+ if related_test_files:
+ tests_by_task = create_task_list_for_tests(related_test_files, build_variant, evg_conf)
+ LOGGER.debug("tests and tasks found", tests_by_task=tests_by_task)
+
+ test_mapping_task_configs = _get_task_configs_for_test_mappings(
+ expansion_file, tests_by_task, build_variant_config)
+ task_configs.update(test_mapping_task_configs)
+
+ related_tasks = _find_selected_tasks(selected_tests_service, changed_files,
+ build_variant_config)
+ LOGGER.debug("related tasks found", related_tasks=related_tasks)
+ if related_tasks:
+ task_mapping_task_configs = _get_task_configs_for_task_mappings(
+ expansion_file, related_tasks, build_variant_config)
+ # task_mapping_task_configs will overwrite test_mapping_task_configs
+ # because task_mapping_task_configs will run all tests rather than a subset of tests and we
+ # should err on the side of running all tests
+ task_configs.update(task_mapping_task_configs)
+
+ origin_variant_expansions = build_variant_config.expansions
+
+ for task_config in task_configs.values():
config_options = SelectedTestsConfigOptions.from_file(
+ origin_variant_expansions,
expansion_file,
- evg_task_config,
+ task_config,
REQUIRED_CONFIG_KEYS,
DEFAULT_CONFIG_VALUES,
CONFIG_FORMAT_FN,
)
- suite_files_dict, shrub_task_config = GenerateSubSuites(
- evg_api, config_options).generate_task_config_and_suites(shrub_config)
- config_dict_of_generated_tasks.update(suite_files_dict)
- if shrub_task_config:
- config_dict_of_generated_tasks["selected_tests_config.json"] = shrub_task_config
- return config_dict_of_generated_tasks
+ _update_config_with_task(evg_api, shrub_config, config_options,
+ config_dict_of_suites_and_tasks)
+
+ config_dict_of_suites_and_tasks["selected_tests_config.json"] = shrub_config.to_json()
+ return config_dict_of_suites_and_tasks
@click.command()
@@ -254,7 +414,6 @@ def _generate_shrub_config(evg_api: EvergreenApi, evg_conf: EvergreenProjectConf
metavar="FILE",
help="Configuration file with connection info for selected tests service.",
)
-# pylint: disable=too-many-arguments
def main(
verbose: bool,
expansion_file: str,
@@ -281,15 +440,10 @@ def main(
changed_files = find_changed_files(repo)
buildscripts.resmokelib.parser.set_options()
LOGGER.debug("Found changed files", files=changed_files)
- related_test_files = _find_related_test_files(selected_tests_service, changed_files)
- LOGGER.debug("related test files found", related_test_files=related_test_files)
- if related_test_files:
- tests_by_task = create_task_list_for_tests(related_test_files, build_variant, evg_conf)
- LOGGER.debug("tests and tasks found", tests_by_task=tests_by_task)
- config_dict_of_generated_tasks = _generate_shrub_config(evg_api, evg_conf, expansion_file,
- tests_by_task, build_variant)
- write_file_dict(SELECTED_TESTS_CONFIG_DIR, config_dict_of_generated_tasks)
+ config_dict_of_suites_and_tasks = run(evg_api, evg_conf, expansion_file, selected_tests_service,
+ changed_files, build_variant)
+ write_file_dict(SELECTED_TESTS_CONFIG_DIR, config_dict_of_suites_and_tasks)
if __name__ == "__main__":
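
Aside: _find_selected_tasks() drops any returned task that appears in EXCLUDE_TASK_LIST or matches one of EXCLUDE_TASK_PATTERNS. A small, self-contained check of that rule using the constant values introduced above:

    import re

    EXCLUDE_TASK_PATTERNS = [
        re.compile(p) for p in
        (".*compile.*", "concurrency.*", "integration.*", ".*fuzz.*",
         "burn_in.*", "lint.*", "stitch.*")
    ]
    EXCLUDE_TASK_LIST = [
        "dbtest", "idl_tests", "unittests",          # CPP_TASK_NAMES
        "buildscripts_test",                         # PYTHON_TESTS
        "package", "publish_packages", "push",       # PUBLISH_TASK_NAMES
    ]

    def is_excluded(task_name):
        return task_name in EXCLUDE_TASK_LIST or any(
            regex.match(task_name) for regex in EXCLUDE_TASK_PATTERNS)

    assert is_excluded("compile_all")      # matches .*compile.*
    assert is_excluded("unittests")        # explicit exclusion
    assert not is_excluded("auth_gen")     # regular jstest task, kept
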
diff --git a/buildscripts/tests/test_evergreen_generate_resmoke_tasks.py b/buildscripts/tests/test_evergreen_generate_resmoke_tasks.py
index af074d78c78..cc2774de148 100644
--- a/buildscripts/tests/test_evergreen_generate_resmoke_tasks.py
+++ b/buildscripts/tests/test_evergreen_generate_resmoke_tasks.py
@@ -334,6 +334,13 @@ class TestConfigOptions(unittest.TestCase):
self.assertEqual(config["build_id"], config_options.run_tests_build_id)
+ def test_create_misc_suite(self):
+ config = {}
+
+ config_options = under_test.ConfigOptions(config)
+
+ self.assertTrue(config_options.create_misc_suite)
+
def test_item_with_format_function_works(self):
config = {"number": "1"}
formats = {"number": int}
diff --git a/buildscripts/tests/test_selected_tests.py b/buildscripts/tests/test_selected_tests.py
index 2d1a28a5db2..6d88e1e8f49 100644
--- a/buildscripts/tests/test_selected_tests.py
+++ b/buildscripts/tests/test_selected_tests.py
@@ -3,6 +3,7 @@ import os
import unittest
from mock import MagicMock, patch
+from shrub.config import Configuration
# pylint: disable=wrong-import-position
import buildscripts.ciconfig.evergreen as _evergreen
@@ -40,6 +41,7 @@ class TestSelectedTestsConfigOptions(unittest.TestCase):
@patch(ns("read_config"))
def test_overwrites_overwrite_filepath_config(self, read_config_mock):
filepath = MagicMock()
+ origin_variant_expansions = {"key1": 0}
read_config_mock.read_config_file.return_value = {"key1": 1}
overwrites = {"key1": 2}
required_keys = {"key1"}
@@ -47,13 +49,14 @@ class TestSelectedTestsConfigOptions(unittest.TestCase):
formats = {"key1": int}
config_options = under_test.SelectedTestsConfigOptions.from_file(
- filepath, overwrites, required_keys, defaults, formats)
+ origin_variant_expansions, filepath, overwrites, required_keys, defaults, formats)
self.assertEqual(overwrites["key1"], config_options.key1)
@patch(ns("read_config"))
def test_overwrites_overwrite_defaults(self, read_config_mock):
filepath = MagicMock()
+ origin_variant_expansions = {}
read_config_mock.read_config_file.return_value = {"key1": 1}
overwrites = {"key1": 2}
required_keys = {"key1"}
@@ -61,10 +64,26 @@ class TestSelectedTestsConfigOptions(unittest.TestCase):
formats = {"key1": int}
config_options = under_test.SelectedTestsConfigOptions.from_file(
- filepath, overwrites, required_keys, defaults, formats)
+ origin_variant_expansions, filepath, overwrites, required_keys, defaults, formats)
self.assertEqual(overwrites["key1"], config_options.key1)
+ @patch(ns("read_config"))
+ def test_filepath_config_overrides_origin_expansions(self, read_config_mock):
+ filepath = MagicMock()
+ origin_variant_expansions = {"key1": 0}
+ filepath_config = {"key1": 1}
+ read_config_mock.read_config_file.return_value = filepath_config
+ overwrites = {}
+ required_keys = {"key1"}
+ defaults = {}
+ formats = {"key1": int}
+
+ config_options = under_test.SelectedTestsConfigOptions.from_file(
+ origin_variant_expansions, filepath, overwrites, required_keys, defaults, formats)
+
+ self.assertEqual(filepath_config["key1"], config_options.key1)
+
def test_run_tests_task(self):
config_options = under_test.SelectedTestsConfigOptions(
{"name_of_generating_task": "my_task_gen"}, {}, {}, {})
@@ -83,6 +102,17 @@ class TestSelectedTestsConfigOptions(unittest.TestCase):
self.assertEqual(config_options.run_tests_build_id, "my_build_id")
+ def test_create_misc_suite_with_no_selected_tests_to_run(self):
+ config_options = under_test.SelectedTestsConfigOptions({}, {}, {}, {})
+
+ self.assertTrue(config_options.create_misc_suite)
+
+ def test_create_misc_suite_with_selected_tests_to_run(self):
+ config_options = under_test.SelectedTestsConfigOptions(
+ {"selected_tests_to_run": {"my_test.js"}}, {}, {}, {})
+
+ self.assertFalse(config_options.create_misc_suite)
+
@patch(ns("read_config"))
def test_generate_display_task(self, read_config_mock):
config_options = under_test.SelectedTestsConfigOptions(
@@ -95,7 +125,7 @@ class TestSelectedTestsConfigOptions(unittest.TestCase):
self.assertIn("task_2", display_task.to_map()["execution_tasks"])
-class TestFindRelatedTestFiles(unittest.TestCase):
+class TestFindSelectedTestFiles(unittest.TestCase):
@patch(ns("is_file_a_test_file"))
@patch(ns("SelectedTestsService"))
def test_related_files_returned_from_selected_tests_service(self, selected_tests_service_mock,
@@ -113,8 +143,8 @@ class TestFindRelatedTestFiles(unittest.TestCase):
},
]
- related_test_files = under_test._find_related_test_files(selected_tests_service_mock,
- changed_files)
+ related_test_files = under_test._find_selected_test_files(selected_tests_service_mock,
+ changed_files)
self.assertEqual(related_test_files, {"jstests/file-1.js", "jstests/file-3.js"})
@@ -135,8 +165,8 @@ class TestFindRelatedTestFiles(unittest.TestCase):
},
]
- related_test_files = under_test._find_related_test_files(selected_tests_service_mock,
- changed_files)
+ related_test_files = under_test._find_selected_test_files(selected_tests_service_mock,
+ changed_files)
self.assertEqual(related_test_files, set())
@@ -145,21 +175,97 @@ class TestFindRelatedTestFiles(unittest.TestCase):
selected_tests_service_mock.get_test_mappings.return_value = set()
changed_files = {"src/file1.cpp", "src/file2.js"}
- related_test_files = under_test._find_related_test_files(selected_tests_service_mock,
- changed_files)
+ related_test_files = under_test._find_selected_test_files(selected_tests_service_mock,
+ changed_files)
self.assertEqual(related_test_files, set())
+class TestFindSelectedTasks(unittest.TestCase):
+ @patch(ns("SelectedTestsService"))
+ def test_related_tasks_returned_from_selected_tests_service(self, selected_tests_service_mock):
+ selected_tests_service_mock.get_task_mappings.return_value = [
+ {
+ "source_file": "src/file1.cpp",
+ "tasks": [{"name": "my_task_1"}],
+ },
+ {
+ "source_file": "src/file2.cpp",
+ "tasks": [{"name": "my_task_2"}],
+ },
+ ]
+ changed_files = {"src/file1.cpp", "src/file2.js"}
+ build_variant_conf = MagicMock()
+ build_variant_conf.get_task.return_value = _evergreen.Task({"name": "my_task_1"})
+
+ related_tasks = under_test._find_selected_tasks(selected_tests_service_mock, changed_files,
+ build_variant_conf)
+
+ self.assertEqual(related_tasks, {"my_task_1"})
+
+ @patch(ns("SelectedTestsService"))
+ def test_returned_tasks_do_not_exist(self, selected_tests_service_mock):
+ selected_tests_service_mock.get_task_mappings.return_value = [
+ {
+ "source_file": "src/file1.cpp",
+ "tasks": [{"name": "my_task_1"}],
+ },
+ ]
+ changed_files = {"src/file1.cpp", "src/file2.js"}
+ build_variant_conf = MagicMock()
+ build_variant_conf.get_task.return_value = None
+
+ related_tasks = under_test._find_selected_tasks(selected_tests_service_mock, changed_files,
+ build_variant_conf)
+
+ self.assertEqual(related_tasks, set())
+
+ @patch(ns("SelectedTestsService"))
+ def test_returned_tasks_should_be_excluded(self, selected_tests_service_mock):
+ excluded_task = under_test.EXCLUDE_TASK_LIST[0]
+ selected_tests_service_mock.get_task_mappings.return_value = [
+ {
+ "source_file": "src/file1.cpp",
+ "tasks": [{"name": excluded_task}],
+ },
+ ]
+ changed_files = {"src/file1.cpp", "src/file2.js"}
+ build_variant_conf = MagicMock()
+ build_variant_conf.get_task.return_value = _evergreen.Task({"name": excluded_task})
+
+ related_tasks = under_test._find_selected_tasks(selected_tests_service_mock, changed_files,
+ build_variant_conf)
+
+ self.assertEqual(related_tasks, set())
+
+ @patch(ns("SelectedTestsService"))
+ def test_returned_tasks_match_excluded_pattern(self, selected_tests_service_mock):
+ task_that_matches_exclude_pattern = "compile_all"
+ selected_tests_service_mock.get_task_mappings.return_value = [
+ {
+ "source_file": "src/file1.cpp",
+ "tasks": [{"name": task_that_matches_exclude_pattern}],
+ },
+ ]
+ changed_files = {"src/file1.cpp", "src/file2.js"}
+ build_variant_conf = MagicMock()
+ build_variant_conf.get_task.return_value = _evergreen.Task(
+ {"name": task_that_matches_exclude_pattern})
+
+ related_tasks = under_test._find_selected_tasks(selected_tests_service_mock, changed_files,
+ build_variant_conf)
+
+ self.assertEqual(related_tasks, set())
+
+
class TestGetSelectedTestsTaskConfiguration(unittest.TestCase):
@patch(ns("read_config"))
def test_gets_values(self, read_config_mock):
- filepath = MagicMock()
read_config_mock.read_config_file.return_value = {
"task_name": "my_task", "build_variant": "my-build-variant", "build_id": "my_build_id"
}
- selected_tests_task_config = under_test._get_selected_tests_task_configuration(filepath)
+ selected_tests_task_config = under_test._get_selected_tests_task_config(MagicMock())
self.assertEqual(selected_tests_task_config["name_of_generating_task"], "my_task")
self.assertEqual(selected_tests_task_config["name_of_generating_build_variant"],
@@ -167,10 +273,14 @@ class TestGetSelectedTestsTaskConfiguration(unittest.TestCase):
self.assertEqual(selected_tests_task_config["name_of_generating_build_id"], "my_build_id")
-class TestGetEvgTaskConfiguration(unittest.TestCase):
- def test_task_is_a_generate_resmoke_task(self):
+class TestGetEvgTaskConfig(unittest.TestCase):
+ @patch(ns("_get_selected_tests_task_config"))
+ def test_task_is_a_generate_resmoke_task(self, selected_tests_config_mock):
+ selected_tests_config_mock.return_value = {"selected_tests_key": "selected_tests_value"}
task_name = "auth_gen"
- task = _evergreen.Task({
+ build_variant_conf = MagicMock()
+ build_variant_conf.name = "variant"
+ build_variant_conf.get_task.return_value = _evergreen.Task({
"name":
task_name,
"commands": [{
@@ -181,26 +291,26 @@ class TestGetEvgTaskConfiguration(unittest.TestCase):
},
}],
})
- burn_in_task_config = tests_by_task_stub()[task_name]
- evg_conf_mock = MagicMock()
- evg_conf_mock.get_variant.return_value.get_task.return_value = task
- evg_task_config = under_test._get_evg_task_configuration(evg_conf_mock, "variant",
- task_name, burn_in_task_config)
+ evg_task_config = under_test._get_evg_task_config(MagicMock(), task_name,
+ build_variant_conf)
self.assertEqual(evg_task_config["task_name"], task_name)
self.assertEqual(evg_task_config["build_variant"], "variant")
- self.assertEqual(evg_task_config["selected_tests_to_run"], {"jstests/auth/auth3.js"})
self.assertIsNone(evg_task_config.get("suite"))
self.assertEqual(
evg_task_config["resmoke_args"],
"--storageEngine=wiredTiger",
)
self.assertEqual(evg_task_config["fallback_num_sub_suites"], "4")
+ self.assertEqual(evg_task_config["selected_tests_key"], "selected_tests_value")
- def test_task_is_not_a_generate_resmoke_task(self):
+ @patch(ns("_get_selected_tests_task_config"))
+ def test_task_is_not_a_generate_resmoke_task(self, selected_tests_config_mock):
task_name = "jsCore_auth"
- task = _evergreen.Task({
+ build_variant_conf = MagicMock()
+ build_variant_conf.name = "variant"
+ build_variant_conf.get_task.return_value = _evergreen.Task({
"name":
task_name,
"commands": [{
@@ -208,18 +318,12 @@ class TestGetEvgTaskConfiguration(unittest.TestCase):
"vars": {"resmoke_args": "--suites=core_auth --storageEngine=wiredTiger"}
}],
})
- burn_in_task_config = tests_by_task_stub()[task_name]
- evg_conf_mock = MagicMock()
- evg_conf_mock.get_variant.return_value.get_task.return_value = task
- evg_task_config = under_test._get_evg_task_configuration(evg_conf_mock, "variant",
- task_name, burn_in_task_config)
+ evg_task_config = under_test._get_evg_task_config(MagicMock(), task_name,
+ build_variant_conf)
self.assertEqual(evg_task_config["task_name"], task_name)
self.assertEqual(evg_task_config["build_variant"], "variant")
- self.assertEqual(
- evg_task_config["selected_tests_to_run"],
- {"jstests/core/currentop_waiting_for_latch.js", "jstests/core/latch_analyzer.js"})
self.assertEqual(evg_task_config["suite"], "core_auth")
self.assertEqual(
evg_task_config["resmoke_args"],
@@ -228,55 +332,136 @@ class TestGetEvgTaskConfiguration(unittest.TestCase):
self.assertEqual(evg_task_config["fallback_num_sub_suites"], "1")
-class TestGenerateShrubConfig(unittest.TestCase):
- @patch(ns("_get_selected_tests_task_configuration"))
- @patch(ns("_get_evg_task_configuration"))
+class TestUpdateConfigDictWithTask(unittest.TestCase):
+ @patch(ns("SelectedTestsConfigOptions"))
+ @patch(ns("GenerateSubSuites"))
+ def test_suites_and_tasks_are_generated(self, generate_subsuites_mock,
+ selected_tests_config_options_mock):
+ suites_config_mock = {"my_suite_0.yml": "suite file contents"}
+ generate_subsuites_mock.return_value.generate_suites_config.return_value = suites_config_mock
+
+ def generate_task_config(shrub_config, suites):
+ shrub_config.task("my_fake_task")
+
+ generate_subsuites_mock.return_value.generate_task_config.side_effect = generate_task_config
+
+ shrub_config = Configuration()
+ config_dict_of_suites_and_tasks = {}
+ under_test._update_config_with_task(
+ evg_api=MagicMock(), shrub_config=shrub_config, config_options=MagicMock(),
+ config_dict_of_suites_and_tasks=config_dict_of_suites_and_tasks)
+
+ self.assertEqual(config_dict_of_suites_and_tasks, suites_config_mock)
+ self.assertIn("my_fake_task", shrub_config.to_json())
+
@patch(ns("SelectedTestsConfigOptions"))
@patch(ns("GenerateSubSuites"))
- def test_when_test_by_task_returned(
- self, generate_subsuites_mock, selected_tests_config_options_mock,
- get_evg_task_configuration_mock, get_selected_tests_task_configuration_mock):
- evg_api = MagicMock()
- evg_conf = MagicMock()
- expansion_file = MagicMock()
+ def test_no_suites_or_tasks_are_generated(self, generate_subsuites_mock,
+ selected_tests_config_options_mock):
+ generate_subsuites_mock.return_value.generate_suites_config.return_value = {}
+
+ def generate_task_config(shrub_config, suites):
+ pass
+
+ generate_subsuites_mock.return_value.generate_task_config.side_effect = generate_task_config
+
+ shrub_config = Configuration()
+ config_dict_of_suites_and_tasks = {}
+ under_test._update_config_with_task(
+ evg_api=MagicMock(), shrub_config=shrub_config, config_options=MagicMock(),
+ config_dict_of_suites_and_tasks=config_dict_of_suites_and_tasks)
+
+ self.assertEqual(config_dict_of_suites_and_tasks, {})
+ self.assertEqual(shrub_config.to_json(), "{}")
+
+
+class TestGetTaskConfigsForTestMappings(unittest.TestCase):
+ @patch(ns("_get_evg_task_config"))
+ def test_get_config_for_test_mapping(self, get_evg_task_config_mock):
tests_by_task = tests_by_task_stub()
- yml_suite_file_contents = MagicMock()
- shrub_json_file_contents = MagicMock()
- suite_file_dict_mock = {"auth_0.yml": yml_suite_file_contents}
- generate_subsuites_mock.return_value.generate_task_config_and_suites.return_value = (
- suite_file_dict_mock,
- shrub_json_file_contents,
- )
+ get_evg_task_config_mock.side_effect = [{"task_config_key": "task_config_value_1"},
+ {"task_config_key": "task_config_value_2"}]
+
+ task_configs = under_test._get_task_configs_for_test_mappings(
+ expansion_file=MagicMock(), tests_by_task=tests_by_task,
+ build_variant_config=MagicMock())
- config_file_dict = under_test._generate_shrub_config(evg_api, evg_conf, expansion_file,
- tests_by_task, "variant")
+ self.assertEqual(task_configs["jsCore_auth"]["task_config_key"], "task_config_value_1")
self.assertEqual(
- config_file_dict,
- {
- "auth_0.yml": yml_suite_file_contents,
- "selected_tests_config.json": shrub_json_file_contents,
- },
- )
+ task_configs["jsCore_auth"]["selected_tests_to_run"],
+ {"jstests/core/currentop_waiting_for_latch.js", "jstests/core/latch_analyzer.js"})
+ self.assertEqual(task_configs["auth_gen"]["task_config_key"], "task_config_value_2")
+ self.assertEqual(task_configs["auth_gen"]["selected_tests_to_run"],
+ {'jstests/auth/auth3.js'})
+
+
+class TestGetTaskConfigsForTaskMappings(unittest.TestCase):
+ @patch(ns("_get_evg_task_config"))
+ def test_get_config_for_task_mapping(self, get_evg_task_config_mock):
+ tasks = ["task_1", "task_2"]
+ get_evg_task_config_mock.side_effect = [{"task_config_key": "task_config_value_1"},
+ {"task_config_key": "task_config_value_2"}]
+ task_configs = under_test._get_task_configs_for_task_mappings(
+ expansion_file=MagicMock(), related_tasks=tasks, build_variant_config=MagicMock())
- @patch(ns("_get_selected_tests_task_configuration"))
- @patch(ns("_get_evg_task_configuration"))
+ self.assertEqual(task_configs["task_1"]["task_config_key"], "task_config_value_1")
+ self.assertEqual(task_configs["task_2"]["task_config_key"], "task_config_value_2")
+
+
+class TestRun(unittest.TestCase):
@patch(ns("SelectedTestsConfigOptions"))
- @patch(ns("GenerateSubSuites"))
- def test_when_no_test_by_task_returned(
- self, generate_subsuites_mock, selected_tests_config_options_mock,
- get_evg_task_configuration_mock, get_selected_tests_task_configuration_mock):
- evg_api = MagicMock()
- evg_conf = MagicMock()
- expansion_file = MagicMock()
- tests_by_task = {}
- yml_suite_file_contents = MagicMock()
- shrub_json_file_contents = MagicMock()
- suite_file_dict_mock = {"auth_0.yml": yml_suite_file_contents}
- generate_subsuites_mock.return_value.generate_task_config_and_suites.return_value = (
- suite_file_dict_mock,
- shrub_json_file_contents,
- )
+ @patch(ns("_find_selected_test_files"))
+ @patch(ns("create_task_list_for_tests"))
+ @patch(ns("_get_task_configs_for_test_mappings"))
+ @patch(ns("_find_selected_tasks"))
+ @patch(ns("_update_config_with_task"))
+ # pylint: disable=too-many-arguments
+ def test_run_with_related_tests_but_no_related_tasks(
+ self, update_config_with_task_mock, find_selected_tasks_mock,
+ get_task_configs_for_test_mappings_mock, create_task_list_for_tests_mock,
+ find_selected_test_files_mock, selected_tests_config_options):
+ find_selected_test_files_mock.return_value = {"jstests/file-1.js", "jstests/file-3.js"}
+ get_task_configs_for_test_mappings_mock.return_value = {
+ "task_config_key": "task_config_value_1"
+ }
+ find_selected_tasks_mock.return_value = set()
+
+ def update_config_with_task(evg_api, shrub_config, config_options,
+ config_dict_of_suites_and_tasks):
+ config_dict_of_suites_and_tasks["new_config_key"] = "new_config_values"
+
+ update_config_with_task_mock.side_effect = update_config_with_task
+
+ changed_files = {"src/file1.cpp", "src/file2.js"}
+ config_dict_of_suites_and_tasks = under_test.run(MagicMock(), MagicMock(), MagicMock(),
+ MagicMock(), changed_files, "variant")
+
+ self.assertEqual(config_dict_of_suites_and_tasks["new_config_key"], "new_config_values")
+
+ @patch(ns("SelectedTestsConfigOptions"))
+ @patch(ns("_find_selected_test_files"))
+ @patch(ns("_get_task_configs_for_task_mappings"))
+ @patch(ns("_find_selected_tasks"))
+ @patch(ns("_update_config_with_task"))
+ # pylint: disable=too-many-arguments
+ def test_run_with_related_tasks_but_no_related_tests(
+ self, update_config_with_task_mock, find_selected_tasks_mock,
+ get_task_configs_for_task_mappings_mock, find_selected_test_files_mock,
+ selected_tests_config_options):
+ find_selected_test_files_mock.return_value = {}
+ get_task_configs_for_task_mappings_mock.return_value = {
+ "task_config_key": "task_config_value_1"
+ }
+ find_selected_tasks_mock.return_value = {"jsCore_auth", "auth_gen"}
+
+ def update_config_with_task(evg_api, shrub_config, config_options,
+ config_dict_of_suites_and_tasks):
+ config_dict_of_suites_and_tasks["new_config_key"] = "new_config_values"
+
+ update_config_with_task_mock.side_effect = update_config_with_task
+
+ changed_files = {"src/file1.cpp", "src/file2.js"}
+ config_dict_of_suites_and_tasks = under_test.run(MagicMock(), MagicMock(), MagicMock(),
+ MagicMock(), changed_files, "variant")
- config_file_dict = under_test._generate_shrub_config(evg_api, evg_conf, expansion_file,
- tests_by_task, "variant")
- self.assertEqual(config_file_dict, {})
+ self.assertEqual(config_dict_of_suites_and_tasks["new_config_key"], "new_config_values")
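
Aside: the TestRun cases above exercise the precedence rule documented in run(): task-mapping configs overwrite test-mapping configs for the same task, since the task-mapping variant regenerates the whole task rather than a subset of tests. A toy illustration of that merge (values are illustrative, not taken from the service):

    # Toy values; only the dict-merge order matters.
    test_mapping_task_configs = {
        "auth_gen": {"selected_tests_to_run": {"jstests/auth/auth3.js"}},
    }
    task_mapping_task_configs = {
        "auth_gen": {},  # no selected_tests_to_run -> the whole task is regenerated
    }

    task_configs = {}
    task_configs.update(test_mapping_task_configs)
    task_configs.update(task_mapping_task_configs)   # task mappings win

    assert "selected_tests_to_run" not in task_configs["auth_gen"]
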
diff --git a/etc/evergreen.yml b/etc/evergreen.yml
index 8ec4ebb293a..2710492bebe 100644
--- a/etc/evergreen.yml
+++ b/etc/evergreen.yml
@@ -1537,7 +1537,7 @@ functions:
- command: archive.targz_pack
params:
target: generate_tasks_config.tgz
- source_dir: src/generated_resmoke_config
+ source_dir: src/selected_tests_config
include:
- "*"
- command: s3.put
@@ -1555,7 +1555,7 @@ functions:
params:
optional: true
files:
- - src/generated_resmoke_config/*.json
+ - src/selected_tests_config/*.json
"generate burn in tags":
- command: expansions.write
@@ -6219,26 +6219,22 @@ tasks:
vars:
resmoke_args: --suites=mongos_test
-- <<: *task_template
- name: multiversion_auth
+- name: multiversion_auth_gen
tags: ["auth", "multiversion"]
commands:
- - func: "do setup"
- - func: "do multiversion setup"
- - func: "run tests"
+ - func: "generate resmoke tasks"
vars:
- task_path_suffix: /data/multiversion
- resmoke_args: --suites=multiversion_auth --storageEngine=wiredTiger
+ resmoke_args: "--storageEngine=wiredTiger"
+ use_multiversion: /data/multiversion
+ fallback_num_sub_suites: 1
-- <<: *task_template
- name: multiversion
+- name: multiversion_gen
commands:
- - func: "do setup"
- - func: "do multiversion setup"
- - func: "run tests"
+ - func: "generate resmoke tasks"
vars:
- task_path_suffix: /data/multiversion
- resmoke_args: --suites=multiversion --storageEngine=wiredTiger
+ resmoke_args: "--storageEngine=wiredTiger"
+ use_multiversion: /data/multiversion
+ fallback_num_sub_suites: 1
- name: unittest_shell_hang_analyzer_gen
tags: ["misc_js", "sharded"]
@@ -8607,7 +8603,7 @@ buildvariants:
- name: .jscore .common
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- - name: multiversion
+ - name: multiversion_gen
- name: replica_sets
- name: replica_sets_jscore_passthrough
- name: .sharding .txns
@@ -8715,7 +8711,7 @@ buildvariants:
- name: .jscore .common
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- - name: multiversion
+ - name: multiversion_gen
- name: .powercycle
distros:
- ubuntu1604-powercycle
@@ -9090,7 +9086,7 @@ buildvariants:
- name: .jscore .common
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- - name: multiversion
+ - name: multiversion_gen
- name: replica_sets
- name: .replica_sets .common
- name: .sharding .jscore !.wo_snapshot !.multi_stmt
@@ -9199,7 +9195,7 @@ buildvariants:
- name: .jscore .common
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- - name: multiversion
+ - name: multiversion_gen
- name: replica_sets
- name: .replica_sets .common
- name: .sharding .jscore !.wo_snapshot !.multi_stmt
@@ -9504,8 +9500,8 @@ buildvariants:
- name: jsonSchema
- name: .jstestfuzz !.initsync
- name: .logical_session_cache .one_sec
- - name: multiversion
- - name: multiversion_auth
+ - name: multiversion_gen
+ - name: multiversion_auth_gen
- name: .powercycle
- name: .query_fuzzer
- name: .read_write_concern
@@ -9785,7 +9781,7 @@ buildvariants:
- name: mqlrun
- name: .multi_shard
- name: multi_stmt_txn_jscore_passthrough_with_migration_gen
- - name: multiversion
+ - name: multiversion_gen
- name: .query_fuzzer
- name: .read_write_concern .large
distros:
@@ -9871,8 +9867,8 @@ buildvariants:
- name: .logical_session_cache .repl
- name: .multi_shard
- name: multi_stmt_txn_jscore_passthrough_with_migration_gen
- - name: multiversion_auth
- - name: multiversion
+ - name: multiversion_auth_gen
+ - name: multiversion_gen
- name: noPassthrough_gen
- name: .replica_sets !.multi_oplog !.large
- name: .replica_sets !.multi_oplog .large
@@ -9945,7 +9941,7 @@ buildvariants:
- name: jsonSchema
- name: .jstestfuzz !.flow_control
- name: .logical_session_cache .repl
- - name: multiversion
+ - name: multiversion_gen
- name: .random_multiversion_replica_sets
- name: .read_write_concern .large
distros:
@@ -10059,7 +10055,7 @@ buildvariants:
- name: .multi_shard .common
- name: multi_stmt_txn_jscore_passthrough_with_migration_gen
- name: multi_shard_multi_stmt_txn_kill_primary_jscore_passthrough_gen
- - name: multiversion
+ - name: multiversion_gen
- name: .read_write_concern !.aggregation
distros:
- rhel62-large
@@ -10192,7 +10188,7 @@ buildvariants:
- name: .jstestfuzz_multiversion_gen
- name: .logical_session_cache .one_sec
- name: .multi_shard .common
- - name: multiversion
+ - name: multiversion_gen
- name: .multiversion_fuzzer
- name: .multiversion_passthrough
- name: .query_fuzzer
@@ -10475,7 +10471,7 @@ buildvariants:
- name: .jscore .common
- name: .jstestfuzz .common
- name: .logical_session_cache
- - name: multiversion
+ - name: multiversion_gen
- name: replica_sets
- name: .replica_sets .common
- name: .sharding .txns
@@ -10523,7 +10519,7 @@ buildvariants:
- name: .jscore .common
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- - name: multiversion
+ - name: multiversion_gen
- name: replica_sets
- name: .replica_sets .common
- name: .sharding .txns
@@ -10574,7 +10570,7 @@ buildvariants:
- name: .jscore .common
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- - name: multiversion
+ - name: multiversion_gen
- name: replica_sets
- name: .replica_sets .common
- name: .sharding .txns
@@ -11100,7 +11096,7 @@ buildvariants:
- name: free_monitoring
- name: .jscore .common !.decimal
- name: .jstestfuzz .common
- - name: multiversion
+ - name: multiversion_gen
- name: .logical_session_cache .one_sec
- name: replica_sets
- name: .replica_sets .common
@@ -11184,7 +11180,7 @@ buildvariants:
- name: .jscore .common !.decimal
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- - name: multiversion
+ - name: multiversion_gen
- name: replica_sets
- name: .replica_sets .common
- name: .sharding .txns
@@ -11284,7 +11280,7 @@ buildvariants:
- name: .jscore .common !.decimal
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- - name: multiversion
+ - name: multiversion_gen
- name: replica_sets
- name: .replica_sets .common
- name: .sharding .jscore !.wo_snapshot !.multi_stmt
@@ -11383,7 +11379,7 @@ buildvariants:
- name: .jscore .common !.decimal
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- - name: multiversion
+ - name: multiversion_gen
- name: replica_sets
- name: .replica_sets .common
- name: .sharding .jscore !.wo_snapshot !.multi_stmt
@@ -11552,7 +11548,7 @@ buildvariants:
- name: .logical_session_cache
- name: .multi_shard
- name: multi_stmt_txn_jscore_passthrough_with_migration_gen
- - name: multiversion
+ - name: multiversion_gen
- name: .query_fuzzer
- name: .read_write_concern .large
distros:
@@ -11648,7 +11644,7 @@ buildvariants:
- name: .logical_session_cache
- name: .multi_shard
- name: multi_stmt_txn_jscore_passthrough_with_migration_gen
- - name: multiversion
+ - name: multiversion_gen
- name: .query_fuzzer
- name: .read_write_concern .large
distros:
@@ -11860,7 +11856,7 @@ buildvariants:
- name: .jstestfuzz_multiversion_gen
- name: .logical_session_cache
- name: .multi_shard .common
- - name: multiversion
+ - name: multiversion_gen
- name: .multiversion_fuzzer
- name: .multiversion_passthrough
- name: .query_fuzzer
@@ -11951,7 +11947,7 @@ buildvariants:
- name: .jstestfuzz_multiversion_gen
- name: .logical_session_cache .one_sec
- name: .multi_shard .common
- - name: multiversion
+ - name: multiversion_gen
- name: .multiversion_fuzzer
- name: .multiversion_passthrough
- name: .random_multiversion_replica_sets
@@ -12058,7 +12054,7 @@ buildvariants:
- name: jsCore_op_query
- name: jsCore_txns_large_txns_format
- name: .logical_session_cache .one_sec
- - name: multiversion
+ - name: multiversion_gen
- name: .read_write_concern
- name: .replica_sets .san
- name: .read_only