summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorbst-marge-bot <marge-bot@buildstream.build>2020-08-10 10:26:00 +0000
committerbst-marge-bot <marge-bot@buildstream.build>2020-08-10 10:26:00 +0000
commite3cb8b5b0a33c4b0c5333ea714d4722daba0c248 (patch)
tree5c933947f28c93a79bf132f54ab363ac1d7d6413
parent9351fbda2dfeda27a420eeeb176aa70c70331b44 (diff)
parent97eac15d341ac26cf16c019719edec2078058045 (diff)
downloadbuildstream-e3cb8b5b0a33c4b0c5333ea714d4722daba0c248.tar.gz
Merge branch 'tristan/fix-redundant-session-headings' into 'master'
Refactor session headings in the frontend Closes #1369 See merge request BuildStream/buildstream!2009
-rw-r--r--NEWS10
-rw-r--r--doc/source/format_project.rst24
-rw-r--r--src/buildstream/_context.py7
-rw-r--r--src/buildstream/_frontend/cli.py2
-rw-r--r--src/buildstream/_frontend/widget.py122
-rw-r--r--src/buildstream/_loader/loadcontext.py27
-rw-r--r--src/buildstream/_pluginfactory/pluginfactory.py18
-rw-r--r--src/buildstream/_pluginfactory/pluginorigin.py1
-rw-r--r--src/buildstream/_pluginfactory/pluginoriginjunction.py13
-rw-r--r--src/buildstream/_pluginfactory/pluginoriginlocal.py11
-rw-r--r--src/buildstream/_pluginfactory/pluginoriginpip.py6
-rw-r--r--src/buildstream/_project.py89
-rw-r--r--src/buildstream/_stream.py28
-rw-r--r--src/buildstream/_workspaces.py2
-rw-r--r--src/buildstream/_yaml.pyx2
-rw-r--r--src/buildstream/element.py4
-rw-r--r--src/buildstream/testing/__init__.py3
-rw-r--r--src/buildstream/testing/_sourcetests/mirror.py4
-rw-r--r--src/buildstream/testing/_sourcetests/track.py2
-rw-r--r--src/buildstream/testing/_yaml.py9
-rw-r--r--src/buildstream/testing/runcli.py2
-rw-r--r--src/buildstream/types.py18
-rw-r--r--tests/artifactcache/junctions.py2
-rw-r--r--tests/elements/filter.py14
-rw-r--r--tests/format/include_composition.py16
-rw-r--r--tests/frontend/init.py10
-rw-r--r--tests/frontend/interactive_init.py2
-rw-r--r--tests/frontend/workspace.py14
-rw-r--r--tests/internals/yaml.py34
29 files changed, 338 insertions, 158 deletions
diff --git a/NEWS b/NEWS
index 8c97dc6e0..babcb259b 100644
--- a/NEWS
+++ b/NEWS
@@ -2,6 +2,16 @@
(unreleased)
============
+Format
+------
+
+ o BREAKING CHANGE: Some project.conf keys are no longer allowed to be specified outside
+ of the project.conf file (they cannot be specified in an include file), these include:
+ - name
+ - element-path
+ - min-version
+ - plugins
+
CLI
---
diff --git a/doc/source/format_project.rst b/doc/source/format_project.rst
index ffec5e176..0216e524a 100644
--- a/doc/source/format_project.rst
+++ b/doc/source/format_project.rst
@@ -36,10 +36,13 @@ of your project.
name: my-project-name
-.. note::
+The project name may contain alphanumeric characters, dashes and
+underscores, and may not start with a leading digit.
- The project name may contain alphanumeric characters, dashes and
- underscores, and may not start with a leading digit.
+.. attention::
+
+ The project name must be specified in the ``project.conf`` and
+ cannot be :ref:`included <format_directives_include>` from a separate file.
.. _project_min_version:
@@ -78,6 +81,11 @@ it has been there from the beginning.
plugins also implement their own YAML configuration fragments and as
such are revisioned separately from the core format.
+.. attention::
+
+ The ``min-version`` must be specified in the ``project.conf`` and
+ cannot be :ref:`included <format_directives_include>` from a separate file.
+
.. _project_element_path:
@@ -94,6 +102,11 @@ allows the user to specify a project subdirectory where element
Note that elements are referred to by their relative paths, whenever
elements are referred to in a ``.bst`` file or on the command line.
+.. attention::
+
+ The ``element-path`` can only be specified in the ``project.conf`` and
+ cannot be :ref:`included <format_directives_include>` from a separate file.
+
.. _project_format_ref_storage:
@@ -383,6 +396,11 @@ of the plugins it means to make use of and the origin from which they can be loa
Note that plugins with the same name from different origins are not permitted.
+.. attention::
+
+ The plugins can only be specified in the ``project.conf`` and cannot be
+ :ref:`included <format_directives_include>` from a separate file.
+
.. _project_plugins_local:
diff --git a/src/buildstream/_context.py b/src/buildstream/_context.py
index c0e92b98e..8b559153e 100644
--- a/src/buildstream/_context.py
+++ b/src/buildstream/_context.py
@@ -233,11 +233,14 @@ class Context:
# Load default config
#
- defaults = _yaml.load(_site.default_user_config)
+ defaults = _yaml.load(_site.default_user_config, shortname="userconfig.yaml")
if config:
self.config_origin = os.path.abspath(config)
- user_config = _yaml.load(config)
+
+ # Here we use the fullpath as the shortname as well, as it is useful to have
+ # a fullpath displayed in errors for the user configuration
+ user_config = _yaml.load(config, shortname=config)
user_config._composite(defaults)
# Give obsoletion warnings
diff --git a/src/buildstream/_frontend/cli.py b/src/buildstream/_frontend/cli.py
index ff68af66d..20914f15d 100644
--- a/src/buildstream/_frontend/cli.py
+++ b/src/buildstream/_frontend/cli.py
@@ -120,7 +120,7 @@ def complete_target(args, incomplete):
else:
project_file = os.path.join(base_directory, project_conf)
try:
- project = _yaml.load(project_file)
+ project = _yaml.load(project_file, shortname=project_conf)
except LoadError:
# If there is no project conf in context, just dont
# even bother trying to complete anything.
diff --git a/src/buildstream/_frontend/widget.py b/src/buildstream/_frontend/widget.py
index 5038e9d6b..a4268f62b 100644
--- a/src/buildstream/_frontend/widget.py
+++ b/src/buildstream/_frontend/widget.py
@@ -439,11 +439,11 @@ class LogLine(Widget):
# and so on.
#
# Args:
- # project (Project): The toplevel project we were invoked from
+ # toplevel_project (Project): The toplevel project we were invoked from
# stream (Stream): The stream
# log_file (file): An optional file handle for additional logging
#
- def print_heading(self, project, stream, *, log_file):
+ def print_heading(self, toplevel_project, stream, *, log_file):
context = self.context
starttime = datetime.datetime.now()
text = ""
@@ -455,7 +455,7 @@ class LogLine(Widget):
text += self.content_profile.fmt("BuildStream Version {}\n".format(bst_version), bold=True)
values = OrderedDict()
values["Session Start"] = starttime.strftime("%A, %d-%m-%Y at %H:%M:%S")
- values["Project"] = "{} ({})".format(project.name, project.directory)
+ values["Project"] = "{} ({})".format(toplevel_project.name, toplevel_project.directory)
values["Targets"] = ", ".join([t.name for t in stream.targets])
text += self._format_values(values)
@@ -476,23 +476,54 @@ class LogLine(Widget):
text += self._format_values(values)
text += "\n"
- # Project Options
- values = OrderedDict()
- project.options.printable_variables(values)
- if values:
- text += self.content_profile.fmt("Project Options\n", bold=True)
+ # Print information about each loaded project
+ #
+ for project_info in toplevel_project.loaded_projects():
+ project = project_info.project
+
+ # Project title line
+ text += (
+ self.content_profile.fmt("Project", bold=True)
+ + self.format_profile.fmt(": ", bold=True)
+ + self.content_profile.fmt(project.name, bold=True)
+ )
+ text += "\n"
+
+ # Details on how the project was loaded
+ #
+ values = OrderedDict()
+ if project.junction:
+ values["Junction path"] = project_info.project.junction._get_full_name()
+ if project_info.provenance:
+ values["Loaded by"] = str(project_info.provenance)
text += self._format_values(values)
+
+ # Print out duplicate declarations
+ if project_info.duplicates:
+ text += self.format_profile.fmt("{}Declared duplicate by:\n".format(self._indent))
+ for duplicate in project_info.duplicates:
+ text += self.content_profile.fmt("{}{}\n".format(self._indent * 2, duplicate))
+
+ # Print out internal declarations
+ if project_info.internal:
+ text += self.format_profile.fmt("{}Declared internal by:\n".format(self._indent))
+ for internal in project_info.internal:
+ text += self.content_profile.fmt("{}{}\n".format(self._indent * 2, internal))
+
text += "\n"
- # Plugins
- text += self._format_plugins(
- [p for p, _, _ in project.first_pass_config.element_factory.list_plugins()],
- [p for p, _, _ in project.first_pass_config.source_factory.list_plugins()],
- )
- if project.config.element_factory and project.config.source_factory:
+ # Project Options
+ values = OrderedDict()
+ project.options.printable_variables(values)
+ if values:
+ text += self.format_profile.fmt("{}Project Options\n".format(self._indent))
+ text += self._format_values(values, indent=2)
+ text += "\n"
+
+ # Plugins
text += self._format_plugins(
- [p for p, _, _ in project.config.element_factory.list_plugins()],
- [p for p, _, _ in project.config.source_factory.list_plugins()],
+ {p: d for p, _, _, d in project.element_factory.list_plugins()},
+ {p: d for p, _, _, d in project.source_factory.list_plugins()},
)
# Pipeline state
@@ -731,25 +762,27 @@ class LogLine(Widget):
lines = log[(end + 1) :].splitlines()
return "\n".join([line.decode("utf-8") for line in lines]).rstrip()
+ # _format_plugins()
+ #
+ # Formats the plugins loaded by a project
+ #
+ # Args:
+ # element_plugins (dict): Dict of element plugin kind and display string tuples
+ # source_plugins (dict): Dict of source plugin kind and display string tuples
+ #
+ # Returns:
+ # (str): The formatted text
+ #
def _format_plugins(self, element_plugins, source_plugins):
text = ""
-
- if not (element_plugins or source_plugins):
- return text
-
- text += self.content_profile.fmt("Loaded Plugins\n", bold=True)
-
if element_plugins:
- text += self.format_profile.fmt(" Element Plugins\n")
- for plugin in element_plugins:
- text += self.content_profile.fmt(" - {}\n".format(plugin))
-
+ text += self.format_profile.fmt("{}Element Plugins\n".format(self._indent))
+ text += self._format_values(element_plugins, style_key=True, indent=2)
+ text += "\n"
if source_plugins:
- text += self.format_profile.fmt(" Source Plugins\n")
- for plugin in source_plugins:
- text += self.content_profile.fmt(" - {}\n".format(plugin))
-
- text += "\n"
+ text += self.format_profile.fmt("{}Source Plugins\n".format(self._indent))
+ text += self._format_values(source_plugins, style_key=True, indent=2)
+ text += "\n"
return text
@@ -759,25 +792,40 @@ class LogLine(Widget):
# the values are aligned.
#
# Args:
- # values: A dictionary, usually an OrderedDict()
- # style_value: Whether to use the content profile for the values
+ # values (dict): A dictionary, usually an OrderedDict()
+ # style_key (bool): Whether to use the content profile for the keys
+ # style_value (bool): Whether to use the content profile for the values
+ # indent (number): Number of initial indentation levels
#
# Returns:
# (str): The formatted values
#
- def _format_values(self, values, style_value=True):
+ def _format_values(self, values, *, style_key=False, style_value=True, indent=1):
text = ""
max_key_len = 0
for key, value in values.items():
max_key_len = max(len(key), max_key_len)
for key, value in values.items():
+
+ key = str(key)
+ text += self._indent * indent
+ if style_key:
+ text += self.content_profile.fmt(key)
+ else:
+ text += self.format_profile.fmt(key)
+ text += self.format_profile.fmt(":")
+
+ # Special case for values containing newlines
if isinstance(value, str) and "\n" in value:
- text += self.format_profile.fmt(" {}:\n".format(key))
- text += textwrap.indent(value, self._indent)
+ text += "\n"
+ text += textwrap.indent(value, self._indent * indent)
continue
- text += self.format_profile.fmt(" {}: {}".format(key, " " * (max_key_len - len(key))))
+ # Alignment spacing
+ text += " {}".format(" " * (max_key_len - len(key)))
+
+ # Print the value
if style_value:
text += self.content_profile.fmt(str(value))
else:
diff --git a/src/buildstream/_loader/loadcontext.py b/src/buildstream/_loader/loadcontext.py
index 6183a192b..4e6c9bca6 100644
--- a/src/buildstream/_loader/loadcontext.py
+++ b/src/buildstream/_loader/loadcontext.py
@@ -19,6 +19,7 @@
from .._exceptions import LoadError
from ..exceptions import LoadErrorReason
+from ..types import _ProjectInformation
# ProjectLoaders()
@@ -74,6 +75,21 @@ class ProjectLoaders:
elif primary and duplicates:
self._raise_conflict(duplicates, internal)
+ # loaded_projects()
+ #
+ # A generator which yields all of the instances
+ # of this loaded project.
+ #
+ # Yields:
+ # (_ProjectInformation): A descriptive project information object
+ #
+ def loaded_projects(self):
+ for loader in self._collect:
+ duplicating, internalizing = self._search_project_relationships(loader)
+ yield _ProjectInformation(
+ loader.project, loader.provenance, [str(l) for l in duplicating], [str(l) for l in internalizing]
+ )
+
# _search_project_relationships()
#
# Searches this loader's ancestry for projects which mark this
@@ -241,3 +257,14 @@ class LoadContext:
self._loaders[project.name] = project_loaders
project_loaders.register_loader(loader)
+
+ # loaded_projects()
+ #
+ # A generator which yields all of the loaded projects
+ #
+ # Yields:
+ # (_ProjectInformation): A descriptive project information object
+ #
+ def loaded_projects(self):
+ for _, project_loaders in self._loaders.items():
+ yield from project_loaders.loaded_projects()
diff --git a/src/buildstream/_pluginfactory/pluginfactory.py b/src/buildstream/_pluginfactory/pluginfactory.py
index df950abee..f997d9017 100644
--- a/src/buildstream/_pluginfactory/pluginfactory.py
+++ b/src/buildstream/_pluginfactory/pluginfactory.py
@@ -165,10 +165,11 @@ class PluginFactory:
# (str): The plugin kind
# (type): The loaded plugin type
# (str): The default yaml file, if any
+ # (str): The display string describing how the plugin was loaded
#
- def list_plugins(self) -> Iterator[Tuple[str, Type[Plugin], str]]:
- for kind, (plugin_type, defaults) in self._types.items():
- yield kind, plugin_type, defaults
+ def list_plugins(self) -> Iterator[Tuple[str, Type[Plugin], str, str]]:
+ for kind, (plugin_type, defaults, display) in self._types.items():
+ yield kind, plugin_type, defaults, display
# get_plugin_paths():
#
@@ -183,12 +184,13 @@ class PluginFactory:
# (str): The full path to the directory containing the plugin
# (str): The full path to the accompanying .yaml file containing
# the plugin's preferred defaults.
+ # (str): The explanatory display string describing how this plugin was loaded
#
def get_plugin_paths(self, kind: str):
try:
origin = self._origins[kind]
except KeyError:
- return None, None
+ return None, None, None
return origin.get_plugin_paths(kind, self._plugin_type)
@@ -220,7 +222,7 @@ class PluginFactory:
# the optional accompanying .yaml file for the plugin, should
# one have been provided.
#
- location, defaults = self.get_plugin_paths(kind)
+ location, defaults, display = self.get_plugin_paths(kind)
if location:
@@ -245,10 +247,12 @@ class PluginFactory:
defaults = os.path.join(self._site_plugins_path, "{}.yaml".format(kind))
if not os.path.exists(defaults):
defaults = None
+ display = "core plugin"
- self._types[kind] = (self._load_plugin(source, kind), defaults)
+ self._types[kind] = (self._load_plugin(source, kind), defaults, display)
- return self._types[kind]
+ type_, defaults, _ = self._types[kind]
+ return type_, defaults
# _load_plugin():
#
diff --git a/src/buildstream/_pluginfactory/pluginorigin.py b/src/buildstream/_pluginfactory/pluginorigin.py
index bd987171d..e75b8cb58 100644
--- a/src/buildstream/_pluginfactory/pluginorigin.py
+++ b/src/buildstream/_pluginfactory/pluginorigin.py
@@ -133,6 +133,7 @@ class PluginOrigin:
# (str): The full path to the directory containing the plugin
# (str): The full path to the accompanying .yaml file containing
# the plugin's preferred defaults.
+ # (str): The explanatory display string describing how this plugin was loaded
#
def get_plugin_paths(self, kind, plugin_type):
pass
diff --git a/src/buildstream/_pluginfactory/pluginoriginjunction.py b/src/buildstream/_pluginfactory/pluginoriginjunction.py
index 4e0a53cfb..c32a7956c 100644
--- a/src/buildstream/_pluginfactory/pluginoriginjunction.py
+++ b/src/buildstream/_pluginfactory/pluginoriginjunction.py
@@ -42,13 +42,13 @@ class PluginOriginJunction(PluginOrigin):
# Now get the appropriate PluginFactory object
#
if plugin_type == PluginType.SOURCE:
- factory = project.config.source_factory
+ factory = project.source_factory
elif plugin_type == PluginType.ELEMENT:
- factory = project.config.element_factory
+ factory = project.element_factory
# Now ask for the paths from the subproject PluginFactory
try:
- location, defaults = factory.get_plugin_paths(kind)
+ location, defaults, display = factory.get_plugin_paths(kind)
except PluginError as e:
# Add some context to an error raised by loading a plugin from a subproject
#
@@ -74,7 +74,12 @@ class PluginOriginJunction(PluginOrigin):
reason="junction-plugin-not-found",
)
- return location, defaults
+ # Use the resolved project path for the display string rather than the user configured junction path
+ project_path = "toplevel project"
+ if project.junction:
+ project_path = project.junction._get_full_name()
+
+ return location, defaults, "junction: {} ({})".format(project_path, display)
def load_config(self, origin_node):
diff --git a/src/buildstream/_pluginfactory/pluginoriginlocal.py b/src/buildstream/_pluginfactory/pluginoriginlocal.py
index 5cfe2fd3a..34fba09e6 100644
--- a/src/buildstream/_pluginfactory/pluginoriginlocal.py
+++ b/src/buildstream/_pluginfactory/pluginoriginlocal.py
@@ -27,21 +27,20 @@ class PluginOriginLocal(PluginOrigin):
def __init__(self):
super().__init__(PluginOriginType.LOCAL)
- # An absolute path to where plugins from this origin are found
+ # Project relative path to where plugins from this origin are found
self._path = None
def get_plugin_paths(self, kind, plugin_type):
- defaults = os.path.join(self._path, "{}.yaml".format(kind))
+ path = os.path.join(self.project.directory, self._path)
+ defaults = os.path.join(path, "{}.yaml".format(kind))
if not os.path.exists(defaults):
defaults = None
- return self._path, defaults
+ return path, defaults, "project directory: {}".format(self._path)
def load_config(self, origin_node):
origin_node.validate_keys(["path", *PluginOrigin._COMMON_CONFIG_KEYS])
path_node = origin_node.get_scalar("path")
- path = self.project.get_path_from_node(path_node, check_is_dir=True)
-
- self._path = os.path.join(self.project.directory, path)
+ self._path = self.project.get_path_from_node(path_node, check_is_dir=True)
diff --git a/src/buildstream/_pluginfactory/pluginoriginpip.py b/src/buildstream/_pluginfactory/pluginoriginpip.py
index 3a9c63f7e..013cd67f3 100644
--- a/src/buildstream/_pluginfactory/pluginoriginpip.py
+++ b/src/buildstream/_pluginfactory/pluginoriginpip.py
@@ -89,7 +89,11 @@ class PluginOriginPip(PluginOrigin):
# The plugin didn't have an accompanying YAML file
defaults = None
- return os.path.dirname(location), defaults
+ return (
+ os.path.dirname(location),
+ defaults,
+ "python package '{}' at: {}".format(package.dist, package.dist.location),
+ )
def load_config(self, origin_node):
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index b45c6c695..3562ea5d4 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -74,8 +74,6 @@ class HostMount:
# Represents project configuration that can have different values for junctions.
class ProjectConfig:
def __init__(self):
- self.element_factory = None
- self.source_factory = None
self.options = None # OptionPool
self.base_variables = {} # The base set of variables
self.element_overrides = {} # Element specific configurations
@@ -142,6 +140,9 @@ class Project:
self.source_cache_specs = None
self.remote_execution_specs = None
+ self.element_factory = None # ElementFactory for loading elements
+ self.source_factory = None # SourceFactory for loading sources
+
#
# Private Members
#
@@ -345,16 +346,12 @@ class Project:
#
# Args:
# meta (MetaElement): The loaded MetaElement
- # first_pass (bool): Whether to use first pass configuration (for junctions)
#
# Returns:
# (Element): A newly created Element object of the appropriate kind
#
- def create_element(self, meta, *, first_pass=False):
- if first_pass:
- return self.first_pass_config.element_factory.create(self._context, self, meta)
- else:
- return self.config.element_factory.create(self._context, self, meta)
+ def create_element(self, meta):
+ return self.element_factory.create(self._context, self, meta)
# create_source()
#
@@ -363,16 +360,12 @@ class Project:
# Args:
# meta (MetaSource): The loaded MetaSource
# variables (Variables): The list of variables available to the source
- # first_pass (bool): Whether to use first pass configuration (for junctions)
#
# Returns:
# (Source): A newly created Source object of the appropriate kind
#
- def create_source(self, meta, variables, *, first_pass=False):
- if first_pass:
- return self.first_pass_config.source_factory.create(self._context, self, meta, variables)
- else:
- return self.config.source_factory.create(self._context, self, meta, variables)
+ def create_source(self, meta, variables):
+ return self.source_factory.create(self._context, self, meta, variables)
# get_alias_uri()
#
@@ -629,6 +622,23 @@ class Project:
return False
+ # loaded_projects()
+ #
+ # A generator which yields all the projects in context of a loaded
+ # pipeline, including the self project.
+ #
+ # Projects will be yielded in the order in which they were loaded
+ # for the current session's pipeline.
+ #
+ # This is used by the frontend to print information about all the
+ # loaded projects.
+ #
+ # Yields:
+ # (_ProjectInformation): A descriptive project information object
+ #
+ def loaded_projects(self):
+ yield from self.load_context.loaded_projects()
+
########################################################
# Private Methods #
########################################################
@@ -639,8 +649,9 @@ class Project:
#
# Args:
# node (MappingNode): The toplevel project.conf node
+ # first_pass (bool): Whether this is the first or second pass
#
- def _validate_toplevel_node(self, node):
+ def _validate_toplevel_node(self, node, *, first_pass=False):
node.validate_keys(
[
"min-version",
@@ -670,6 +681,24 @@ class Project:
]
)
+ # Keys which are invalid if specified outside of project.conf
+ if not first_pass:
+ invalid_keys = {"name", "element-path", "min-version", "plugins"}
+
+ for invalid_key in invalid_keys:
+ invalid_node = node.get_node(invalid_key, allow_none=True)
+ if invalid_node:
+ provenance = invalid_node.get_provenance()
+ if (
+ provenance._shortname != "project.conf"
+ and provenance._filename != _site.default_project_config
+ ):
+ raise LoadError(
+ "{}: Unexpected key: {}".format(provenance, invalid_key),
+ LoadErrorReason.INVALID_DATA,
+ detail="The '{}' configuration must be specified in project.conf".format(invalid_key),
+ )
+
# _validate_version()
#
# Asserts that we have a BuildStream installation which is recent
@@ -747,11 +776,11 @@ class Project:
# Load builtin default
projectfile = os.path.join(self.directory, _PROJECT_CONF_FILE)
- self._default_config_node = _yaml.load(_site.default_project_config)
+ self._default_config_node = _yaml.load(_site.default_project_config, shortname="projectconfig.yaml")
# Load project local config and override the builtin
try:
- self._project_conf = _yaml.load(projectfile)
+ self._project_conf = _yaml.load(projectfile, shortname=_PROJECT_CONF_FILE, project=self)
except LoadError as e:
# Raise a more specific error here
if e.reason == LoadErrorReason.MISSING_FILE:
@@ -766,7 +795,7 @@ class Project:
# Assert project's minimum required version early, before validating toplevel keys
self._validate_version(pre_config_node)
- self._validate_toplevel_node(pre_config_node)
+ self._validate_toplevel_node(pre_config_node, first_pass=True)
# The project name, element path and option declarations
# are constant and cannot be overridden by option conditional statements
@@ -818,6 +847,9 @@ class Project:
config_no_include = self._default_config_node.clone()
project_conf_first_pass._composite(config_no_include)
+ # Plugin factories must be defined in project.conf, not included from elsewhere.
+ self._load_plugin_factories(config_no_include)
+
self._load_pass(config_no_include, self.first_pass_config, ignore_unknown=True)
# Use separate file for storing source references
@@ -845,7 +877,7 @@ class Project:
self._load_pass(config, self.config)
- self._validate_toplevel_node(config)
+ self._validate_toplevel_node(config, first_pass=False)
#
# Now all YAML composition is done, from here on we just load
@@ -949,8 +981,6 @@ class Project:
#
def _load_pass(self, config, output, *, ignore_unknown=False):
- self._load_plugin_factories(config, output)
-
# Load project options
options_node = config.get_mapping("options", default={})
output.options.load(options_node)
@@ -1061,20 +1091,27 @@ class Project:
return project_directory, workspace_element
- def _load_plugin_factories(self, config, output):
+ # _load_plugin_factories()
+ #
+ # Loads the plugin factories
+ #
+ # Args:
+ # config (MappingNode): The main project.conf node in the first pass
+ #
+ def _load_plugin_factories(self, config):
# Create the factories
pluginbase = PluginBase(package="buildstream.plugins")
- output.element_factory = ElementFactory(pluginbase)
- output.source_factory = SourceFactory(pluginbase)
+ self.element_factory = ElementFactory(pluginbase)
+ self.source_factory = SourceFactory(pluginbase)
# Load the plugin origins and register them to their factories
origins = config.get_sequence("plugins", default=[])
for origin_node in origins:
origin = load_plugin_origin(self, origin_node)
for kind, conf in origin.elements.items():
- output.element_factory.register_plugin_origin(kind, origin, conf.allow_deprecated)
+ self.element_factory.register_plugin_origin(kind, origin, conf.allow_deprecated)
for kind, conf in origin.sources.items():
- output.source_factory.register_plugin_origin(kind, origin, conf.allow_deprecated)
+ self.source_factory.register_plugin_origin(kind, origin, conf.allow_deprecated)
# _warning_is_fatal():
#
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index cb1e84f74..e9bd60244 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -307,7 +307,7 @@ class Stream:
# Enqueue elements
self._enqueue_plan(elements)
- self._run()
+ self._run(announce_session=True)
# fetch()
#
@@ -334,7 +334,7 @@ class Stream:
)
# Delegated to a shared fetch method
- self._fetch(elements)
+ self._fetch(elements, announce_session=True)
# track()
#
@@ -365,7 +365,7 @@ class Stream:
track_queue = TrackQueue(self._scheduler)
self._add_queue(track_queue, track=True)
self._enqueue_plan(elements, queue=track_queue)
- self._run()
+ self._run(announce_session=True)
# source_push()
#
@@ -407,7 +407,7 @@ class Stream:
self._add_queue(SourcePushQueue(self._scheduler))
self._enqueue_plan(elements)
- self._run()
+ self._run(announce_session=True)
# pull()
#
@@ -444,7 +444,7 @@ class Stream:
self._scheduler.clear_queues()
self._add_queue(PullQueue(self._scheduler))
self._enqueue_plan(elements)
- self._run()
+ self._run(announce_session=True)
# push()
#
@@ -510,7 +510,7 @@ class Stream:
push_queue = ArtifactPushQueue(self._scheduler)
self._add_queue(push_queue)
self._enqueue_plan(cached_elements, queue=push_queue)
- self._run()
+ self._run(announce_session=True)
# If the user has selected to continue on error, fail the command
# and print a summary of artifacts which could not be pushed
@@ -577,7 +577,7 @@ class Stream:
self._scheduler.clear_queues()
self._add_queue(PullQueue(self._scheduler))
self._enqueue_plan(uncached_elts)
- self._run()
+ self._run(announce_session=True)
try:
scope = {
@@ -1354,14 +1354,17 @@ class Stream:
#
# Common function for running the scheduler
#
- def _run(self):
+ # Args:
+ # announce_session (bool): Whether to announce the session in the frontend.
+ #
+ def _run(self, *, announce_session: bool = False):
# Inform the frontend of the full list of elements
# and the list of elements which will be processed in this run
#
self.total_elements = list(self._pipeline.dependencies(self.targets, Scope.ALL))
- if self._session_start_callback is not None:
+ if announce_session and self._session_start_callback is not None:
self._session_start_callback()
self._running = True
@@ -1380,9 +1383,10 @@ class Stream:
#
# Args:
# elements (list of Element): Elements to fetch
- # fetch_original (Bool): Whether to fetch original unstaged
+ # fetch_original (bool): Whether to fetch original unstaged
+ # announce_session (bool): Whether to announce the session in the frontend
#
- def _fetch(self, elements, *, fetch_original=False):
+ def _fetch(self, elements: List[Element], *, fetch_original: bool = False, announce_session: bool = False):
# Assert consistency for the fetch elements
self._pipeline.assert_consistent(elements)
@@ -1392,7 +1396,7 @@ class Stream:
self._scheduler.clear_queues()
self._add_queue(FetchQueue(self._scheduler, fetch_original=fetch_original))
self._enqueue_plan(elements)
- self._run()
+ self._run(announce_session=announce_session)
# _check_location_writable()
#
diff --git a/src/buildstream/_workspaces.py b/src/buildstream/_workspaces.py
index a54a17ff1..f1fc353a7 100644
--- a/src/buildstream/_workspaces.py
+++ b/src/buildstream/_workspaces.py
@@ -449,7 +449,7 @@ class Workspaces:
def _load_config(self):
workspace_file = self._get_filename()
try:
- node = _yaml.load(workspace_file)
+ node = _yaml.load(workspace_file, shortname="workspaces.yml")
except LoadError as e:
if e.reason == LoadErrorReason.MISSING_FILE:
# Return an empty dict if there was no workspace file
diff --git a/src/buildstream/_yaml.pyx b/src/buildstream/_yaml.pyx
index 1e59b2a1c..17524cd1b 100644
--- a/src/buildstream/_yaml.pyx
+++ b/src/buildstream/_yaml.pyx
@@ -255,7 +255,7 @@ cdef class Representer:
#
# Raises: LoadError
#
-cpdef MappingNode load(str filename, str shortname=None, bint copy_tree=False, object project=None):
+cpdef MappingNode load(str filename, str shortname, bint copy_tree=False, object project=None):
cdef MappingNode data
if not shortname:
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index d1d11c4c1..b57f10a66 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -895,13 +895,13 @@ class Element(Plugin):
if meta in cls.__instantiated_elements:
return cls.__instantiated_elements[meta]
- element = meta.project.create_element(meta, first_pass=meta.first_pass)
+ element = meta.project.create_element(meta)
cls.__instantiated_elements[meta] = element
# Instantiate sources and generate their keys
for meta_source in meta.sources:
meta_source.first_pass = meta.is_junction
- source = meta.project.create_source(meta_source, variables=element.__variables, first_pass=meta.first_pass)
+ source = meta.project.create_source(meta_source, variables=element.__variables)
redundant_ref = source._load_ref()
diff --git a/src/buildstream/testing/__init__.py b/src/buildstream/testing/__init__.py
index f09c5bda1..19c19a64c 100644
--- a/src/buildstream/testing/__init__.py
+++ b/src/buildstream/testing/__init__.py
@@ -22,8 +22,7 @@ This package contains various utilities which make it easier to test plugins.
import os
from collections import OrderedDict
from buildstream.exceptions import ErrorDomain, LoadErrorReason
-from buildstream._yaml import load as load_yaml # type: ignore
-from ._yaml import generate_project, generate_element
+from ._yaml import generate_project, generate_element, load_yaml
from .repo import Repo
from .runcli import cli, cli_integration, cli_remote_execution
from .integration import integration_cache
diff --git a/src/buildstream/testing/_sourcetests/mirror.py b/src/buildstream/testing/_sourcetests/mirror.py
index 69042747c..836ef0580 100644
--- a/src/buildstream/testing/_sourcetests/mirror.py
+++ b/src/buildstream/testing/_sourcetests/mirror.py
@@ -255,7 +255,7 @@ def test_mirror_track_upstream_present(cli, tmpdir, datafiles, kind):
result.assert_success()
# Tracking tries upstream first. Check the ref is from upstream.
- new_element = _yaml.load(element_path)
+ new_element = _yaml.load(element_path, shortname=element_name)
source = new_element.get_sequence("sources").mapping_at(0)
if "ref" in source:
assert source.get_str("ref") == upstream_ref
@@ -300,7 +300,7 @@ def test_mirror_track_upstream_absent(cli, tmpdir, datafiles, kind):
result.assert_success()
# Check that tracking fell back to the mirror
- new_element = _yaml.load(element_path)
+ new_element = _yaml.load(element_path, shortname=element_name)
source = new_element.get_sequence("sources").mapping_at(0)
if "ref" in source:
assert source.get_str("ref") == mirror_ref
diff --git a/src/buildstream/testing/_sourcetests/track.py b/src/buildstream/testing/_sourcetests/track.py
index ecb508b1a..38ef217f0 100644
--- a/src/buildstream/testing/_sourcetests/track.py
+++ b/src/buildstream/testing/_sourcetests/track.py
@@ -284,7 +284,7 @@ def test_track_include(cli, tmpdir, datafiles, ref_storage, kind):
else:
assert not os.path.exists(os.path.join(project, "project.refs"))
- new_sources = _yaml.load(os.path.join(element_path, "sources.yml"))
+ new_sources = _yaml.load(os.path.join(element_path, "sources.yml"), shortname="sources.yml")
# Get all of the sources
assert "sources" in new_sources
diff --git a/src/buildstream/testing/_yaml.py b/src/buildstream/testing/_yaml.py
index 0a16f3226..396266b85 100644
--- a/src/buildstream/testing/_yaml.py
+++ b/src/buildstream/testing/_yaml.py
@@ -1,6 +1,9 @@
import os
+from buildstream import _yaml
-from buildstream._yaml import roundtrip_dump # type: ignore
+
+def load_yaml(filename):
+ return _yaml.load(filename, shortname=os.path.basename(filename))
def generate_project(project_dir, config=None):
@@ -11,11 +14,11 @@ def generate_project(project_dir, config=None):
config["name"] = os.path.basename(project_dir)
if "min-version" not in config:
config["min-version"] = "2.0"
- roundtrip_dump(config, project_file)
+ _yaml.roundtrip_dump(config, project_file)
def generate_element(element_dir, element_name, config=None):
if config is None:
config = {}
element_path = os.path.join(element_dir, element_name)
- roundtrip_dump(config, element_path)
+ _yaml.roundtrip_dump(config, element_path)
diff --git a/src/buildstream/testing/runcli.py b/src/buildstream/testing/runcli.py
index 712e6811a..0f08bd141 100644
--- a/src/buildstream/testing/runcli.py
+++ b/src/buildstream/testing/runcli.py
@@ -539,7 +539,7 @@ class CliIntegration(Cli):
with open(temp_project, "w") as f:
yaml.safe_dump(project_config, f)
- project_config = _yaml.load(temp_project)
+ project_config = _yaml.load(temp_project, shortname="project.conf")
project_config._composite(base_config)
diff --git a/src/buildstream/types.py b/src/buildstream/types.py
index 3d97fc7b9..cf76defbd 100644
--- a/src/buildstream/types.py
+++ b/src/buildstream/types.py
@@ -226,6 +226,24 @@ class _PipelineSelection(FastEnum):
return str(self.value)
+# _ProjectInformation()
+#
+# A descriptive object about a project.
+#
+# Args:
+# project (Project): The project instance
+# provenance (ProvenanceInformation): The provenance information, if any
+# duplicates (list): List of project descriptions which declared this project as a duplicate
+# internal (list): List of project descriptions which declared this project as internal
+#
+class _ProjectInformation:
+ def __init__(self, project, provenance, duplicates, internal):
+ self.project = project
+ self.provenance = provenance
+ self.duplicates = duplicates
+ self.internal = internal
+
+
########################################
# Type aliases #
########################################
diff --git a/tests/artifactcache/junctions.py b/tests/artifactcache/junctions.py
index df7ee9473..18c48b4a7 100644
--- a/tests/artifactcache/junctions.py
+++ b/tests/artifactcache/junctions.py
@@ -16,7 +16,7 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "junctions"
def project_set_artifacts(project, url):
project_conf_file = os.path.join(project, "project.conf")
- project_config = _yaml.load(project_conf_file)
+ project_config = _yaml.load(project_conf_file, shortname=None)
project_config["artifacts"] = {"url": url, "push": True}
_yaml.roundtrip_dump(project_config.strip_node_info(), file=project_conf_file)
diff --git a/tests/elements/filter.py b/tests/elements/filter.py
index 3bc24c20a..443f64d34 100644
--- a/tests/elements/filter.py
+++ b/tests/elements/filter.py
@@ -227,7 +227,7 @@ def test_filter_track(datafiles, cli, tmpdir):
result.assert_success()
# Now check that a ref field exists
- new_input = _yaml.load(input_file)
+ new_input = _yaml.load(input_file, shortname=None)
source_node = new_input.get_sequence("sources").mapping_at(0)
new_input_ref = source_node.get_str("ref")
assert new_input_ref == ref
@@ -273,7 +273,7 @@ def test_filter_track_excepted(datafiles, cli, tmpdir):
result.assert_success()
# Now check that a ref field exists
- new_input = _yaml.load(input_file)
+ new_input = _yaml.load(input_file, shortname=None)
source_node = new_input.get_sequence("sources").mapping_at(0)
assert "ref" not in source_node
@@ -318,7 +318,7 @@ def test_filter_track_multi_to_one(datafiles, cli, tmpdir):
result.assert_success()
# Now check that a ref field exists
- new_input = _yaml.load(input_file)
+ new_input = _yaml.load(input_file, shortname=None)
source_node = new_input.get_sequence("sources").mapping_at(0)
new_ref = source_node.get_str("ref")
assert new_ref == ref
@@ -374,12 +374,12 @@ def test_filter_track_multi(datafiles, cli, tmpdir):
result.assert_success()
# Now check that a ref field exists
- new_input = _yaml.load(input_file)
+ new_input = _yaml.load(input_file, shortname=None)
source_node = new_input.get_sequence("sources").mapping_at(0)
new_ref = source_node.get_str("ref")
assert new_ref == ref
- new_input2 = _yaml.load(input2_file)
+ new_input2 = _yaml.load(input2_file, shortname=None)
source_node2 = new_input2.get_sequence("sources").mapping_at(0)
new_ref2 = source_node2.get_str("ref")
assert new_ref2 == ref
@@ -434,11 +434,11 @@ def test_filter_track_multi_exclude(datafiles, cli, tmpdir):
result.assert_success()
# Now check that a ref field exists
- new_input = _yaml.load(input_file)
+ new_input = _yaml.load(input_file, shortname=None)
source_node = new_input.get_sequence("sources").mapping_at(0)
assert "ref" not in source_node
- new_input2 = _yaml.load(input2_file)
+ new_input2 = _yaml.load(input2_file, shortname=None)
source_node2 = new_input2.get_sequence("sources").mapping_at(0)
new_ref2 = source_node2.get_str("ref")
assert new_ref2 == ref
diff --git a/tests/format/include_composition.py b/tests/format/include_composition.py
index 3224822af..8148e18fa 100644
--- a/tests/format/include_composition.py
+++ b/tests/format/include_composition.py
@@ -23,7 +23,7 @@ def test_main_has_priority(tmpdir):
_yaml.roundtrip_dump({"(@)": ["a.yml"], "test": ["main"]}, str(tmpdir.join("main.yml")))
- main = _yaml.load(str(tmpdir.join("main.yml")))
+ main = _yaml.load(str(tmpdir.join("main.yml")), shortname=None)
_yaml.roundtrip_dump({"test": ["a"]}, str(tmpdir.join("a.yml")))
@@ -36,7 +36,7 @@ def test_include_cannot_append(tmpdir):
with make_includes(str(tmpdir)) as includes:
_yaml.roundtrip_dump({"(@)": ["a.yml"], "test": ["main"]}, str(tmpdir.join("main.yml")))
- main = _yaml.load(str(tmpdir.join("main.yml")))
+ main = _yaml.load(str(tmpdir.join("main.yml")), shortname=None)
_yaml.roundtrip_dump({"test": {"(>)": ["a"]}}, str(tmpdir.join("a.yml")))
@@ -49,7 +49,7 @@ def test_main_can_append(tmpdir):
with make_includes(str(tmpdir)) as includes:
_yaml.roundtrip_dump({"(@)": ["a.yml"], "test": {"(>)": ["main"]}}, str(tmpdir.join("main.yml")))
- main = _yaml.load(str(tmpdir.join("main.yml")))
+ main = _yaml.load(str(tmpdir.join("main.yml")), shortname=None)
_yaml.roundtrip_dump({"test": ["a"]}, str(tmpdir.join("a.yml")))
@@ -62,7 +62,7 @@ def test_sibling_cannot_append_backward(tmpdir):
with make_includes(str(tmpdir)) as includes:
_yaml.roundtrip_dump({"(@)": ["a.yml", "b.yml"]}, str(tmpdir.join("main.yml")))
- main = _yaml.load(str(tmpdir.join("main.yml")))
+ main = _yaml.load(str(tmpdir.join("main.yml")), shortname=None)
_yaml.roundtrip_dump({"test": {"(>)": ["a"]}}, str(tmpdir.join("a.yml")))
_yaml.roundtrip_dump({"test": ["b"]}, str(tmpdir.join("b.yml")))
@@ -76,7 +76,7 @@ def test_sibling_can_append_forward(tmpdir):
with make_includes(str(tmpdir)) as includes:
_yaml.roundtrip_dump({"(@)": ["a.yml", "b.yml"]}, str(tmpdir.join("main.yml")))
- main = _yaml.load(str(tmpdir.join("main.yml")))
+ main = _yaml.load(str(tmpdir.join("main.yml")), shortname=None)
_yaml.roundtrip_dump({"test": ["a"]}, str(tmpdir.join("a.yml")))
_yaml.roundtrip_dump({"test": {"(>)": ["b"]}}, str(tmpdir.join("b.yml")))
@@ -90,7 +90,7 @@ def test_lastest_sibling_has_priority(tmpdir):
with make_includes(str(tmpdir)) as includes:
_yaml.roundtrip_dump({"(@)": ["a.yml", "b.yml"]}, str(tmpdir.join("main.yml")))
- main = _yaml.load(str(tmpdir.join("main.yml")))
+ main = _yaml.load(str(tmpdir.join("main.yml")), shortname=None)
_yaml.roundtrip_dump({"test": ["a"]}, str(tmpdir.join("a.yml")))
_yaml.roundtrip_dump({"test": ["b"]}, str(tmpdir.join("b.yml")))
@@ -104,7 +104,7 @@ def test_main_keeps_keys(tmpdir):
with make_includes(str(tmpdir)) as includes:
_yaml.roundtrip_dump({"(@)": ["a.yml"], "something": "else"}, str(tmpdir.join("main.yml")))
- main = _yaml.load(str(tmpdir.join("main.yml")))
+ main = _yaml.load(str(tmpdir.join("main.yml")), shortname=None)
_yaml.roundtrip_dump({"test": ["a"]}, str(tmpdir.join("a.yml")))
@@ -121,7 +121,7 @@ def test_overwrite_directive_on_later_composite(tmpdir):
{"(@)": ["a.yml", "b.yml"], "test": {"(=)": ["Overwritten"]}}, str(tmpdir.join("main.yml"))
)
- main = _yaml.load(str(tmpdir.join("main.yml")))
+ main = _yaml.load(str(tmpdir.join("main.yml")), shortname=None)
# a.yml
_yaml.roundtrip_dump(
diff --git a/tests/frontend/init.py b/tests/frontend/init.py
index 3f897fb14..c3af27bea 100644
--- a/tests/frontend/init.py
+++ b/tests/frontend/init.py
@@ -33,7 +33,7 @@ def test_defaults(cli, tmpdir):
result = cli.run(args=["init", "--project-name", "foo", project])
result.assert_success()
- project_conf = _yaml.load(project_path)
+ project_conf = _yaml.load(project_path, shortname=None)
assert project_conf.get_str("name") == "foo"
assert project_conf.get_str("min-version") == get_default_min_version()
assert project_conf.get_str("element-path") == "elements"
@@ -48,7 +48,7 @@ def test_all_options(cli, tmpdir):
)
result.assert_success()
- project_conf = _yaml.load(project_path)
+ project_conf = _yaml.load(project_path, shortname=None)
assert project_conf.get_str("name") == "foo"
assert project_conf.get_str("min-version") == "2.0"
assert project_conf.get_str("element-path") == "ponies"
@@ -81,7 +81,7 @@ def test_force_overwrite_project(cli, tmpdir):
result = cli.run(args=["init", "--project-name", "foo", "--force", project])
result.assert_success()
- project_conf = _yaml.load(project_path)
+ project_conf = _yaml.load(project_path, shortname=None)
assert project_conf.get_str("name") == "foo"
assert project_conf.get_str("min-version") == get_default_min_version()
@@ -95,7 +95,7 @@ def test_relative_path_directory_as_argument(cli, tmpdir):
result = cli.run(args=["init", "--project-name", "foo", rel_path])
result.assert_success()
- project_conf = _yaml.load(project_path)
+ project_conf = _yaml.load(project_path, shortname=None)
assert project_conf.get_str("name") == "foo"
assert project_conf.get_str("min-version") == get_default_min_version()
assert project_conf.get_str("element-path") == "elements"
@@ -149,7 +149,7 @@ def test_element_path_interactive(cli, tmp_path, monkeypatch, element_path):
full_element_path = project.joinpath(element_path)
assert full_element_path.exists()
- project_conf = _yaml.load(str(project_conf_path))
+ project_conf = _yaml.load(str(project_conf_path), shortname=None)
assert project_conf.get_str("name") == "project_name"
assert project_conf.get_str("min-version") == "2.0"
assert project_conf.get_str("element-path") == element_path
diff --git a/tests/frontend/interactive_init.py b/tests/frontend/interactive_init.py
index b8cbe522f..c05fd4e37 100644
--- a/tests/frontend/interactive_init.py
+++ b/tests/frontend/interactive_init.py
@@ -36,7 +36,7 @@ def test_init(tmpdir):
session.close()
# Now assert that a project.conf got created with expected values
- project_conf = _yaml.load(os.path.join(str(tmpdir), "project.conf"))
+ project_conf = _yaml.load(os.path.join(str(tmpdir), "project.conf"), shortname=None)
assert project_conf.get_str("name") == name
assert project_conf.get_str("min-version") == min_version
assert project_conf.get_str("element-path") == element_path
diff --git a/tests/frontend/workspace.py b/tests/frontend/workspace.py
index 4aae61ad7..813e68196 100644
--- a/tests/frontend/workspace.py
+++ b/tests/frontend/workspace.py
@@ -167,7 +167,7 @@ def test_open_bzr_customize(cli, tmpdir, datafiles):
assert os.path.isdir(bzrdir)
# Check that the correct origin branch is set
- element_config = _yaml.load(os.path.join(project, "elements", element_name))
+ element_config = _yaml.load(os.path.join(project, "elements", element_name), shortname=None)
source_config = element_config.get_sequence("sources").mapping_at(0)
output = subprocess.check_output(["bzr", "info"], cwd=workspace)
stripped_url = source_config.get_str("url").lstrip("file:///")
@@ -859,7 +859,7 @@ def test_list_supported_workspace(cli, tmpdir, datafiles, workspace_cfg, expecte
def parse_dict_as_yaml(node):
tempfile = os.path.join(str(tmpdir), "yaml_dump")
_yaml.roundtrip_dump(node, tempfile)
- return _yaml.load(tempfile).strip_node_info()
+ return _yaml.load(tempfile, shortname=None).strip_node_info()
project = str(datafiles)
os.makedirs(os.path.join(project, ".bst"))
@@ -871,7 +871,7 @@ def test_list_supported_workspace(cli, tmpdir, datafiles, workspace_cfg, expecte
result = cli.run(project=project, args=["workspace", "list"])
result.assert_success()
- loaded_config = _yaml.load(workspace_config_path).strip_node_info()
+ loaded_config = _yaml.load(workspace_config_path, shortname=None).strip_node_info()
# Check that workspace config remains the same if no modifications
# to workspaces were made
@@ -900,7 +900,7 @@ def test_list_supported_workspace(cli, tmpdir, datafiles, workspace_cfg, expecte
result.assert_success()
# Check that workspace config is converted correctly if necessary
- loaded_config = _yaml.load(workspace_config_path).strip_node_info()
+ loaded_config = _yaml.load(workspace_config_path, shortname=None).strip_node_info()
assert loaded_config == parse_dict_as_yaml(expected)
@@ -1052,7 +1052,7 @@ def test_external_track(cli, datafiles, tmpdir_factory, guess_element):
# Delete the ref from the source so that we can detect if the
# element has been tracked after closing the workspace
- element_contents = _yaml.load(element_file)
+ element_contents = _yaml.load(element_file, shortname=None)
ref1 = element_contents.get_sequence("sources").mapping_at(0).get_str("ref")
del element_contents.get_sequence("sources").mapping_at(0)["ref"]
_yaml.roundtrip_dump(element_contents, element_file)
@@ -1061,7 +1061,7 @@ def test_external_track(cli, datafiles, tmpdir_factory, guess_element):
result.assert_success()
# Element is not tracked now
- element_contents = _yaml.load(element_file)
+ element_contents = _yaml.load(element_file, shortname=None)
assert "ref" not in element_contents.get_sequence("sources").mapping_at(0)
# close the workspace
@@ -1072,7 +1072,7 @@ def test_external_track(cli, datafiles, tmpdir_factory, guess_element):
result = cli.run(project=project, args=["source", "track", element_name])
result.assert_success()
- element_contents = _yaml.load(element_file)
+ element_contents = _yaml.load(element_file, shortname=None)
ref2 = element_contents.get_sequence("sources").mapping_at(0).get_str("ref")
# these values should be equivalent
assert ref1 == ref2
diff --git a/tests/internals/yaml.py b/tests/internals/yaml.py
index a4f8d08cc..4727d5d28 100644
--- a/tests/internals/yaml.py
+++ b/tests/internals/yaml.py
@@ -16,7 +16,7 @@ def test_load_yaml(datafiles):
filename = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
- loaded = _yaml.load(filename)
+ loaded = _yaml.load(filename, shortname=None)
assert loaded.get_str("kind") == "pony"
@@ -35,7 +35,7 @@ def test_basic_provenance(datafiles):
filename = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
- loaded = _yaml.load(filename)
+ loaded = _yaml.load(filename, shortname=None)
assert loaded.get_str("kind") == "pony"
assert_provenance(filename, 1, 0, loaded)
@@ -46,7 +46,7 @@ def test_member_provenance(datafiles):
filename = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
- loaded = _yaml.load(filename)
+ loaded = _yaml.load(filename, shortname=None)
assert loaded.get_str("kind") == "pony"
assert_provenance(filename, 2, 13, loaded.get_scalar("description"))
@@ -56,7 +56,7 @@ def test_element_provenance(datafiles):
filename = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
- loaded = _yaml.load(filename)
+ loaded = _yaml.load(filename, shortname=None)
assert loaded.get_str("kind") == "pony"
assert_provenance(filename, 5, 2, loaded.get_sequence("moods").scalar_at(1))
@@ -67,11 +67,11 @@ def test_mapping_validate_keys(datafiles):
valid = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
invalid = os.path.join(datafiles.dirname, datafiles.basename, "invalid.yaml")
- base = _yaml.load(valid)
+ base = _yaml.load(valid, shortname=None)
base.validate_keys(["kind", "description", "moods", "children", "extra"])
- base = _yaml.load(invalid)
+ base = _yaml.load(invalid, shortname=None)
with pytest.raises(LoadError) as exc:
base.validate_keys(["kind", "description", "moods", "children", "extra"])
@@ -84,7 +84,7 @@ def test_node_get(datafiles):
filename = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
- base = _yaml.load(filename)
+ base = _yaml.load(filename, shortname=None)
assert base.get_str("kind") == "pony"
children = base.get_sequence("children")
@@ -106,7 +106,7 @@ def test_node_set(datafiles):
filename = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
- base = _yaml.load(filename)
+ base = _yaml.load(filename, shortname=None)
assert "mother" not in base
base["mother"] = "snow white"
@@ -118,7 +118,7 @@ def test_node_set_overwrite(datafiles):
filename = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
- base = _yaml.load(filename)
+ base = _yaml.load(filename, shortname=None)
# Overwrite a string
assert base.get_str("kind") == "pony"
@@ -136,7 +136,7 @@ def test_node_set_list_element(datafiles):
filename = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
- base = _yaml.load(filename)
+ base = _yaml.load(filename, shortname=None)
assert base.get_str_list("moods") == ["happy", "sad"]
base.get_sequence("moods")[0] = "confused"
@@ -154,8 +154,8 @@ def test_composite_preserve_originals(datafiles):
filename = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
overlayfile = os.path.join(datafiles.dirname, datafiles.basename, "composite.yaml")
- base = _yaml.load(filename)
- overlay = _yaml.load(overlayfile)
+ base = _yaml.load(filename, shortname=None)
+ overlay = _yaml.load(overlayfile, shortname=None)
base_copy = base.clone()
overlay._composite(base_copy)
@@ -216,7 +216,7 @@ def test_list_composition(datafiles, filename, tmpdir, index, length, mood, prov
base_file = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
overlay_file = os.path.join(datafiles.dirname, datafiles.basename, filename)
- base = _yaml.load(base_file, "basics.yaml")
+ base = _yaml.load(base_file, shortname="basics.yaml")
overlay = _yaml.load(overlay_file, shortname=filename)
overlay._composite(base)
@@ -369,7 +369,7 @@ def test_convert_value_to_string(datafiles):
conf_file = os.path.join(datafiles.dirname, datafiles.basename, "convert_value_to_str.yaml")
# Run file through yaml to convert it
- test_dict = _yaml.load(conf_file)
+ test_dict = _yaml.load(conf_file, shortname=None)
user_config = test_dict.get_str("Test1")
assert isinstance(user_config, str)
@@ -393,7 +393,7 @@ def test_value_doesnt_match_expected(datafiles):
conf_file = os.path.join(datafiles.dirname, datafiles.basename, "convert_value_to_str.yaml")
# Run file through yaml to convert it
- test_dict = _yaml.load(conf_file)
+ test_dict = _yaml.load(conf_file, shortname=None)
with pytest.raises(LoadError) as exc:
test_dict.get_int("Test4")
@@ -445,7 +445,7 @@ def test_node_find_target(datafiles, case):
filename = os.path.join(datafiles.dirname, datafiles.basename, "traversal.yaml")
# We set copy_tree in order to ensure that the nodes in `loaded`
# are not the same nodes as in `prov.toplevel`
- loaded = _yaml.load(filename, copy_tree=True)
+ loaded = _yaml.load(filename, shortname=None, copy_tree=True)
prov = loaded.get_provenance()
@@ -477,7 +477,7 @@ def test_node_find_target(datafiles, case):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_node_find_target_fails(datafiles):
filename = os.path.join(datafiles.dirname, datafiles.basename, "traversal.yaml")
- loaded = _yaml.load(filename, copy_tree=True)
+ loaded = _yaml.load(filename, shortname=None, copy_tree=True)
brand_new = Node.from_dict({})