author     bst-marge-bot <marge-bot@buildstream.build>  2019-07-15 14:57:59 +0000
committer  bst-marge-bot <marge-bot@buildstream.build>  2019-07-15 14:57:59 +0000
commit     12c919a4bb3f8b1383d2e34709739a82874e34ba (patch)
tree       a889c216eda656c222cfe9a6619a8e7f7c92256a
parent     0d4905220637bb3c49161d07834da4fa11d8af2c (diff)
parent     becce86e51c188ae0c48638a9d96eb1decf5d40e (diff)
download   buildstream-12c919a4bb3f8b1383d2e34709739a82874e34ba.tar.gz
Merge branch 'bschubert/new-node-api' into 'master'
Rewrite of the Node API

See merge request BuildStream/buildstream!1472
-rw-r--r--  .pylintrc  1
-rw-r--r--  NEWS  21
-rwxr-xr-x  doc/bst2html.py  29
-rw-r--r--  doc/source/core_framework.rst  1
-rwxr-xr-x  setup.py  7
-rw-r--r--  src/buildstream/__init__.py  1
-rw-r--r--  src/buildstream/_artifact.py  2
-rw-r--r--  src/buildstream/_basecache.py  6
-rw-r--r--  src/buildstream/_cachekey.py  4
-rw-r--r--  src/buildstream/_cas/casremote.py  21
-rw-r--r--  src/buildstream/_context.py  88
-rw-r--r--  src/buildstream/_frontend/app.py  6
-rw-r--r--  src/buildstream/_frontend/cli.py  2
-rw-r--r--  src/buildstream/_frontend/widget.py  10
-rw-r--r--  src/buildstream/_gitsourcebase.py  40
-rw-r--r--  src/buildstream/_includes.py  33
-rw-r--r--  src/buildstream/_loader/loadelement.py  4
-rw-r--r--  src/buildstream/_loader/loader.py  39
-rw-r--r--  src/buildstream/_loader/metaelement.py  12
-rw-r--r--  src/buildstream/_loader/types.pyx  47
-rw-r--r--  src/buildstream/_options/option.py  11
-rw-r--r--  src/buildstream/_options/optionarch.py  3
-rw-r--r--  src/buildstream/_options/optionbool.py  9
-rw-r--r--  src/buildstream/_options/optionenum.py  25
-rw-r--r--  src/buildstream/_options/optionflags.py  25
-rw-r--r--  src/buildstream/_options/optionpool.py  61
-rw-r--r--  src/buildstream/_plugincontext.py  13
-rw-r--r--  src/buildstream/_project.py  192
-rw-r--r--  src/buildstream/_projectrefs.py  36
-rw-r--r--  src/buildstream/_stream.py  2
-rw-r--r--  src/buildstream/_variables.pyx  20
-rw-r--r--  src/buildstream/_workspaces.py  47
-rw-r--r--  src/buildstream/_yaml.pxd  46
-rw-r--r--  src/buildstream/_yaml.pyx  1125
-rw-r--r--  src/buildstream/buildelement.py  15
-rw-r--r--  src/buildstream/element.py  229
-rw-r--r--  src/buildstream/node.pxd  112
-rw-r--r--  src/buildstream/node.pyx  1556
-rw-r--r--  src/buildstream/plugin.py  198
-rw-r--r--  src/buildstream/plugins/elements/compose.py  10
-rw-r--r--  src/buildstream/plugins/elements/filter.py  19
-rw-r--r--  src/buildstream/plugins/elements/import.py  2
-rw-r--r--  src/buildstream/plugins/elements/junction.py  8
-rw-r--r--  src/buildstream/plugins/elements/script.py  7
-rw-r--r--  src/buildstream/plugins/sources/_downloadablefilesource.py  10
-rw-r--r--  src/buildstream/plugins/sources/bzr.py  10
-rw-r--r--  src/buildstream/plugins/sources/deb.py  2
-rw-r--r--  src/buildstream/plugins/sources/local.py  4
-rw-r--r--  src/buildstream/plugins/sources/patch.py  4
-rw-r--r--  src/buildstream/plugins/sources/pip.py  14
-rw-r--r--  src/buildstream/plugins/sources/remote.py  6
-rw-r--r--  src/buildstream/plugins/sources/tar.py  5
-rw-r--r--  src/buildstream/plugins/sources/zip.py  5
-rw-r--r--  src/buildstream/sandbox/_sandboxremote.py  28
-rw-r--r--  src/buildstream/source.py  63
-rw-r--r--  src/buildstream/testing/_sourcetests/build_checkout.py  5
-rw-r--r--  src/buildstream/testing/_sourcetests/fetch.py  7
-rw-r--r--  src/buildstream/testing/_sourcetests/mirror.py  40
-rw-r--r--  src/buildstream/testing/_sourcetests/source_determinism.py  2
-rw-r--r--  src/buildstream/testing/_sourcetests/track.py  16
-rw-r--r--  src/buildstream/testing/_sourcetests/track_cross_junction.py  6
-rw-r--r--  src/buildstream/testing/_sourcetests/workspace.py  5
-rw-r--r--  src/buildstream/testing/_utils/__init__.py  2
-rw-r--r--  src/buildstream/testing/_utils/junction.py  2
-rw-r--r--  src/buildstream/testing/runcli.py  6
-rw-r--r--  tests/artifactcache/cache_size.py  4
-rw-r--r--  tests/artifactcache/config.py  6
-rw-r--r--  tests/artifactcache/junctions.py  6
-rw-r--r--  tests/artifactcache/pull.py  4
-rw-r--r--  tests/artifactcache/push.py  4
-rw-r--r--  tests/cachekey/cachekey.py  4
-rw-r--r--  tests/elements/filter.py  68
-rw-r--r--  tests/elements/filter/basic/element_plugins/dynamic.py  6
-rw-r--r--  tests/format/include.py  40
-rw-r--r--  tests/format/include_composition.py  109
-rw-r--r--  tests/format/junctions.py  8
-rw-r--r--  tests/format/optionarch.py  2
-rw-r--r--  tests/format/optionbool.py  4
-rw-r--r--  tests/format/optioneltmask.py  4
-rw-r--r--  tests/format/optionenum.py  4
-rw-r--r--  tests/format/optionexports.py  2
-rw-r--r--  tests/format/optionflags.py  4
-rw-r--r--  tests/format/optionos.py  2
-rw-r--r--  tests/format/optionoverrides.py  2
-rw-r--r--  tests/format/options.py  22
-rw-r--r--  tests/format/project.py  12
-rw-r--r--  tests/format/projectoverrides.py  2
-rw-r--r--  tests/format/variables.py  14
-rw-r--r--  tests/frontend/__init__.py  2
-rw-r--r--  tests/frontend/buildcheckout.py  31
-rw-r--r--  tests/frontend/buildtrack.py  6
-rw-r--r--  tests/frontend/configurable_warnings.py  2
-rw-r--r--  tests/frontend/cross_junction_workspace.py  31
-rw-r--r--  tests/frontend/fetch.py  18
-rw-r--r--  tests/frontend/init.py  28
-rw-r--r--  tests/frontend/logging.py  12
-rw-r--r--  tests/frontend/mirror.py  34
-rw-r--r--  tests/frontend/order.py  2
-rw-r--r--  tests/frontend/overlaps.py  2
-rw-r--r--  tests/frontend/project/sources/fetch_source.py  13
-rw-r--r--  tests/frontend/remote-caches.py  2
-rw-r--r--  tests/frontend/show.py  17
-rw-r--r--  tests/frontend/source_checkout.py  2
-rw-r--r--  tests/frontend/track.py  20
-rw-r--r--  tests/frontend/workspace.py  55
-rw-r--r--  tests/integration/cachedfail.py  10
-rw-r--r--  tests/integration/compose.py  2
-rw-r--r--  tests/integration/import.py  2
-rw-r--r--  tests/integration/manual.py  2
-rw-r--r--  tests/integration/messages.py  4
-rw-r--r--  tests/integration/pip_element.py  4
-rw-r--r--  tests/integration/pip_source.py  6
-rw-r--r--  tests/integration/script.py  2
-rw-r--r--  tests/integration/shell.py  2
-rw-r--r--  tests/integration/source-determinism.py  2
-rw-r--r--  tests/integration/workspace.py  16
-rw-r--r--  tests/internals/pluginfactory.py  34
-rw-r--r--  tests/internals/yaml.py  166
-rw-r--r--  tests/remoteexecution/buildfail.py  2
-rw-r--r--  tests/remoteexecution/junction.py  6
-rw-r--r--  tests/sandboxes/fallback.py  2
-rw-r--r--  tests/sandboxes/missing_dependencies.py  4
-rw-r--r--  tests/sandboxes/remote-exec-config.py  6
-rw-r--r--  tests/sandboxes/selection.py  4
-rw-r--r--  tests/sourcecache/cache.py  2
-rw-r--r--  tests/sourcecache/config.py  2
-rw-r--r--  tests/sourcecache/fetch.py  12
-rw-r--r--  tests/sourcecache/push.py  14
-rw-r--r--  tests/sources/bzr.py  2
-rw-r--r--  tests/sources/deb.py  2
-rw-r--r--  tests/sources/git.py  108
-rw-r--r--  tests/sources/local.py  2
-rw-r--r--  tests/sources/no_fetch_cached.py  2
-rw-r--r--  tests/sources/pip.py  2
-rw-r--r--  tests/sources/previous_source_access.py  6
-rw-r--r--  tests/sources/previous_source_access/plugins/sources/foo_transform.py  4
-rw-r--r--  tests/sources/remote.py  4
-rw-r--r--  tests/sources/tar.py  6
-rw-r--r--  tests/sources/zip.py  4
-rw-r--r--  tests/testutils/__init__.py  1
-rw-r--r--  tests/testutils/element_generators.py  2
-rw-r--r--  tests/testutils/junction.py  2
-rw-r--r--  tests/testutils/yaml.py  47
143 files changed, 2894 insertions, 2603 deletions
diff --git a/.pylintrc b/.pylintrc
index c6f52b889..63ff1b756 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -4,6 +4,7 @@
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code
extension-pkg-whitelist=
+ buildstream.node,
buildstream._loader._loader,
buildstream._loader.types,
buildstream._variables,
diff --git a/NEWS b/NEWS
index 4b1d63a66..2fabe0c65 100644
--- a/NEWS
+++ b/NEWS
@@ -2,14 +2,15 @@
buildstream 1.3.1
=================
- o BREAKING CHANGE: `bst init` no longer uses the `--directory` or `-C`
- option. Instead, it (optionally) takes a directory as an argument.
+ o BREAKING CHANGE: The yaml API has been rewritten entirely. When accessing
+ configuration from YAML, please use the new `Node` classes exposed in the
+ `buildstream` package. See the documentation for how to use it.
- o BREAKING CHANGE: `Plugin.node_items` doesn't accept 'dict' values anymore.
- Please used `Node`s instead.
+ This change has also removed the need for a YAML cache, as it proved to no longer
+ be of benefit.
- o BREAKING CHANGE: `node_get_member` doesn't accept 'expected_type=Mapping'
- anymore. Please use 'expected_type=dict' instead.
+ o BREAKING CHANGE: `bst init` no longer uses the `--directory` or `-C`
+ option. Instead, it (optionally) takes a directory as an argument.
o BREAKING CHANGE: Artifact as a Proto. The caching of buildstream artifacts
has changed from a reference-based implementation. Existing artifacts and
@@ -20,14 +21,6 @@ buildstream 1.3.1
o BREAKING CHANGE: ostree plugin has been removed. It now lives in the
bst-plugins-experimental repo
- o BREAKING CHANGE: YAML New World Order. The parsing and loading of .bst files
- and node handling has been completely rewritten and is now faster. This now
- requires that plugin authors must use the Plugin API to access or modify node
- members. Regular dictionary accesses are no longer valid.
-
- This change has also removed the need of a YAML cache as it proved to no longer
- be of benefit.
-
o Added `bst artifact delete` subcommand. This command removes artifacts from
the local cache. Multiple element names and artifact refs can be specified
as arguments.
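For plugin authors, the entry above amounts to swapping the old `_yaml` free functions for typed accessors on the node itself. A minimal sketch of the migration, using only accessors that appear later in this diff ('url' and 'ref' are hypothetical configuration keys):

    # Old API:
    #   self.node_validate(node, ['url', 'ref'])
    #   url = self.node_get_member(node, str, 'url')
    #   ref = self.node_get_member(node, str, 'ref', None)
    #
    # New Node API:
    node.validate_keys(['url', 'ref'])
    url = node.get_str('url')
    ref = node.get_str('ref', default=None)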
diff --git a/doc/bst2html.py b/doc/bst2html.py
index 448ab1389..71d497eda 100755
--- a/doc/bst2html.py
+++ b/doc/bst2html.py
@@ -197,7 +197,7 @@ def workdir(source_cache=None):
'sourcedir': source_cache,
'logdir': os.path.join(tempdir, 'logs'),
}
- _yaml.dump(config, bst_config_file)
+ _yaml.roundtrip_dump(config, bst_config_file)
yield (tempdir, bst_config_file, source_cache)
@@ -332,9 +332,9 @@ def run_session(description, tempdir, source_cache, palette, config_file, force)
#
if not force:
needs_build = False
- commands = _yaml.node_get(desc, list, 'commands')
+ commands = desc.get_sequence('commands')
for command in commands:
- output = _yaml.node_get(command, str, 'output', default_value=None)
+ output = command.get_str('output', default=None)
if output is not None and check_needs_build(desc_dir, output, force=False):
needs_build = True
break
@@ -347,8 +347,9 @@ def run_session(description, tempdir, source_cache, palette, config_file, force)
# tarball. This workaround lets us build docs from
# a source distribution tarball.
#
- symlinks = _yaml.node_get(desc, dict, 'workaround-symlinks', default_value={})
- for symlink, target in _yaml.node_items(symlinks):
+ symlinks = desc.get_mapping('workaround-symlinks', default={})
+ for symlink, target in symlinks.items():
+ target = target.as_str()
# Resolve real path to where symlink should be
symlink = os.path.join(desc_dir, symlink)
@@ -367,7 +368,7 @@ def run_session(description, tempdir, source_cache, palette, config_file, force)
# not a source distribution, no need to complain
pass
- remove_files = _yaml.node_get(desc, list, 'remove-files', default_value=[])
+ remove_files = desc.get_sequence('remove-files', default=[]).as_str_list()
for remove_file in remove_files:
remove_file = os.path.join(desc_dir, remove_file)
remove_file = os.path.realpath(remove_file)
@@ -379,23 +380,21 @@ def run_session(description, tempdir, source_cache, palette, config_file, force)
# Run commands
#
- commands = _yaml.node_get(desc, list, 'commands')
- for c in commands:
- command = _yaml.node_get(desc, dict, 'commands', indices=[commands.index(c)])
-
+ commands = desc.get_sequence('commands')
+ for command in commands:
# Get the directory where this command should be run
- directory = _yaml.node_get(command, str, 'directory')
+ directory = command.get_str('directory')
directory = os.path.join(desc_dir, directory)
directory = os.path.realpath(directory)
# Get the command string
- command_str = _yaml.node_get(command, str, 'command')
+ command_str = command.get_str('command')
# Check whether this is a shell command and not a bst command
- is_shell = _yaml.node_get(command, bool, 'shell', default_value=False)
+ is_shell = command.get_bool('shell', default=False)
# Check if there is fake output
- command_fake_output = _yaml.node_get(command, str, 'fake-output', default_value=None)
+ command_fake_output = command.get_str('fake-output', default=None)
# Run the command, or just use the fake output
if command_fake_output is None:
@@ -407,7 +406,7 @@ def run_session(description, tempdir, source_cache, palette, config_file, force)
command_out = command_fake_output
# Encode and save the output if that was asked for
- output = _yaml.node_get(command, str, 'output', default_value=None)
+ output = command.get_str('output', default=None)
if output is not None:
# Convert / Generate a nice <div>
converted = generate_html(command_out, directory, config_file,
diff --git a/doc/source/core_framework.rst b/doc/source/core_framework.rst
index fe2a59c5c..60fbc5539 100644
--- a/doc/source/core_framework.rst
+++ b/doc/source/core_framework.rst
@@ -13,6 +13,7 @@ useful for working on BuildStream itself.
:maxdepth: 1
buildstream.types
+ buildstream.node
buildstream.plugin
buildstream.source
buildstream.element
diff --git a/setup.py b/setup.py
index 330b1d411..1ea423e1a 100755
--- a/setup.py
+++ b/setup.py
@@ -402,10 +402,11 @@ def register_cython_module(module_name, dependencies=None):
BUILD_EXTENSIONS = []
+register_cython_module("buildstream.node")
register_cython_module("buildstream._loader._loader")
-register_cython_module("buildstream._loader.types", dependencies=["buildstream._yaml"])
-register_cython_module("buildstream._yaml")
-register_cython_module("buildstream._variables", dependencies=["buildstream._yaml"])
+register_cython_module("buildstream._loader.types", dependencies=["buildstream.node"])
+register_cython_module("buildstream._yaml", dependencies=["buildstream.node"])
+register_cython_module("buildstream._variables", dependencies=["buildstream.node"])
#####################################################
# Main setup() Invocation #
diff --git a/src/buildstream/__init__.py b/src/buildstream/__init__.py
index 62890a62f..cd8d0f1cf 100644
--- a/src/buildstream/__init__.py
+++ b/src/buildstream/__init__.py
@@ -29,6 +29,7 @@ if "_BST_COMPLETION" not in os.environ:
from .utils import UtilError, ProgramNotFoundError
from .sandbox import Sandbox, SandboxFlags, SandboxCommandError
from .types import Scope, Consistency, CoreWarnings
+ from .node import MappingNode, Node, ProvenanceInformation, ScalarNode, SequenceNode
from .plugin import Plugin
from .source import Source, SourceError, SourceFetcher
from .element import Element, ElementError
diff --git a/src/buildstream/_artifact.py b/src/buildstream/_artifact.py
index ec574e335..4e7fa4911 100644
--- a/src/buildstream/_artifact.py
+++ b/src/buildstream/_artifact.py
@@ -147,7 +147,7 @@ class Artifact():
# Store public data
with utils._tempnamedfile_name(dir=self._tmpdir) as tmpname:
- _yaml.dump(publicdata, tmpname)
+ _yaml.roundtrip_dump(publicdata, tmpname)
public_data_digest = self._cas.add_object(path=tmpname, link_directly=True)
artifact.public_data.CopyFrom(public_data_digest)
size += public_data_digest.size_bytes
diff --git a/src/buildstream/_basecache.py b/src/buildstream/_basecache.py
index a29973158..56f6d68dc 100644
--- a/src/buildstream/_basecache.py
+++ b/src/buildstream/_basecache.py
@@ -74,12 +74,12 @@ class BaseCache():
cache_specs = []
try:
- artifacts = [_yaml.node_get(config_node, dict, cls.config_node_name)]
+ artifacts = [config_node.get_mapping(cls.config_node_name)]
except LoadError:
try:
- artifacts = _yaml.node_get(config_node, list, cls.config_node_name, default_value=[])
+ artifacts = config_node.get_sequence(cls.config_node_name, default=[])
except LoadError:
- provenance = _yaml.node_get_provenance(config_node, key=cls.config_node_name)
+ provenance = config_node.get_node(cls.config_node_name).get_provenance()
raise _yaml.LoadError(_yaml.LoadErrorReason.INVALID_DATA,
"%s: 'artifacts' must be a single 'url:' mapping, or a list of mappings" %
(str(provenance)))
diff --git a/src/buildstream/_cachekey.py b/src/buildstream/_cachekey.py
index e56b582fa..89d47671e 100644
--- a/src/buildstream/_cachekey.py
+++ b/src/buildstream/_cachekey.py
@@ -22,7 +22,6 @@ import hashlib
import ujson
-from . import _yaml
# Internal record of the size of a cache key
_CACHEKEY_SIZE = len(hashlib.sha256().hexdigest())
@@ -63,6 +62,5 @@ def is_key(key):
# (str): An sha256 hex digest of the given value
#
def generate_key(value):
- ordered = _yaml.node_sanitize(value)
- ustring = ujson.dumps(ordered, sort_keys=True, escape_forward_slashes=False).encode('utf-8')
+ ustring = ujson.dumps(value, sort_keys=True, escape_forward_slashes=False).encode('utf-8')
return hashlib.sha256(ustring).hexdigest()
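With `node_sanitize()` gone, `generate_key()` now assumes its input is already plain, JSON-serializable Python data rather than a YAML node. A hedged usage sketch (the dictionary contents are hypothetical):

    from buildstream._cachekey import generate_key

    # ujson sorts the keys, so equal dictionaries always hash identically.
    key = generate_key({'kind': 'manual', 'deps': ['base.bst']})
    assert len(key) == 64  # an sha256 hex digest, per _CACHEKEY_SIZE above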
diff --git a/src/buildstream/_cas/casremote.py b/src/buildstream/_cas/casremote.py
index cd46e9c38..f0c84f7b6 100644
--- a/src/buildstream/_cas/casremote.py
+++ b/src/buildstream/_cas/casremote.py
@@ -8,7 +8,6 @@ import uuid
import grpc
-from .. import _yaml
from .._protos.google.rpc import code_pb2
from .._protos.google.bytestream import bytestream_pb2, bytestream_pb2_grpc
from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
@@ -31,35 +30,35 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key
#
@staticmethod
def _new_from_config_node(spec_node, basedir=None):
- _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance-name'])
- url = _yaml.node_get(spec_node, str, 'url')
- push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
+ spec_node.validate_keys(['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance-name'])
+ url = spec_node.get_str('url')
+ push = spec_node.get_bool('push', default=False)
if not url:
- provenance = _yaml.node_get_provenance(spec_node, 'url')
+ provenance = spec_node.get_node('url').get_provenance()
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: empty artifact cache URL".format(provenance))
- instance_name = _yaml.node_get(spec_node, str, 'instance-name', default_value=None)
+ instance_name = spec_node.get_str('instance-name', default=None)
- server_cert = _yaml.node_get(spec_node, str, 'server-cert', default_value=None)
+ server_cert = spec_node.get_str('server-cert', default=None)
if server_cert and basedir:
server_cert = os.path.join(basedir, server_cert)
- client_key = _yaml.node_get(spec_node, str, 'client-key', default_value=None)
+ client_key = spec_node.get_str('client-key', default=None)
if client_key and basedir:
client_key = os.path.join(basedir, client_key)
- client_cert = _yaml.node_get(spec_node, str, 'client-cert', default_value=None)
+ client_cert = spec_node.get_str('client-cert', default=None)
if client_cert and basedir:
client_cert = os.path.join(basedir, client_cert)
if client_key and not client_cert:
- provenance = _yaml.node_get_provenance(spec_node, 'client-key')
+ provenance = spec_node.get_node('client-key').get_provenance()
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: 'client-key' was specified without 'client-cert'".format(provenance))
if client_cert and not client_key:
- provenance = _yaml.node_get_provenance(spec_node, 'client-cert')
+ provenance = spec_node.get_node('client-cert').get_provenance()
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: 'client-cert' was specified without 'client-key'".format(provenance))
diff --git a/src/buildstream/_context.py b/src/buildstream/_context.py
index 2bdf5b0b4..c29910418 100644
--- a/src/buildstream/_context.py
+++ b/src/buildstream/_context.py
@@ -30,6 +30,7 @@ from ._artifactcache import ArtifactCache
from ._sourcecache import SourceCache
from ._cas import CASCache, CASQuota, CASCacheUsage
from ._workspaces import Workspaces, WorkspaceProjectCache
+from .node import Node
from .sandbox import SandboxRemote
@@ -154,7 +155,7 @@ class Context():
self._artifactcache = None
self._sourcecache = None
self._projects = []
- self._project_overrides = _yaml.new_empty_node()
+ self._project_overrides = Node.from_dict({})
self._workspaces = None
self._workspace_project_cache = WorkspaceProjectCache()
self._cascache = None
@@ -192,7 +193,7 @@ class Context():
if config:
self.config_origin = os.path.abspath(config)
user_config = _yaml.load(config)
- _yaml.composite(defaults, user_config)
+ user_config._composite(defaults)
# Give obsoletion warnings
if 'builddir' in defaults:
@@ -203,7 +204,7 @@ class Context():
raise LoadError(LoadErrorReason.INVALID_DATA,
"artifactdir is obsolete")
- _yaml.node_validate(defaults, [
+ defaults.validate_keys([
'cachedir', 'sourcedir', 'builddir', 'logdir', 'scheduler',
'artifacts', 'source-caches', 'logging', 'projects', 'cache', 'prompt',
'workspacedir', 'remote-execution',
@@ -213,7 +214,7 @@ class Context():
# Allow the ~ tilde expansion and any environment variables in
# path specification in the config files.
#
- path = _yaml.node_get(defaults, str, directory)
+ path = defaults.get_str(directory)
path = os.path.expanduser(path)
path = os.path.expandvars(path)
path = os.path.normpath(path)
@@ -242,10 +243,10 @@ class Context():
# Load quota configuration
# We need to find the first existing directory in the path of our
# cachedir - the cachedir may not have been created yet.
- cache = _yaml.node_get(defaults, dict, 'cache')
- _yaml.node_validate(cache, ['quota', 'pull-buildtrees', 'cache-buildtrees'])
+ cache = defaults.get_mapping('cache')
+ cache.validate_keys(['quota', 'pull-buildtrees', 'cache-buildtrees'])
- self.config_cache_quota_string = _yaml.node_get(cache, str, 'quota')
+ self.config_cache_quota_string = cache.get_str('quota')
try:
self.config_cache_quota = utils._parse_size(self.config_cache_quota_string,
self.casdir)
@@ -262,65 +263,64 @@ class Context():
self.source_cache_specs = SourceCache.specs_from_config_node(defaults)
# Load remote execution config getting pull-artifact-files from it
- remote_execution = _yaml.node_get(defaults, dict, 'remote-execution', default_value=None)
+ remote_execution = defaults.get_mapping('remote-execution', default=None)
if remote_execution:
- self.pull_artifact_files = _yaml.node_get(
- remote_execution, bool, 'pull-artifact-files', default_value=True)
+ self.pull_artifact_files = remote_execution.get_bool('pull-artifact-files', default=True)
# This stops it being used in the remote service set up
- _yaml.node_del(remote_execution, 'pull-artifact-files', safe=True)
+ remote_execution.safe_del('pull-artifact-files')
# Don't pass the remote execution settings if that was the only option
- if _yaml.node_keys(remote_execution) == []:
- _yaml.node_del(defaults, 'remote-execution')
+ if remote_execution.keys() == []:
+ del defaults['remote-execution']
else:
self.pull_artifact_files = True
self.remote_execution_specs = SandboxRemote.specs_from_config_node(defaults)
# Load pull build trees configuration
- self.pull_buildtrees = _yaml.node_get(cache, bool, 'pull-buildtrees')
+ self.pull_buildtrees = cache.get_bool('pull-buildtrees')
# Load cache build trees configuration
self.cache_buildtrees = _node_get_option_str(
cache, 'cache-buildtrees', ['always', 'auto', 'never'])
# Load logging config
- logging = _yaml.node_get(defaults, dict, 'logging')
- _yaml.node_validate(logging, [
+ logging = defaults.get_mapping('logging')
+ logging.validate_keys([
'key-length', 'verbose',
'error-lines', 'message-lines',
'debug', 'element-format', 'message-format'
])
- self.log_key_length = _yaml.node_get(logging, int, 'key-length')
- self.log_debug = _yaml.node_get(logging, bool, 'debug')
- self.log_verbose = _yaml.node_get(logging, bool, 'verbose')
- self.log_error_lines = _yaml.node_get(logging, int, 'error-lines')
- self.log_message_lines = _yaml.node_get(logging, int, 'message-lines')
- self.log_element_format = _yaml.node_get(logging, str, 'element-format')
- self.log_message_format = _yaml.node_get(logging, str, 'message-format')
+ self.log_key_length = logging.get_int('key-length')
+ self.log_debug = logging.get_bool('debug')
+ self.log_verbose = logging.get_bool('verbose')
+ self.log_error_lines = logging.get_int('error-lines')
+ self.log_message_lines = logging.get_int('message-lines')
+ self.log_element_format = logging.get_str('element-format')
+ self.log_message_format = logging.get_str('message-format')
# Load scheduler config
- scheduler = _yaml.node_get(defaults, dict, 'scheduler')
- _yaml.node_validate(scheduler, [
+ scheduler = defaults.get_mapping('scheduler')
+ scheduler.validate_keys([
'on-error', 'fetchers', 'builders',
'pushers', 'network-retries'
])
self.sched_error_action = _node_get_option_str(
scheduler, 'on-error', ['continue', 'quit', 'terminate'])
- self.sched_fetchers = _yaml.node_get(scheduler, int, 'fetchers')
- self.sched_builders = _yaml.node_get(scheduler, int, 'builders')
- self.sched_pushers = _yaml.node_get(scheduler, int, 'pushers')
- self.sched_network_retries = _yaml.node_get(scheduler, int, 'network-retries')
+ self.sched_fetchers = scheduler.get_int('fetchers')
+ self.sched_builders = scheduler.get_int('builders')
+ self.sched_pushers = scheduler.get_int('pushers')
+ self.sched_network_retries = scheduler.get_int('network-retries')
# Load per-projects overrides
- self._project_overrides = _yaml.node_get(defaults, dict, 'projects', default_value={})
+ self._project_overrides = defaults.get_mapping('projects', default={})
# Shallow validation of overrides, parts of buildstream which rely
# on the overrides are expected to validate elsewhere.
- for _, overrides in _yaml.node_items(self._project_overrides):
- _yaml.node_validate(overrides,
- ['artifacts', 'source-caches', 'options',
- 'strict', 'default-mirror',
- 'remote-execution'])
+ for overrides in self._project_overrides.values():
+ overrides.validate_keys(['artifacts', 'source-caches', 'options',
+ 'strict', 'default-mirror',
+ 'remote-execution'])
@property
def artifactcache(self):
@@ -402,17 +402,16 @@ class Context():
# get_overrides():
#
# Fetch the override dictionary for the active project. This returns
- # a node loaded from YAML and as such, values loaded from the returned
- # node should be loaded using the _yaml.node_get() family of functions.
+ # a node loaded from YAML.
#
# Args:
# project_name (str): The project name
#
# Returns:
- # (dict): The overrides dictionary for the specified project
+ # (MappingNode): The overrides dictionary for the specified project
#
def get_overrides(self, project_name):
- return _yaml.node_get(self._project_overrides, dict, project_name, default_value={})
+ return self._project_overrides.get_mapping(project_name, default={})
# get_strict():
#
@@ -427,7 +426,7 @@ class Context():
# so work out if we should be strict, and then cache the result
toplevel = self.get_toplevel_project()
overrides = self.get_overrides(toplevel.name)
- self._strict_build_plan = _yaml.node_get(overrides, bool, 'strict', default_value=True)
+ self._strict_build_plan = overrides.get_bool('strict', default=True)
# If it was set by the CLI, it overrides any config
# Ditto if we've already computed this, then we return the computed
@@ -445,7 +444,7 @@ class Context():
if self._cache_key is None:
# Anything that alters the build goes into the unique key
- self._cache_key = _cachekey.generate_key(_yaml.new_empty_node())
+ self._cache_key = _cachekey.generate_key({})
return self._cache_key
@@ -493,7 +492,7 @@ class Context():
# _node_get_option_str()
#
-# Like _yaml.node_get(), but also checks value is one of the allowed option
+# Like Node.get_scalar().as_str(), but also checks value is one of the allowed option
# strings. Fetches a value from a dictionary node, and makes sure it's one of
# the pre-defined options.
#
@@ -509,9 +508,10 @@ class Context():
# LoadError, when the value is not of the expected type, or is not found.
#
def _node_get_option_str(node, key, allowed_options):
- result = _yaml.node_get(node, str, key)
+ result_node = node.get_scalar(key)
+ result = result_node.as_str()
if result not in allowed_options:
- provenance = _yaml.node_get_provenance(node, key)
+ provenance = result_node.get_provenance()
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: {} should be one of: {}".format(
provenance, key, ", ".join(allowed_options)))
diff --git a/src/buildstream/_frontend/app.py b/src/buildstream/_frontend/app.py
index cf4ac2b8d..0479b8c19 100644
--- a/src/buildstream/_frontend/app.py
+++ b/src/buildstream/_frontend/app.py
@@ -38,7 +38,7 @@ from .._exceptions import BstError, StreamError, LoadError, LoadErrorReason, App
from .._message import Message, MessageType, unconditional_messages
from .._stream import Stream
from .._versions import BST_FORMAT_VERSION
-from .. import _yaml
+from .. import node
# Import frontend assets
from .profile import Profile
@@ -349,7 +349,7 @@ class App():
if project_name:
# If project name was specified, user interaction is not desired, just
# perform some validation and write the project.conf
- _yaml.assert_symbol_name(None, project_name, 'project name')
+ node._assert_symbol_name(project_name, 'project name')
self._assert_format_version(format_version)
self._assert_element_path(element_path)
@@ -801,7 +801,7 @@ class App():
def project_name_proc(user_input):
try:
- _yaml.assert_symbol_name(None, user_input, 'project name')
+ node._assert_symbol_name(user_input, 'project name')
except LoadError as e:
message = "{}\n\n{}\n".format(e, e.detail)
raise UsageError(message) from e
diff --git a/src/buildstream/_frontend/cli.py b/src/buildstream/_frontend/cli.py
index e9d67ca87..fda81598d 100644
--- a/src/buildstream/_frontend/cli.py
+++ b/src/buildstream/_frontend/cli.py
@@ -101,7 +101,7 @@ def complete_target(args, incomplete):
return []
# The project is not required to have an element-path
- element_directory = _yaml.node_get(project, str, 'element-path', default_value='')
+ element_directory = project.get_str('element-path', default='')
# If a project was loaded, use its element-path to
# adjust our completion's base directory
diff --git a/src/buildstream/_frontend/widget.py b/src/buildstream/_frontend/widget.py
index bc49e2927..fbde249a9 100644
--- a/src/buildstream/_frontend/widget.py
+++ b/src/buildstream/_frontend/widget.py
@@ -28,7 +28,6 @@ import click
from .profile import Profile
from .. import Element, Consistency, Scope
-from .. import _yaml
from .. import __version__ as bst_version
from .._exceptions import ImplError
from .._message import MessageType
@@ -387,28 +386,27 @@ class LogLine(Widget):
# Element configuration
if "%{config" in format_:
- config = _yaml.node_sanitize(element._Element__config)
line = p.fmt_subst(
line, 'config',
- yaml.round_trip_dump(config, default_flow_style=False, allow_unicode=True))
+ yaml.round_trip_dump(element._Element__config, default_flow_style=False, allow_unicode=True))
# Variables
if "%{vars" in format_:
- variables = _yaml.node_sanitize(element._Element__variables.flat)
+ variables = element._Element__variables.flat
line = p.fmt_subst(
line, 'vars',
yaml.round_trip_dump(variables, default_flow_style=False, allow_unicode=True))
# Environment
if "%{env" in format_:
- environment = _yaml.node_sanitize(element._Element__environment)
+ environment = element._Element__environment
line = p.fmt_subst(
line, 'env',
yaml.round_trip_dump(environment, default_flow_style=False, allow_unicode=True))
# Public
if "%{public" in format_:
- environment = _yaml.node_sanitize(element._Element__public)
+ environment = element._Element__public
line = p.fmt_subst(
line, 'public',
yaml.round_trip_dump(environment, default_flow_style=False, allow_unicode=True))
diff --git a/src/buildstream/_gitsourcebase.py b/src/buildstream/_gitsourcebase.py
index 1f539d820..fb6010b1e 100644
--- a/src/buildstream/_gitsourcebase.py
+++ b/src/buildstream/_gitsourcebase.py
@@ -376,27 +376,27 @@ class _GitSourceBase(Source):
BST_MIRROR_CLASS = _GitMirror
def configure(self, node):
- ref = self.node_get_member(node, str, 'ref', None)
+ ref = node.get_str('ref', None)
config_keys = ['url', 'track', 'ref', 'submodules',
'checkout-submodules', 'ref-format',
'track-tags', 'tags']
- self.node_validate(node, config_keys + Source.COMMON_CONFIG_KEYS)
+ node.validate_keys(config_keys + Source.COMMON_CONFIG_KEYS)
- tags_node = self.node_get_member(node, list, 'tags', [])
+ tags_node = node.get_sequence('tags', [])
for tag_node in tags_node:
- self.node_validate(tag_node, ['tag', 'commit', 'annotated'])
+ tag_node.validate_keys(['tag', 'commit', 'annotated'])
tags = self._load_tags(node)
- self.track_tags = self.node_get_member(node, bool, 'track-tags', False)
+ self.track_tags = node.get_bool('track-tags', default=False)
- self.original_url = self.node_get_member(node, str, 'url')
+ self.original_url = node.get_str('url')
self.mirror = self.BST_MIRROR_CLASS(self, '', self.original_url, ref, tags=tags, primary=True)
- self.tracking = self.node_get_member(node, str, 'track', None)
+ self.tracking = node.get_str('track', None)
- self.ref_format = self.node_get_member(node, str, 'ref-format', 'sha1')
+ self.ref_format = node.get_str('ref-format', 'sha1')
if self.ref_format not in ['sha1', 'git-describe']:
- provenance = self.node_provenance(node, member_name='ref-format')
+ provenance = node.get_scalar('ref-format').get_provenance()
raise SourceError("{}: Unexpected value for ref-format: {}".format(provenance, self.ref_format))
# At this point we now know if the source has a ref and/or a track.
@@ -405,17 +405,17 @@ class _GitSourceBase(Source):
raise SourceError("{}: Git sources require a ref and/or track".format(self),
reason="missing-track-and-ref")
- self.checkout_submodules = self.node_get_member(node, bool, 'checkout-submodules', True)
+ self.checkout_submodules = node.get_bool('checkout-submodules', default=True)
self.submodules = []
# Parse a dict of submodule overrides, stored in the submodule_overrides
# and submodule_checkout_overrides dictionaries.
self.submodule_overrides = {}
self.submodule_checkout_overrides = {}
- modules = self.node_get_member(node, dict, 'submodules', {})
- for path, _ in self.node_items(modules):
- submodule = self.node_get_member(modules, dict, path)
- url = self.node_get_member(submodule, str, 'url', None)
+ modules = node.get_mapping('submodules', {})
+ for path in modules.keys():
+ submodule = modules.get_mapping(path)
+ url = submodule.get_str('url', None)
# Make sure to mark all URLs that are specified in the configuration
if url:
@@ -423,7 +423,7 @@ class _GitSourceBase(Source):
self.submodule_overrides[path] = url
if 'checkout' in submodule:
- checkout = self.node_get_member(submodule, bool, 'checkout')
+ checkout = submodule.get_bool('checkout')
self.submodule_checkout_overrides[path] = checkout
self.mark_download_url(self.original_url)
@@ -464,7 +464,7 @@ class _GitSourceBase(Source):
return Consistency.INCONSISTENT
def load_ref(self, node):
- self.mirror.ref = self.node_get_member(node, str, 'ref', None)
+ self.mirror.ref = node.get_str('ref', None)
self.mirror.tags = self._load_tags(node)
def get_ref(self):
@@ -663,11 +663,11 @@ class _GitSourceBase(Source):
def _load_tags(self, node):
tags = []
- tags_node = self.node_get_member(node, list, 'tags', [])
+ tags_node = node.get_sequence('tags', [])
for tag_node in tags_node:
- tag = self.node_get_member(tag_node, str, 'tag')
- commit_ref = self.node_get_member(tag_node, str, 'commit')
- annotated = self.node_get_member(tag_node, bool, 'annotated')
+ tag = tag_node.get_str('tag')
+ commit_ref = tag_node.get_str('commit')
+ annotated = tag_node.get_bool('annotated')
tags.append((tag, commit_ref, annotated))
return tags
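For reference, a hypothetical git source configuration that the rewritten configure() and _load_tags() above would accept (all values are illustrative only):

    # sources:
    # - kind: git
    #   url: https://example.com/repo.git
    #   track: master
    #   track-tags: true
    #   tags:
    #   - tag: v1.2.0
    #     commit: 8b6d3f9c1a...
    #     annotated: true
    #
    # Each 'tags' entry becomes a (tag, commit_ref, annotated) tuple via
    # get_str('tag'), get_str('commit') and get_bool('annotated').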
diff --git a/src/buildstream/_includes.py b/src/buildstream/_includes.py
index 8f507b566..75d748723 100644
--- a/src/buildstream/_includes.py
+++ b/src/buildstream/_includes.py
@@ -1,5 +1,6 @@
import os
from . import _yaml
+from .node import MappingNode, ScalarNode, SequenceNode
from ._exceptions import LoadError, LoadErrorReason
@@ -35,19 +36,15 @@ class Includes:
if current_loader is None:
current_loader = self._loader
- includes = _yaml.node_get(node, None, '(@)', default_value=None)
- if isinstance(includes, str):
- includes = [includes]
+ includes_node = node.get_node('(@)', allowed_types=[ScalarNode, SequenceNode], allow_none=True)
- if not isinstance(includes, list) and includes is not None:
- provenance = _yaml.node_get_provenance(node, key='(@)')
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: {} must either be list or str".format(provenance, includes))
+ if includes_node:
+ if type(includes_node) is ScalarNode: # pylint: disable=unidiomatic-typecheck
+ includes = [includes_node.as_str()]
+ else:
+ includes = includes_node.as_str_list()
- include_provenance = None
- if includes:
- include_provenance = _yaml.node_get_provenance(node, key='(@)')
- _yaml.node_del(node, '(@)')
+ del node['(@)']
for include in reversed(includes):
if only_local and ':' in include:
@@ -56,6 +53,7 @@ class Includes:
include_node, file_path, sub_loader = self._include_file(include,
current_loader)
except LoadError as e:
+ include_provenance = includes_node.get_provenance()
if e.reason == LoadErrorReason.MISSING_FILE:
message = "{}: Include block references a file that could not be found: '{}'.".format(
include_provenance, include)
@@ -68,13 +66,14 @@ class Includes:
raise
if file_path in included:
+ include_provenance = includes_node.get_provenance()
raise LoadError(LoadErrorReason.RECURSIVE_INCLUDE,
"{}: trying to recursively include {}". format(include_provenance,
file_path))
# Because the included node will be modified, we need
# to copy it so that we do not modify the toplevel
# node of the provenance.
- include_node = _yaml.node_copy(include_node)
+ include_node = include_node.clone()
try:
included.add(file_path)
@@ -84,9 +83,9 @@ class Includes:
finally:
included.remove(file_path)
- _yaml.composite_and_move(node, include_node)
+ include_node._composite_under(node)
- for _, value in _yaml.node_items(node):
+ for value in node.values():
self._process_value(value,
included=included,
current_loader=current_loader,
@@ -132,12 +131,14 @@ class Includes:
included=set(),
current_loader=None,
only_local=False):
- if _yaml.is_node(value):
+ value_type = type(value)
+
+ if value_type is MappingNode:
self.process(value,
included=included,
current_loader=current_loader,
only_local=only_local)
- elif isinstance(value, list):
+ elif value_type is SequenceNode:
for v in value:
self._process_value(v,
included=included,
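The '(@)' include directive handled above accepts either a single string or a list, which is why the node is fetched with allowed_types=[ScalarNode, SequenceNode]. Both hypothetical spellings below resolve to the same list of includes:

    # (@): includes/common.yml
    #
    # (@):
    # - includes/common.yml
    # - includes/overrides.yml
    #
    # A ScalarNode is wrapped into a one-element list before processing.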
diff --git a/src/buildstream/_loader/loadelement.py b/src/buildstream/_loader/loadelement.py
index 673bc50ee..773675e2b 100644
--- a/src/buildstream/_loader/loadelement.py
+++ b/src/buildstream/_loader/loadelement.py
@@ -22,8 +22,6 @@ from itertools import count
from pyroaring import BitMap, FrozenBitMap # pylint: disable=no-name-in-module
-from .. import _yaml
-
# LoadElement():
#
@@ -81,7 +79,7 @@ class LoadElement():
self.full_name = self.name
# Ensure the root node is valid
- _yaml.node_validate(self.node, [
+ self.node.validate_keys([
'kind', 'depends', 'sources', 'sandbox',
'variables', 'environment', 'environment-nocache',
'config', 'public', 'description',
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index b221c48d0..5a2624c6a 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -24,6 +24,7 @@ from .._exceptions import LoadError, LoadErrorReason
from .. import Consistency
from .. import _yaml
from ..element import Element
+from ..node import Node
from .._profile import Topics, PROFILER
from .._includes import Includes
@@ -120,7 +121,7 @@ class Loader():
# Set up a dummy element that depends on all top-level targets
# to resolve potential circular dependencies between them
- dummy_target = LoadElement(_yaml.new_empty_node(), "", self)
+ dummy_target = LoadElement(Node.from_dict({}), "", self)
dummy_target.dependencies.extend(
LoadElement.Dependency(element, Symbol.RUNTIME)
for element in target_elements
@@ -227,7 +228,7 @@ class Loader():
message, detail=detail) from e
else:
raise
- kind = _yaml.node_get(node, str, Symbol.KIND)
+ kind = node.get_str(Symbol.KIND)
if kind == "junction":
self._first_pass_options.process_node(node)
else:
@@ -306,7 +307,7 @@ class Loader():
dep_deps = extract_depends_from_node(dep_element.node)
loader_queue.append((dep_element, list(reversed(dep_deps)), []))
- if _yaml.node_get(dep_element.node, str, Symbol.KIND) == 'junction':
+ if dep_element.node.get_str(Symbol.KIND) == 'junction':
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: Cannot depend on junction"
.format(dep.provenance))
@@ -463,36 +464,32 @@ class Loader():
return meta_element
node = element.node
- elt_provenance = _yaml.node_get_provenance(node)
+ elt_provenance = node.get_provenance()
meta_sources = []
- sources = _yaml.node_get(node, list, Symbol.SOURCES, default_value=[])
- element_kind = _yaml.node_get(node, str, Symbol.KIND)
+ sources = node.get_sequence(Symbol.SOURCES, default=[])
+ element_kind = node.get_str(Symbol.KIND)
- # Safe loop calling into _yaml.node_get() for each element ensures
- # we have good error reporting
- for i in range(len(sources)):
- source = _yaml.node_get(node, dict, Symbol.SOURCES, indices=[i])
- kind = _yaml.node_get(source, str, Symbol.KIND)
- _yaml.node_del(source, Symbol.KIND)
+ for index, source in enumerate(sources):
+ kind = source.get_str(Symbol.KIND)
+ del source[Symbol.KIND]
# Directory is optional
- directory = _yaml.node_get(source, str, Symbol.DIRECTORY, default_value=None)
+ directory = source.get_str(Symbol.DIRECTORY, default=None)
if directory:
- _yaml.node_del(source, Symbol.DIRECTORY)
+ del source[Symbol.DIRECTORY]
- index = sources.index(source)
meta_source = MetaSource(element.name, index, element_kind, kind, source, directory)
meta_sources.append(meta_source)
meta_element = MetaElement(self.project, element.name, element_kind,
elt_provenance, meta_sources,
- _yaml.node_get(node, dict, Symbol.CONFIG, default_value={}),
- _yaml.node_get(node, dict, Symbol.VARIABLES, default_value={}),
- _yaml.node_get(node, dict, Symbol.ENVIRONMENT, default_value={}),
- _yaml.node_get(node, list, Symbol.ENV_NOCACHE, default_value=[]),
- _yaml.node_get(node, dict, Symbol.PUBLIC, default_value={}),
- _yaml.node_get(node, dict, Symbol.SANDBOX, default_value={}),
+ node.get_mapping(Symbol.CONFIG, default={}),
+ node.get_mapping(Symbol.VARIABLES, default={}),
+ node.get_mapping(Symbol.ENVIRONMENT, default={}),
+ node.get_sequence(Symbol.ENV_NOCACHE, default=[]).as_str_list(),
+ node.get_mapping(Symbol.PUBLIC, default={}),
+ node.get_mapping(Symbol.SANDBOX, default={}),
element_kind == 'junction')
# Cache it now, make sure it's already there before recursing
diff --git a/src/buildstream/_loader/metaelement.py b/src/buildstream/_loader/metaelement.py
index 45eb6f4d0..67d2ec771 100644
--- a/src/buildstream/_loader/metaelement.py
+++ b/src/buildstream/_loader/metaelement.py
@@ -17,7 +17,7 @@
# Authors:
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
-from .. import _yaml
+from ..node import Node
class MetaElement():
@@ -48,12 +48,12 @@ class MetaElement():
self.kind = kind
self.provenance = provenance
self.sources = sources
- self.config = config or _yaml.new_empty_node()
- self.variables = variables or _yaml.new_empty_node()
- self.environment = environment or _yaml.new_empty_node()
+ self.config = config or Node.from_dict({})
+ self.variables = variables or Node.from_dict({})
+ self.environment = environment or Node.from_dict({})
self.env_nocache = env_nocache or []
- self.public = public or _yaml.new_empty_node()
- self.sandbox = sandbox or _yaml.new_empty_node()
+ self.public = public or Node.from_dict({})
+ self.sandbox = sandbox or Node.from_dict({})
self.build_dependencies = []
self.dependencies = []
self.first_pass = first_pass
diff --git a/src/buildstream/_loader/types.pyx b/src/buildstream/_loader/types.pyx
index da33d6c54..e8c16b36e 100644
--- a/src/buildstream/_loader/types.pyx
+++ b/src/buildstream/_loader/types.pyx
@@ -18,7 +18,7 @@
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
from .._exceptions import LoadError, LoadErrorReason
-from .. cimport _yaml
+from ..node cimport MappingNode, Node, ProvenanceInformation, ScalarNode, SequenceNode
# Symbol():
@@ -59,48 +59,47 @@ class Symbol():
# dependency was declared
#
cdef class Dependency:
- cdef public _yaml.ProvenanceInformation provenance
+ cdef public ProvenanceInformation provenance
cdef public str name
cdef public str dep_type
cdef public str junction
def __init__(self,
- object dep,
- _yaml.ProvenanceInformation provenance,
+ Node dep,
str default_dep_type=None):
cdef str dep_type
- self.provenance = provenance
+ self.provenance = dep.get_provenance()
- if type(dep) is str:
- self.name = <str> dep
+ if type(dep) is ScalarNode:
+ self.name = dep.as_str()
self.dep_type = default_dep_type
self.junction = None
- elif type(dep) is _yaml.Node and type(dep.value) is dict:
+ elif type(dep) is MappingNode:
if default_dep_type:
- _yaml.node_validate(<_yaml.Node> dep, ['filename', 'junction'])
+ (<MappingNode> dep).validate_keys(['filename', 'junction'])
dep_type = default_dep_type
else:
- _yaml.node_validate(<_yaml.Node> dep, ['filename', 'type', 'junction'])
+ (<MappingNode> dep).validate_keys(['filename', 'type', 'junction'])
# Make type optional, for this we set it to None
- dep_type = <str> _yaml.node_get(<_yaml.Node> dep, str, <str> Symbol.TYPE, None, None)
+ dep_type = (<MappingNode> dep).get_str(<str> Symbol.TYPE, None)
if dep_type is None or dep_type == <str> Symbol.ALL:
dep_type = None
elif dep_type not in [Symbol.BUILD, Symbol.RUNTIME]:
- provenance = _yaml.node_get_provenance(dep, key=Symbol.TYPE)
+ provenance = dep.get_scalar(Symbol.TYPE).get_provenance()
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: Dependency type '{}' is not 'build', 'runtime' or 'all'"
.format(provenance, dep_type))
- self.name = <str> _yaml.node_get(<_yaml.Node> dep, str, <str> Symbol.FILENAME)
+ self.name = (<MappingNode> dep).get_str(<str> Symbol.FILENAME)
self.dep_type = dep_type
- self.junction = <str> _yaml.node_get(<_yaml.Node> dep, str, <str> Symbol.JUNCTION, None, None)
+ self.junction = (<MappingNode> dep).get_str(<str> Symbol.JUNCTION, None)
else:
raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Dependency is not specified as a string or a dictionary".format(provenance))
+ "{}: Dependency is not specified as a string or a dictionary".format(self.provenance))
# `:` characters are not allowed in filename if a junction was
# explicitly specified
@@ -137,20 +136,16 @@ cdef class Dependency:
# default_dep_type (str): type to give to the dependency
# acc (list): a list in which to add the loaded dependencies
#
-cdef void _extract_depends_from_node(_yaml.Node node, str key, str default_dep_type, list acc) except *:
- cdef list depends = <list> _yaml.node_get(node, list, key, None, [])
- cdef int index
- cdef _yaml.ProvenanceInformation dep_provenance
+cdef void _extract_depends_from_node(Node node, str key, str default_dep_type, list acc) except *:
+ cdef SequenceNode depends = node.get_sequence(key, [])
+ cdef Node dep_node
- for index in range(len(depends)):
- # FIXME: the provenance information would be obtainable from the Node directly if we stop
- # stripping provenance and have proper nodes for str elements
- dep_provenance = <_yaml.ProvenanceInformation> _yaml.node_get_provenance(node, key=key, indices=[index])
- dependency = Dependency(depends[index], dep_provenance, default_dep_type=default_dep_type)
+ for dep_node in depends:
+ dependency = Dependency(dep_node, default_dep_type=default_dep_type)
acc.append(dependency)
# Now delete the field, we don't want it anymore
- _yaml.node_del(node, key, safe=True)
+ node.safe_del(key)
# extract_depends_from_node():
@@ -167,7 +162,7 @@ cdef void _extract_depends_from_node(_yaml.Node node, str key, str default_dep_t
# Returns:
# (list): a list of Dependency objects
#
-def extract_depends_from_node(_yaml.Node node):
+def extract_depends_from_node(Node node):
cdef list acc = []
_extract_depends_from_node(node, <str> Symbol.BUILD_DEPENDS, <str> Symbol.BUILD, acc)
_extract_depends_from_node(node, <str> Symbol.RUNTIME_DEPENDS, <str> Symbol.RUNTIME, acc)
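Dependency.__init__ above distinguishes the two forms a 'depends' entry may take; as a sketch, both of these (hypothetical element names) produce a Dependency:

    # depends:
    # - base.bst                    # ScalarNode: a plain filename
    # - filename: deps/tools.bst    # MappingNode: filename plus options
    #   type: build
    #   junction: sdk.bst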
diff --git a/src/buildstream/_options/option.py b/src/buildstream/_options/option.py
index 511678749..da1191310 100644
--- a/src/buildstream/_options/option.py
+++ b/src/buildstream/_options/option.py
@@ -17,7 +17,7 @@
# Authors:
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
-from .. import _yaml
+from ..node import _assert_symbol_name
# Shared symbols for validation purposes
@@ -59,17 +59,14 @@ class Option():
# node (dict): The loaded YAML dictionary describing
# the option
def load(self, node):
-
# We don't use the description, but we do require that options have a
# description.
- _yaml.node_get(node, str, 'description')
-
- self.variable = _yaml.node_get(node, str, 'variable', default_value=None)
+ node.get_str('description')
+ self.variable = node.get_str('variable', default=None)
# Assert valid symbol name for variable name
if self.variable is not None:
- p = _yaml.node_get_provenance(node, 'variable')
- _yaml.assert_symbol_name(p, self.variable, 'variable name')
+ _assert_symbol_name(self.variable, 'variable name', ref_node=node.get_node('variable'))
# load_value()
#
diff --git a/src/buildstream/_options/optionarch.py b/src/buildstream/_options/optionarch.py
index e7735eaa2..612ca2aa0 100644
--- a/src/buildstream/_options/optionarch.py
+++ b/src/buildstream/_options/optionarch.py
@@ -17,7 +17,6 @@
# Authors:
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
-from .. import _yaml
from .._exceptions import LoadError, LoadErrorReason, PlatformError
from .._platform import Platform
from .optionenum import OptionEnum
@@ -55,7 +54,7 @@ class OptionArch(OptionEnum):
# Do not terminate the loop early to ensure we validate
# all values in the list.
except PlatformError as e:
- provenance = _yaml.node_get_provenance(node, key='values', indices=[index])
+ provenance = node.get_sequence('values').scalar_at(index).get_provenance()
prefix = ""
if provenance:
prefix = "{}: ".format(provenance)
diff --git a/src/buildstream/_options/optionbool.py b/src/buildstream/_options/optionbool.py
index bdbb1d32a..28ab71278 100644
--- a/src/buildstream/_options/optionbool.py
+++ b/src/buildstream/_options/optionbool.py
@@ -17,7 +17,6 @@
# Authors:
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
-from .. import _yaml
from .._exceptions import LoadError, LoadErrorReason
from .option import Option, OPTION_SYMBOLS
@@ -33,14 +32,14 @@ class OptionBool(Option):
def load(self, node):
super().load(node)
- _yaml.node_validate(node, OPTION_SYMBOLS + ['default'])
- self.value = _yaml.node_get(node, bool, 'default')
+ node.validate_keys(OPTION_SYMBOLS + ['default'])
+ self.value = node.get_bool('default')
def load_value(self, node, *, transform=None):
if transform:
- self.set_value(transform(_yaml.node_get(node, str, self.name)))
+ self.set_value(transform(node.get_str(self.name)))
else:
- self.value = _yaml.node_get(node, bool, self.name)
+ self.value = node.get_bool(self.name)
def set_value(self, value):
if value in ('True', 'true'):
diff --git a/src/buildstream/_options/optionenum.py b/src/buildstream/_options/optionenum.py
index 889db965c..d1a7a85c9 100644
--- a/src/buildstream/_options/optionenum.py
+++ b/src/buildstream/_options/optionenum.py
@@ -17,7 +17,6 @@
# Authors:
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
-from .. import _yaml
from .._exceptions import LoadError, LoadErrorReason
from .option import Option, OPTION_SYMBOLS
@@ -44,22 +43,23 @@ class OptionEnum(Option):
if allow_default_definition:
valid_symbols += ['default']
- _yaml.node_validate(node, valid_symbols)
+ node.validate_keys(valid_symbols)
- self.values = _yaml.node_get(node, list, 'values', default_value=[])
+ self.values = node.get_sequence('values', default=[]).as_str_list()
if not self.values:
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: No values specified for {} option '{}'"
- .format(_yaml.node_get_provenance(node), self.OPTION_TYPE, self.name))
+ .format(node.get_provenance(), self.OPTION_TYPE, self.name))
# Allow subclass to define the default value
self.value = self.load_default_value(node)
def load_value(self, node, *, transform=None):
- self.value = _yaml.node_get(node, str, self.name)
+ value_node = node.get_scalar(self.name)
+ self.value = value_node.as_str()
if transform:
self.value = transform(self.value)
- self.validate(self.value, _yaml.node_get_provenance(node, self.name))
+ self.validate(self.value, value_node)
def set_value(self, value):
self.validate(value)
@@ -68,17 +68,20 @@ class OptionEnum(Option):
def get_value(self):
return self.value
- def validate(self, value, provenance=None):
+ def validate(self, value, node=None):
if value not in self.values:
- prefix = ""
- if provenance:
+ if node is not None:
+ provenance = node.get_provenance()
prefix = "{}: ".format(provenance)
+ else:
+ prefix = ""
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}Invalid value for {} option '{}': {}\n"
.format(prefix, self.OPTION_TYPE, self.name, value) +
"Valid values: {}".format(", ".join(self.values)))
def load_default_value(self, node):
- value = _yaml.node_get(node, str, 'default')
- self.validate(value, _yaml.node_get_provenance(node, 'default'))
+ value_node = node.get_scalar('default')
+ value = value_node.as_str()
+ self.validate(value, value_node)
return value
diff --git a/src/buildstream/_options/optionflags.py b/src/buildstream/_options/optionflags.py
index eba3a8dd5..80dd1b55d 100644
--- a/src/buildstream/_options/optionflags.py
+++ b/src/buildstream/_options/optionflags.py
@@ -17,7 +17,6 @@
# Authors:
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
-from .. import _yaml
from .._exceptions import LoadError, LoadErrorReason
from .option import Option, OPTION_SYMBOLS
@@ -44,24 +43,26 @@ class OptionFlags(Option):
if allow_value_definitions:
valid_symbols += ['values']
- _yaml.node_validate(node, valid_symbols)
+ node.validate_keys(valid_symbols)
# Allow subclass to define the valid values
self.values = self.load_valid_values(node)
if not self.values:
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: No values specified for {} option '{}'"
- .format(_yaml.node_get_provenance(node), self.OPTION_TYPE, self.name))
+ .format(node.get_provenance(), self.OPTION_TYPE, self.name))
- self.value = _yaml.node_get(node, list, 'default', default_value=[])
- self.validate(self.value, _yaml.node_get_provenance(node, 'default'))
+ value_node = node.get_sequence('default', default=[])
+ self.value = value_node.as_str_list()
+ self.validate(self.value, value_node)
def load_value(self, node, *, transform=None):
- self.value = _yaml.node_get(node, list, self.name)
+ value_node = node.get_sequence(self.name)
+ self.value = value_node.as_str_list()
if transform:
self.value = [transform(x) for x in self.value]
self.value = sorted(self.value)
- self.validate(self.value, _yaml.node_get_provenance(node, self.name))
+ self.validate(self.value, value_node)
def set_value(self, value):
# Strip out all whitespace, allowing: "value1, value2 , value3"
@@ -76,12 +77,14 @@ class OptionFlags(Option):
def get_value(self):
return ",".join(self.value)
- def validate(self, value, provenance=None):
+ def validate(self, value, node=None):
for flag in value:
if flag not in self.values:
- prefix = ""
- if provenance:
+ if node is not None:
+ provenance = node.get_provenance()
prefix = "{}: ".format(provenance)
+ else:
+ prefix = ""
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}Invalid value for flags option '{}': {}\n"
.format(prefix, self.name, value) +
@@ -90,4 +93,4 @@ class OptionFlags(Option):
def load_valid_values(self, node):
# Allow the more descriptive error to raise when no values
# exist rather than bailing out here (by specifying default_value)
- return _yaml.node_get(node, list, 'values', default_value=[])
+ return node.get_sequence('values', default=[]).as_str_list()
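A sketch of a flags option definition that load() and load_valid_values() above would parse (a hypothetical project.conf fragment):

    # options:
    #   features:
    #     type: flags
    #     description: Optional features to enable
    #     values: [doc, tests, debug]
    #     default: [doc]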
diff --git a/src/buildstream/_options/optionpool.py b/src/buildstream/_options/optionpool.py
index de3af3e15..d7541530b 100644
--- a/src/buildstream/_options/optionpool.py
+++ b/src/buildstream/_options/optionpool.py
@@ -20,8 +20,8 @@
import jinja2
-from .. import _yaml
from .._exceptions import LoadError, LoadErrorReason
+from ..node import MappingNode, SequenceNode, _assert_symbol_name
from .optionbool import OptionBool
from .optionenum import OptionEnum
from .optionflags import OptionFlags
@@ -65,17 +65,16 @@ class OptionPool():
#
def load(self, options):
- for option_name, option_definition in _yaml.node_items(options):
+ for option_name, option_definition in options.items():
# Assert that the option name is a valid symbol
- p = _yaml.node_get_provenance(options, option_name)
- _yaml.assert_symbol_name(p, option_name, "option name", allow_dashes=False)
+ _assert_symbol_name(option_name, "option name", ref_node=option_definition, allow_dashes=False)
- opt_type_name = _yaml.node_get(option_definition, str, 'type')
+ opt_type_name = option_definition.get_str('type')
try:
opt_type = _OPTION_TYPES[opt_type_name]
except KeyError:
- p = _yaml.node_get_provenance(option_definition, 'type')
+ p = option_definition.get_scalar('type').get_provenance()
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: Invalid option type '{}'".format(p, opt_type_name))
@@ -91,11 +90,11 @@ class OptionPool():
# node (dict): The loaded YAML options
#
def load_yaml_values(self, node, *, transform=None):
- for option_name in _yaml.node_keys(node):
+ for option_name, option_value in node.items():
try:
option = self._options[option_name]
except KeyError as e:
- p = _yaml.node_get_provenance(node, option_name)
+ p = option_value.get_provenance()
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: Unknown option '{}' specified"
.format(p, option_name)) from e
@@ -152,7 +151,7 @@ class OptionPool():
def export_variables(self, variables):
for _, option in self._options.items():
if option.variable:
- _yaml.node_set(variables, option.variable, option.get_value())
+ variables[option.variable] = option.get_value()
# printable_variables()
#
@@ -185,10 +184,11 @@ class OptionPool():
# Now recurse into nested dictionaries and lists
# and process any indirectly nested conditionals.
#
- for _, value in _yaml.node_items(node):
- if _yaml.is_node(value):
+ for value in node.values():
+ value_type = type(value)
+ if value_type is MappingNode:
self.process_node(value)
- elif isinstance(value, list):
+ elif value_type is SequenceNode:
self._process_list(value)
#######################################################
@@ -237,9 +237,10 @@ class OptionPool():
#
def _process_list(self, values):
for value in values:
- if _yaml.is_node(value):
+ value_type = type(value)
+ if value_type is MappingNode:
self.process_node(value)
- elif isinstance(value, list):
+ elif value_type is SequenceNode:
self._process_list(value)
# Process a single conditional, resulting in composition
@@ -248,47 +249,43 @@ class OptionPool():
# Return true if a conditional was processed.
#
def _process_one_node(self, node):
- conditions = _yaml.node_get(node, list, '(?)', default_value=None)
- assertion = _yaml.node_get(node, str, '(!)', default_value=None)
+ conditions = node.get_sequence('(?)', default=None)
+ assertion = node.get_str('(!)', default=None)
# Process assertions first; we want to abort on the first encountered
# assertion in a given dictionary, and not lose an assertion due to
# it being overwritten by a later assertion which might also trigger.
if assertion is not None:
- p = _yaml.node_get_provenance(node, '(!)')
+ p = node.get_scalar('(!)').get_provenance()
raise LoadError(LoadErrorReason.USER_ASSERTION,
"{}: {}".format(p, assertion.strip()))
if conditions is not None:
+ del node['(?)']
- # Collect provenance first, we need to delete the (?) key
- # before any composition occurs.
- provenance = [
- _yaml.node_get_provenance(node, '(?)', indices=[i])
- for i in range(len(conditions))
- ]
- _yaml.node_del(node, '(?)')
-
- for condition, p in zip(conditions, provenance):
- tuples = list(_yaml.node_items(condition))
+ for condition in conditions:
+ tuples = list(condition.items())
if len(tuples) > 1:
+ provenance = condition.get_provenance()
raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Conditional statement has more than one key".format(p))
+ "{}: Conditional statement has more than one key".format(provenance))
expression, value = tuples[0]
try:
apply_fragment = self._evaluate(expression)
except LoadError as e:
# Prepend the provenance of the error
- raise LoadError(e.reason, "{}: {}".format(p, e)) from e
+ provenance = condition.get_provenance()
+ raise LoadError(e.reason, "{}: {}".format(provenance, e)) from e
- if not _yaml.is_node(value):
+ if type(value) is not MappingNode: # pylint: disable=unidiomatic-typecheck
+ provenance = condition.get_provenance()
raise LoadError(LoadErrorReason.ILLEGAL_COMPOSITE,
- "{}: Only values of type 'dict' can be composed.".format(p))
+ "{}: Only values of type 'dict' can be composed.".format(provenance))
# Apply the yaml fragment if its condition evaluates to true
if apply_fragment:
- _yaml.composite(node, value)
+ value._composite(node)
return True
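
The conditional handling above condenses into a short sketch; the (?) document
is made up, while get_sequence(), items() and the lazy get_provenance() calls
are taken from the hunk itself:

    from buildstream import _yaml

    node = _yaml.load_data(
        "(?):\n"
        "- arch == 'x86_64':\n"
        "    debug: 'true'\n"
    )

    conditions = node.get_sequence('(?)', default=None)
    if conditions is not None:
        # The '(?)' key is now deleted up front; provenance is no longer
        # collected ahead of time, only fetched on the error paths.
        del node['(?)']
        for condition in conditions:
            tuples = list(condition.items())
            assert len(tuples) == 1, condition.get_provenance()
            expression, value = tuples[0]
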
diff --git a/src/buildstream/_plugincontext.py b/src/buildstream/_plugincontext.py
index 7fef9b9f4..2442e306f 100644
--- a/src/buildstream/_plugincontext.py
+++ b/src/buildstream/_plugincontext.py
@@ -22,7 +22,6 @@ import inspect
from ._exceptions import PluginError, LoadError, LoadErrorReason
from . import utils
-from . import _yaml
# A Context for loading plugin types
@@ -138,19 +137,19 @@ class PluginContext():
loaded_dependency = False
for origin in self._plugin_origins:
- if kind not in _yaml.node_get(origin, list, 'plugins'):
+ if kind not in origin.get_sequence('plugins').as_str_list():
continue
- if _yaml.node_get(origin, str, 'origin') == 'local':
- local_path = _yaml.node_get(origin, str, 'path')
+ if origin.get_str('origin') == 'local':
+ local_path = origin.get_str('path')
source = self._get_local_plugin_source(local_path)
- elif _yaml.node_get(origin, str, 'origin') == 'pip':
- package_name = _yaml.node_get(origin, str, 'package-name')
+ elif origin.get_str('origin') == 'pip':
+ package_name = origin.get_str('package-name')
source, defaults = self._get_pip_plugin_source(package_name, kind)
else:
raise PluginError("Failed to load plugin '{}': "
"Unexpected plugin origin '{}'"
- .format(kind, _yaml.node_get(origin, str, 'origin')))
+ .format(kind, origin.get_str('origin')))
loaded_dependency = True
break
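
A sketch of the same lookups with a made-up pip origin; get_str() and
get_sequence(...).as_str_list() are the calls used in the hunk:

    from buildstream import _yaml

    origin = _yaml.load_data(
        "origin: pip\n"
        "package-name: sample-plugins\n"
        "plugins: [mysource]\n"
    )

    # Typed lookups replace repeated _yaml.node_get() calls
    if 'mysource' in origin.get_sequence('plugins').as_str_list():
        if origin.get_str('origin') == 'pip':
            package_name = origin.get_str('package-name')
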
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index 5f433c090..97a2c4f77 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -33,6 +33,7 @@ from ._exceptions import LoadError, LoadErrorReason
from ._options import OptionPool
from ._artifactcache import ArtifactCache
from ._sourcecache import SourceCache
+from .node import ScalarNode, SequenceNode, _assert_symbol_name
from .sandbox import SandboxRemote
from ._elementfactory import ElementFactory
from ._sourcefactory import SourceFactory
@@ -84,7 +85,7 @@ class ProjectConfig:
self.source_overrides = {} # Source specific configurations
self.mirrors = OrderedDict() # contains dicts of alias-mappings to URIs.
self.default_mirror = None # The name of the preferred mirror.
- self._aliases = {} # Aliases dictionary
+ self._aliases = None # Aliases dictionary
# Project()
@@ -200,7 +201,7 @@ class Project():
if url and utils._ALIAS_SEPARATOR in url:
url_alias, url_body = url.split(utils._ALIAS_SEPARATOR, 1)
- alias_url = _yaml.node_get(config._aliases, str, url_alias, default_value=None)
+ alias_url = config._aliases.get_str(url_alias, default=None)
if alias_url:
url = alias_url + url_body
@@ -230,7 +231,7 @@ class Project():
# Anything that alters the build goes into the unique key
# (currently nothing here)
- self._cache_key = _cachekey.generate_key(_yaml.new_empty_node())
+ self._cache_key = _cachekey.generate_key({})
return self._cache_key
@@ -247,8 +248,7 @@ class Project():
# will always be raised if both parameters are set to ``True``.
#
# Args:
- # node (dict): A dictionary loaded from YAML
- # key (str): The key whose value contains a path to validate
+ # node (ScalarNode): A Node loaded from YAML containing the path to validate
# check_is_file (bool): If ``True`` an error will also be raised
# if path does not point to a regular file.
# Defaults to ``False``
@@ -262,21 +262,21 @@ class Project():
# (LoadError): In case that the project path is not valid or does not
# exist
#
- def get_path_from_node(self, node, key, *,
+ def get_path_from_node(self, node, *,
check_is_file=False, check_is_dir=False):
- path_str = _yaml.node_get(node, str, key)
+ path_str = node.as_str()
path = Path(path_str)
full_path = self._absolute_directory_path / path
- provenance = _yaml.node_get_provenance(node, key=key)
-
if full_path.is_symlink():
+ provenance = node.get_provenance()
raise LoadError(LoadErrorReason.PROJ_PATH_INVALID_KIND,
"{}: Specified path '{}' must not point to "
"symbolic links "
.format(provenance, path_str))
if path.parts and path.parts[0] == '..':
+ provenance = node.get_provenance()
raise LoadError(LoadErrorReason.PROJ_PATH_INVALID,
"{}: Specified path '{}' first component must "
"not be '..'"
@@ -288,6 +288,7 @@ class Project():
else:
full_resolved_path = full_path.resolve(strict=True) # pylint: disable=unexpected-keyword-arg
except FileNotFoundError:
+ provenance = node.get_provenance()
raise LoadError(LoadErrorReason.MISSING_FILE,
"{}: Specified path '{}' does not exist"
.format(provenance, path_str))
@@ -296,12 +297,14 @@ class Project():
full_resolved_path == self._absolute_directory_path)
if not is_inside:
+ provenance = node.get_provenance()
raise LoadError(LoadErrorReason.PROJ_PATH_INVALID,
"{}: Specified path '{}' must not lead outside of the "
"project directory"
.format(provenance, path_str))
if path.is_absolute():
+ provenance = node.get_provenance()
raise LoadError(LoadErrorReason.PROJ_PATH_INVALID,
"{}: Absolute path: '{}' invalid.\n"
"Please specify a path relative to the project's root."
@@ -310,17 +313,20 @@ class Project():
if full_resolved_path.is_socket() or (
full_resolved_path.is_fifo() or
full_resolved_path.is_block_device()):
+ provenance = node.get_provenance()
raise LoadError(LoadErrorReason.PROJ_PATH_INVALID_KIND,
"{}: Specified path '{}' points to an unsupported "
"file kind"
.format(provenance, path_str))
if check_is_file and not full_resolved_path.is_file():
+ provenance = node.get_provenance()
raise LoadError(LoadErrorReason.PROJ_PATH_INVALID_KIND,
"{}: Specified path '{}' is not a regular file"
.format(provenance, path_str))
if check_is_dir and not full_resolved_path.is_dir():
+ provenance = node.get_provenance()
raise LoadError(LoadErrorReason.PROJ_PATH_INVALID_KIND,
"{}: Specified path '{}' is not a directory"
.format(provenance, path_str))
@@ -328,7 +334,7 @@ class Project():
return path_str
def _validate_node(self, node):
- _yaml.node_validate(node, [
+ node.validate_keys([
'format-version',
'element-path', 'variables',
'environment', 'environment-nocache',
@@ -404,7 +410,7 @@ class Project():
else:
config = self.config
- return _yaml.node_get(config._aliases, str, alias, default_value=None)
+ return config._aliases.get_str(alias, default=None)
# get_alias_uris()
#
@@ -419,7 +425,7 @@ class Project():
else:
config = self.config
- if not alias or alias not in config._aliases:
+ if not alias or alias not in config._aliases: # pylint: disable=unsupported-membership-test
return [None]
mirror_list = []
@@ -429,7 +435,7 @@ class Project():
mirror_list = alias_mapping[alias] + mirror_list
else:
mirror_list += alias_mapping[alias]
- mirror_list.append(_yaml.node_get(config._aliases, str, alias))
+ mirror_list.append(config._aliases.get_str(alias))
return mirror_list
# load_elements()
@@ -569,11 +575,11 @@ class Project():
else:
raise
- pre_config_node = _yaml.node_copy(self._default_config_node)
- _yaml.composite(pre_config_node, self._project_conf)
+ pre_config_node = self._default_config_node.clone()
+ self._project_conf._composite(pre_config_node)
# Assert project's format version early, before validating toplevel keys
- format_version = _yaml.node_get(pre_config_node, int, 'format-version')
+ format_version = pre_config_node.get_int('format-version')
if BST_FORMAT_VERSION < format_version:
major, minor = utils.get_bst_version()
raise LoadError(
@@ -585,45 +591,47 @@ class Project():
# The project name, element path and option declarations
# are constant and cannot be overridden by option conditional statements
- self.name = _yaml.node_get(self._project_conf, str, 'name')
+ # FIXME: we should be keeping node information for further composition here
+ self.name = self._project_conf.get_str('name')
# Validate that project name is a valid symbol name
- _yaml.assert_symbol_name(_yaml.node_get_provenance(pre_config_node, 'name'),
- self.name, "project name")
+ _assert_symbol_name(self.name, "project name",
+ ref_node=pre_config_node.get_node('name'))
self.element_path = os.path.join(
self.directory,
- self.get_path_from_node(pre_config_node, 'element-path',
+ self.get_path_from_node(pre_config_node.get_scalar('element-path'),
check_is_dir=True)
)
self.config.options = OptionPool(self.element_path)
self.first_pass_config.options = OptionPool(self.element_path)
- defaults = _yaml.node_get(pre_config_node, dict, 'defaults')
- _yaml.node_validate(defaults, ['targets'])
- self._default_targets = _yaml.node_get(defaults, list, "targets")
+ defaults = pre_config_node.get_mapping('defaults')
+ defaults.validate_keys(['targets'])
+ self._default_targets = defaults.get_sequence("targets").as_str_list()
# Fatal warnings
- self._fatal_warnings = _yaml.node_get(pre_config_node, list, 'fatal-warnings', default_value=[])
+ self._fatal_warnings = pre_config_node.get_sequence('fatal-warnings', default=[]).as_str_list()
self.loader = Loader(self._context, self,
parent=parent_loader, fetch_subprojects=fetch_subprojects)
self._project_includes = Includes(self.loader, copy_tree=False)
- project_conf_first_pass = _yaml.node_copy(self._project_conf)
+ project_conf_first_pass = self._project_conf.clone()
self._project_includes.process(project_conf_first_pass, only_local=True)
- config_no_include = _yaml.node_copy(self._default_config_node)
- _yaml.composite(config_no_include, project_conf_first_pass)
+ config_no_include = self._default_config_node.clone()
+ project_conf_first_pass._composite(config_no_include)
self._load_pass(config_no_include, self.first_pass_config,
ignore_unknown=True)
# Use separate file for storing source references
- self.ref_storage = _yaml.node_get(pre_config_node, str, 'ref-storage')
+ ref_storage_node = pre_config_node.get_scalar('ref-storage')
+ self.ref_storage = ref_storage_node.as_str()
if self.ref_storage not in [ProjectRefStorage.INLINE, ProjectRefStorage.PROJECT_REFS]:
- p = _yaml.node_get_provenance(pre_config_node, 'ref-storage')
+ p = ref_storage_node.get_provenance()
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: Invalid value '{}' specified for ref-storage"
.format(p, self.ref_storage))
@@ -636,10 +644,10 @@ class Project():
# Process the second pass of loading the project configuration.
#
def _load_second_pass(self):
- project_conf_second_pass = _yaml.node_copy(self._project_conf)
+ project_conf_second_pass = self._project_conf.clone()
self._project_includes.process(project_conf_second_pass)
- config = _yaml.node_copy(self._default_config_node)
- _yaml.composite(config, project_conf_second_pass)
+ config = self._default_config_node.clone()
+ project_conf_second_pass._composite(config)
self._load_pass(config, self.config)
@@ -677,23 +685,20 @@ class Project():
self.remote_execution_specs = self._context.remote_execution_specs
# Load sandbox environment variables
- self.base_environment = _yaml.node_get(config, dict, 'environment')
- self.base_env_nocache = _yaml.node_get(config, list, 'environment-nocache')
+ self.base_environment = config.get_mapping('environment')
+ self.base_env_nocache = config.get_sequence('environment-nocache').as_str_list()
# Load sandbox configuration
- self._sandbox = _yaml.node_get(config, dict, 'sandbox')
+ self._sandbox = config.get_mapping('sandbox')
# Load project split rules
- self._splits = _yaml.node_get(config, dict, 'split-rules')
+ self._splits = config.get_mapping('split-rules')
# Support backwards compatibility for fail-on-overlap
- fail_on_overlap = _yaml.node_get(config, bool, 'fail-on-overlap', default_value=None)
-
- if (CoreWarnings.OVERLAPS not in self._fatal_warnings) and fail_on_overlap:
- self._fatal_warnings.append(CoreWarnings.OVERLAPS)
+ fail_on_overlap = config.get_scalar('fail-on-overlap', None)
# Deprecation check
- if fail_on_overlap is not None:
+ if not fail_on_overlap.is_none():
self._context.messenger.message(
Message(
None,
@@ -703,36 +708,37 @@ class Project():
)
)
+ if (CoreWarnings.OVERLAPS not in self._fatal_warnings) and fail_on_overlap.as_bool():
+ self._fatal_warnings.append(CoreWarnings.OVERLAPS)
+
# Load project.refs if it exists, this may be ignored.
if self.ref_storage == ProjectRefStorage.PROJECT_REFS:
self.refs.load(self.options)
# Parse shell options
- shell_options = _yaml.node_get(config, dict, 'shell')
- _yaml.node_validate(shell_options, ['command', 'environment', 'host-files'])
- self._shell_command = _yaml.node_get(shell_options, list, 'command')
+ shell_options = config.get_mapping('shell')
+ shell_options.validate_keys(['command', 'environment', 'host-files'])
+ self._shell_command = shell_options.get_sequence('command').as_str_list()
# Perform environment expansion right away
- shell_environment = _yaml.node_get(shell_options, dict, 'environment', default_value={})
- for key in _yaml.node_keys(shell_environment):
- value = _yaml.node_get(shell_environment, str, key)
+ shell_environment = shell_options.get_mapping('environment', default={})
+ for key in shell_environment.keys():
+ value = shell_environment.get_str(key)
self._shell_environment[key] = os.path.expandvars(value)
# 'host-files' is parsed as a list for convenience
- host_files = _yaml.node_get(shell_options, list, 'host-files', default_value=[])
+ host_files = shell_options.get_sequence('host-files', default=[])
for host_file in host_files:
- if isinstance(host_file, str):
+ if isinstance(host_file, ScalarNode):
mount = HostMount(host_file)
else:
# Some validation
- index = host_files.index(host_file)
- host_file_desc = _yaml.node_get(shell_options, dict, 'host-files', indices=[index])
- _yaml.node_validate(host_file_desc, ['path', 'host_path', 'optional'])
+ host_file.validate_keys(['path', 'host_path', 'optional'])
# Parse the host mount
- path = _yaml.node_get(host_file_desc, str, 'path')
- host_path = _yaml.node_get(host_file_desc, str, 'host_path', default_value=None)
- optional = _yaml.node_get(host_file_desc, bool, 'optional', default_value=False)
+ path = host_file.get_str('path')
+ host_path = host_file.get_str('host_path', default=None)
+ optional = host_file.get_bool('optional', default=False)
mount = HostMount(path, host_path, optional)
self._shell_host_files.append(mount)
@@ -753,16 +759,16 @@ class Project():
# Element and Source type configurations will be composited later onto
# element/source types, so we delete it from here and run our final
# assertion after.
- output.element_overrides = _yaml.node_get(config, dict, 'elements', default_value={})
- output.source_overrides = _yaml.node_get(config, dict, 'sources', default_value={})
- _yaml.node_del(config, 'elements', safe=True)
- _yaml.node_del(config, 'sources', safe=True)
- _yaml.node_final_assertions(config)
+ output.element_overrides = config.get_mapping('elements', default={})
+ output.source_overrides = config.get_mapping('sources', default={})
+ config.safe_del('elements')
+ config.safe_del('sources')
+ config._assert_fully_composited()
self._load_plugin_factories(config, output)
# Load project options
- options_node = _yaml.node_get(config, dict, 'options', default_value={})
+ options_node = config.get_mapping('options', default={})
output.options.load(options_node)
if self.junction:
# load before user configuration
@@ -770,7 +776,7 @@ class Project():
# Collect option values specified in the user configuration
overrides = self._context.get_overrides(self.name)
- override_options = _yaml.node_get(overrides, dict, 'options', default_value={})
+ override_options = overrides.get_mapping('options', default={})
output.options.load_yaml_values(override_options)
if self._cli_options:
output.options.load_cli_values(self._cli_options, ignore_unknown=ignore_unknown)
@@ -789,10 +795,10 @@ class Project():
output.options.process_node(output.source_overrides)
# Load base variables
- output.base_variables = _yaml.node_get(config, dict, 'variables')
+ output.base_variables = config.get_mapping('variables')
# Add the project name as a default variable
- _yaml.node_set(output.base_variables, 'project-name', self.name)
+ output.base_variables['project-name'] = self.name
# Extend variables with automatic variables and option exports
# Initialize it as a string as all variables are processed as strings.
@@ -800,32 +806,32 @@ class Project():
# a max-jobs value of around 8-10 seems sensible when enough cores are available;
# users should set values based on workload and build infrastructure
platform = Platform.get_platform()
- _yaml.node_set(output.base_variables, 'max-jobs', str(platform.get_cpu_count(8)))
+ output.base_variables['max-jobs'] = str(platform.get_cpu_count(8))
# Export options into variables, if that was requested
output.options.export_variables(output.base_variables)
# Override default_mirror if not set by command-line
- output.default_mirror = self._default_mirror or _yaml.node_get(overrides, str,
- 'default-mirror', default_value=None)
+ output.default_mirror = self._default_mirror or overrides.get_str(
+ 'default-mirror', default=None)
- mirrors = _yaml.node_get(config, list, 'mirrors', default_value=[])
+ mirrors = config.get_sequence('mirrors', default=[])
for mirror in mirrors:
allowed_mirror_fields = [
'name', 'aliases'
]
- _yaml.node_validate(mirror, allowed_mirror_fields)
- mirror_name = _yaml.node_get(mirror, str, 'name')
+ mirror.validate_keys(allowed_mirror_fields)
+ mirror_name = mirror.get_str('name')
alias_mappings = {}
- for alias_mapping, uris in _yaml.node_items(_yaml.node_get(mirror, dict, 'aliases')):
- assert isinstance(uris, list)
- alias_mappings[alias_mapping] = list(uris)
+ for alias_mapping, uris in mirror.get_mapping('aliases').items():
+ assert type(uris) is SequenceNode # pylint: disable=unidiomatic-typecheck
+ alias_mappings[alias_mapping] = uris.as_str_list()
output.mirrors[mirror_name] = alias_mappings
if not output.default_mirror:
output.default_mirror = mirror_name
# Source url aliases
- output._aliases = _yaml.node_get(config, dict, 'aliases', default_value={})
+ output._aliases = config.get_mapping('aliases', default={})
# _find_project_dir()
#
@@ -869,7 +875,7 @@ class Project():
plugin_element_origins = [] # Origins of custom elements
# Plugin origins and versions
- origins = _yaml.node_get(config, list, 'plugins', default_value=[])
+ origins = config.get_sequence('plugins', default=[])
source_format_versions = {}
element_format_versions = {}
for origin in origins:
@@ -878,9 +884,9 @@ class Project():
'package-name', 'path',
]
allowed_origins = ['core', 'local', 'pip']
- _yaml.node_validate(origin, allowed_origin_fields)
+ origin.validate_keys(allowed_origin_fields)
- origin_value = _yaml.node_get(origin, str, 'origin')
+ origin_value = origin.get_str('origin')
if origin_value not in allowed_origins:
raise LoadError(
LoadErrorReason.INVALID_YAML,
@@ -888,26 +894,26 @@ class Project():
.format(origin_value))
# Store source versions for checking later
- source_versions = _yaml.node_get(origin, dict, 'sources', default_value={})
- for key in _yaml.node_keys(source_versions):
+ source_versions = origin.get_mapping('sources', default={})
+ for key in source_versions.keys():
if key in source_format_versions:
raise LoadError(
LoadErrorReason.INVALID_YAML,
"Duplicate listing of source '{}'".format(key))
- source_format_versions[key] = _yaml.node_get(source_versions, int, key)
+ source_format_versions[key] = source_versions.get_int(key)
# Store element versions for checking later
- element_versions = _yaml.node_get(origin, dict, 'elements', default_value={})
- for key in _yaml.node_keys(element_versions):
+ element_versions = origin.get_mapping('elements', default={})
+ for key in element_versions.keys():
if key in element_format_versions:
raise LoadError(
LoadErrorReason.INVALID_YAML,
"Duplicate listing of element '{}'".format(key))
- element_format_versions[key] = _yaml.node_get(element_versions, int, key)
+ element_format_versions[key] = element_versions.get_int(key)
# Store the origins if they're not 'core'.
# core elements are loaded by default, so storing is unnecessary.
- if _yaml.node_get(origin, str, 'origin') != 'core':
+ if origin.get_str('origin') != 'core':
self._store_origin(origin, 'sources', plugin_source_origins)
self._store_origin(origin, 'elements', plugin_element_origins)
@@ -938,20 +944,20 @@ class Project():
raise LoadError(LoadErrorReason.INVALID_DATA,
"Unexpected plugin group: {}, expecting {}"
.format(plugin_group, expected_groups))
- node_keys = [key for key in _yaml.node_keys(origin)]
- if plugin_group in node_keys:
- origin_node = _yaml.node_copy(origin)
- plugins = _yaml.node_get(origin, dict, plugin_group, default_value={})
- _yaml.node_set(origin_node, 'plugins', [k for k in _yaml.node_keys(plugins)])
+ if plugin_group in origin.keys():
+ origin_node = origin.clone()
+ plugins = origin.get_mapping(plugin_group, default={})
+ origin_node['plugins'] = plugins.keys()
+
for group in expected_groups:
if group in origin_node:
- _yaml.node_del(origin_node, group)
+ del origin_node[group]
- if _yaml.node_get(origin_node, str, 'origin') == 'local':
- path = self.get_path_from_node(origin, 'path',
+ if origin_node.get_str('origin') == 'local':
+ path = self.get_path_from_node(origin.get_scalar('path'),
check_is_dir=True)
# paths are passed in relative to the project, but must be absolute
- _yaml.node_set(origin_node, 'path', os.path.join(self.directory, path))
+ origin_node['path'] = os.path.join(self.directory, path)
destination.append(origin_node)
# _warning_is_fatal():
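
The node_copy()/composite() replacements used throughout this file reduce to
the following sketch; the YAML content is made up, while clone() and
_composite() (source composited on top of target) are the methods used above:

    from buildstream import _yaml

    defaults = _yaml.load_data("variables:\n  prefix: /usr\n  debug: 'false'\n")
    project = _yaml.load_data("variables:\n  prefix: /opt\n")

    config = defaults.clone()    # replaces _yaml.node_copy(defaults)
    project._composite(config)   # replaces _yaml.composite(config, project)

    variables = config.get_mapping('variables')
    assert variables.get_str('prefix') == '/opt'   # overridden by the project
    assert variables.get_str('debug') == 'false'   # inherited from the defaults
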
diff --git a/src/buildstream/_projectrefs.py b/src/buildstream/_projectrefs.py
index 09205a7c3..0555488c8 100644
--- a/src/buildstream/_projectrefs.py
+++ b/src/buildstream/_projectrefs.py
@@ -19,6 +19,7 @@
import os
from . import _yaml
+from .node import _new_synthetic_file
from ._exceptions import LoadError, LoadErrorReason
@@ -63,15 +64,15 @@ class ProjectRefs():
def load(self, options):
try:
self._toplevel_node = _yaml.load(self._fullpath, shortname=self._base_name, copy_tree=True)
- provenance = _yaml.node_get_provenance(self._toplevel_node)
- self._toplevel_save = provenance.toplevel
+ provenance = self._toplevel_node.get_provenance()
+ self._toplevel_save = provenance._toplevel
# Process any project options immediately
options.process_node(self._toplevel_node)
# Run any final assertions on the project.refs, just in case there
# are list composition directives or anything left unprocessed.
- _yaml.node_final_assertions(self._toplevel_node)
+ self._toplevel_node._assert_fully_composited()
except LoadError as e:
if e.reason != LoadErrorReason.MISSING_FILE:
@@ -79,15 +80,15 @@ class ProjectRefs():
# Ignore failure if the file doesn't exist; it'll be created and
# for now just assumed to be empty
- self._toplevel_node = _yaml.new_synthetic_file(self._fullpath)
+ self._toplevel_node = _new_synthetic_file(self._fullpath)
self._toplevel_save = self._toplevel_node
- _yaml.node_validate(self._toplevel_node, ['projects'])
+ self._toplevel_node.validate_keys(['projects'])
# Ensure we create our toplevel entry point on the fly here
for node in [self._toplevel_node, self._toplevel_save]:
if 'projects' not in node:
- _yaml.node_set(node, 'projects', _yaml.new_empty_node(ref_node=node))
+ node['projects'] = {}
# lookup_ref()
#
@@ -121,35 +122,34 @@ class ProjectRefs():
# Looks up a ref node in the project.refs file, creates one if ensure is True.
#
def _lookup(self, toplevel, project, element, source_index, *, ensure=False):
+ projects = toplevel.get_mapping('projects')
+
# Fetch the project
try:
- projects = _yaml.node_get(toplevel, dict, 'projects')
- project_node = _yaml.node_get(projects, dict, project)
+ project_node = projects.get_mapping(project)
except LoadError:
if not ensure:
return None
- project_node = _yaml.new_empty_node(ref_node=projects)
- _yaml.node_set(projects, project, project_node)
+ projects[project] = {}
+ project_node = projects.get_mapping(project)
# Fetch the element
try:
- element_list = _yaml.node_get(project_node, list, element)
+ element_list = project_node.get_sequence(element)
except LoadError:
if not ensure:
return None
- element_list = []
- _yaml.node_set(project_node, element, element_list)
+ project_node[element] = []
+ element_list = project_node.get_sequence(element)
# Fetch the source index
try:
- node = element_list[source_index]
+ node = element_list.mapping_at(source_index)
except IndexError:
if not ensure:
return None
- # Pad the list with newly created empty dictionaries
- _yaml.node_extend_list(project_node, element, source_index + 1, {})
-
- node = _yaml.node_get(project_node, dict, element, indices=[source_index])
+ element_list.append({})
+ node = element_list.mapping_at(source_index)
return node
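
The rewritten _lookup() relies on plain Python containers being wrapped into
synthetic nodes on assignment; a sketch of that round trip using only the
operations from this hunk:

    from buildstream import _yaml

    toplevel = _yaml.load_data("projects: {}\n")
    projects = toplevel.get_mapping('projects')

    projects['demo'] = {}                  # plain dicts are wrapped on assignment
    project_node = projects.get_mapping('demo')

    project_node['element.bst'] = []
    element_list = project_node.get_sequence('element.bst')

    element_list.append({})                # pad with an empty mapping node
    node = element_list.mapping_at(0)      # typed indexing replaces indices=[...]
    node['ref'] = 'deadbeef'
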
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index 705e0a5d3..3c32ff616 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -930,7 +930,7 @@ class Stream():
}
workspaces.append(workspace_detail)
- _yaml.dump({
+ _yaml.roundtrip_dump({
'workspaces': workspaces
})
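
A sketch of the new call; roundtrip_dump() takes plain Python data directly,
and is assumed to write to stdout when no file is given, just as the removed
dump() wrapper did:

    from buildstream import _yaml

    _yaml.roundtrip_dump({
        'workspaces': [{'element': 'base.bst', 'directory': '/tmp/ws'}]
    })
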
diff --git a/src/buildstream/_variables.pyx b/src/buildstream/_variables.pyx
index 9b8b5a902..470feddc9 100644
--- a/src/buildstream/_variables.pyx
+++ b/src/buildstream/_variables.pyx
@@ -24,7 +24,7 @@ import re
import sys
from ._exceptions import LoadError, LoadErrorReason
-from . cimport _yaml
+from .node cimport MappingNode
# Variables are allowed to have dashes here
#
@@ -65,11 +65,11 @@ PARSE_EXPANSION = re.compile(r"\%\{([a-zA-Z][a-zA-Z0-9_-]*)\}")
#
cdef class Variables:
- cdef _yaml.Node original
+ cdef MappingNode original
cdef dict _expstr_map
cdef public dict flat
- def __init__(self, _yaml.Node node):
+ def __init__(self, MappingNode node):
self.original = node
self._expstr_map = self._resolve(node)
self.flat = self._flatten()
@@ -115,20 +115,20 @@ cdef class Variables:
#
# Here we resolve all of our inputs into a dictionary, ready for use
# in subst()
- cdef dict _resolve(self, _yaml.Node node):
+ cdef dict _resolve(self, MappingNode node):
# Special case: if notparallel is specified in the variables for this
# element, then override max-jobs to be 1.
# Initialize it as a string as all variables are processed as strings.
#
- if _yaml.node_get(node, bool, 'notparallel', None, False):
- _yaml.node_set(node, 'max-jobs', str(1))
+ if node.get_bool('notparallel', False):
+ node['max-jobs'] = str(1)
cdef dict ret = {}
cdef str key
cdef str value
- for key in _yaml.node_keys(node):
- value = <str> _yaml.node_get(node, str, key)
+ for key in node.keys():
+ value = node.get_str(key)
ret[sys.intern(key)] = _parse_expstr(value)
return ret
@@ -139,7 +139,7 @@ cdef class Variables:
for var in expstr[1::2]:
if var not in self._expstr_map:
line = " unresolved variable '{unmatched}' in declaration of '{variable}' at: {provenance}"
- provenance = _yaml.node_get_provenance(self.original, key)
+ provenance = expstr.get_provenance()
summary.append(line.format(unmatched=var, variable=key, provenance=provenance))
if summary:
raise LoadError(LoadErrorReason.UNRESOLVED_VARIABLE,
@@ -153,7 +153,7 @@ cdef class Variables:
continue
if var in visited:
raise LoadError(LoadErrorReason.RECURSIVE_VARIABLE,
- "{}: ".format(_yaml.node_get_provenance(self.original, var)) +
+ "{}: ".format(self.original.get_scalar(var).get_provenance()) +
("Variable '{}' expands to contain a reference to itself. " +
"Perhaps '{}' contains '%{{{}}}").format(var, visited[-1], var))
visited.append(var)
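
The notparallel special case now reads as a typed lookup with a positional
default; a sketch with made-up values, assuming string scalars coerce to
booleans as they did under node_get():

    from buildstream import _yaml

    variables = _yaml.load_data("notparallel: 'true'\nmax-jobs: '8'\n")

    # get_bool() replaces _yaml.node_get(node, bool, ...); item assignment
    # replaces _yaml.node_set()
    if variables.get_bool('notparallel', False):
        variables['max-jobs'] = str(1)

    assert variables.get_str('max-jobs') == '1'
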
diff --git a/src/buildstream/_workspaces.py b/src/buildstream/_workspaces.py
index 9fbfb7e63..2cda5a215 100644
--- a/src/buildstream/_workspaces.py
+++ b/src/buildstream/_workspaces.py
@@ -21,6 +21,7 @@ import os
from . import utils
from . import _yaml
+from .node import MappingNode, ScalarNode
from ._exceptions import LoadError, LoadErrorReason
@@ -114,7 +115,8 @@ class WorkspaceProject():
def load(cls, directory):
workspace_file = os.path.join(directory, WORKSPACE_PROJECT_FILE)
if os.path.exists(workspace_file):
- data_dict = _yaml.node_sanitize(_yaml.roundtrip_load(workspace_file), dict_type=dict)
+ data_dict = _yaml.roundtrip_load(workspace_file)
+
return cls.from_dict(directory, data_dict)
else:
return None
@@ -125,7 +127,7 @@ class WorkspaceProject():
#
def write(self):
os.makedirs(self._directory, exist_ok=True)
- _yaml.dump(self.to_dict(), self.get_filename())
+ _yaml.roundtrip_dump(self.to_dict(), self.get_filename())
# get_filename()
#
@@ -530,7 +532,7 @@ class Workspaces():
}
}
os.makedirs(self._bst_directory, exist_ok=True)
- _yaml.dump(config, self._get_filename())
+ _yaml.roundtrip_dump(config, self._get_filename())
# _load_config()
#
@@ -570,23 +572,21 @@ class Workspaces():
#
def _parse_workspace_config(self, workspaces):
try:
- version = _yaml.node_get(workspaces, int, 'format-version', default_value=0)
+ version = workspaces.get_int('format-version', default=0)
except ValueError:
raise LoadError(LoadErrorReason.INVALID_DATA,
"Format version is not an integer in workspace configuration")
if version == 0:
# The pre-versioning format can come in two forms
- for element, config in _yaml.node_items(workspaces):
- if _yaml.is_node(config):
- # Get a dict
- config = _yaml.node_sanitize(config, dict_type=dict)
+ for element, config in workspaces.items():
+ config_type = type(config)
- if isinstance(config, str):
+ if config_type is ScalarNode:
pass
- elif isinstance(config, dict):
- sources = list(config.items())
+ elif config_type is MappingNode:
+ sources = list(config.values())
if len(sources) > 1:
detail = "There are multiple workspaces open for '{}'.\n" + \
"This is not supported anymore.\n" + \
@@ -594,22 +594,21 @@ class Workspaces():
raise LoadError(LoadErrorReason.INVALID_DATA,
detail.format(element, self._get_filename()))
- _yaml.node_set(workspaces, element, sources[0][1])
+ workspaces[element] = sources[0]
else:
raise LoadError(LoadErrorReason.INVALID_DATA,
"Workspace config is in unexpected format.")
res = {
- element: Workspace(self._toplevel_project, path=config)
- for element, config in _yaml.node_items(workspaces)
+ element: Workspace(self._toplevel_project, path=config.as_str())
+ for element, config in workspaces.items()
}
elif 1 <= version <= BST_WORKSPACE_FORMAT_VERSION:
- workspaces = _yaml.node_get(workspaces, dict, "workspaces",
- default_value=_yaml.new_empty_node())
+ workspaces = workspaces.get_mapping("workspaces", default={})
res = {element: self._load_workspace(node)
- for element, node in _yaml.node_items(workspaces)}
+ for element, node in workspaces.items()}
else:
raise LoadError(LoadErrorReason.INVALID_DATA,
@@ -630,13 +629,15 @@ class Workspaces():
# (Workspace): A newly instantiated Workspace
#
def _load_workspace(self, node):
+ running_files = node.get_mapping('running_files', default=None)
+ if running_files:
+ running_files = running_files._strip_node_info()
+
dictionary = {
- 'prepared': _yaml.node_get(node, bool, 'prepared', default_value=False),
- 'path': _yaml.node_get(node, str, 'path'),
- 'last_successful': _yaml.node_get(node, str, 'last_successful', default_value=None),
- 'running_files': _yaml.node_sanitize(
- _yaml.node_get(node, dict, 'running_files', default_value=None),
- dict_type=dict),
+ 'prepared': node.get_bool('prepared', default=False),
+ 'path': node.get_str('path'),
+ 'last_successful': node.get_str('last_successful', default=None),
+ 'running_files': running_files,
}
return Workspace.from_dict(self._toplevel_project, dictionary)
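
_strip_node_info() takes over from node_sanitize() for turning a node back
into plain Python data; a sketch of the running_files handling above, with
made-up workspace content:

    from buildstream import _yaml

    node = _yaml.load_data(
        "path: /tmp/ws\n"
        "running_files:\n"
        "  base.bst: [src/main.c]\n"
    )

    running_files = node.get_mapping('running_files', default=None)
    if running_files:
        # A plain dict of plain lists, safe for Node-unaware code
        running_files = running_files._strip_node_info()

    assert running_files == {'base.bst': ['src/main.c']}
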
diff --git a/src/buildstream/_yaml.pxd b/src/buildstream/_yaml.pxd
deleted file mode 100644
index 3cbad0d11..000000000
--- a/src/buildstream/_yaml.pxd
+++ /dev/null
@@ -1,46 +0,0 @@
-#
-# Copyright (C) 2019 Bloomberg L.P.
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library. If not, see <http://www.gnu.org/licenses/>.
-#
-# Authors:
-# Benjamin Schubert <bschubert@bloomberg.net>
-
-# Documentation for each class and method here can be found in the adjacent
-# implementation file (_yaml.pyx)
-
-cdef class Node:
-
- cdef public object value
- cdef public int file_index
- cdef public int line
- cdef public int column
-
-
-cdef class ProvenanceInformation:
-
- cdef public Node node
- cdef str displayname
- cdef public str filename, shortname
- cdef public int col, line
- cdef public object project, toplevel
- cdef public bint is_synthetic
-
-
-cpdef void node_del(Node node, str key, bint safe=*) except *
-cpdef object node_get(Node node, object expected_type, str key, list indices=*, object default_value=*, bint allow_none=*)
-cpdef void node_validate(Node node, list valid_keys) except *
-cpdef void node_set(Node node, object key, object value, list indices=*) except *
-cpdef list node_keys(Node node)
-cpdef ProvenanceInformation node_get_provenance(Node node, str key=*, list indices=*)
diff --git a/src/buildstream/_yaml.pyx b/src/buildstream/_yaml.pyx
index b1fee9be3..be8e3dc18 100644
--- a/src/buildstream/_yaml.pyx
+++ b/src/buildstream/_yaml.pyx
@@ -21,136 +21,22 @@
# James Ennis <james.ennis@codethink.co.uk>
# Benjamin Schubert <bschubert@bloomberg.net>
+import datetime
import sys
-import string
from contextlib import ExitStack
from collections import OrderedDict
-from collections.abc import Mapping, Sequence
-from copy import deepcopy
+from collections.abc import Mapping
from ruamel import yaml
-from ._exceptions import LoadError, LoadErrorReason
-
-
-# Without this, pylint complains about all the `type(foo) is blah` checks
-# because it feels isinstance() is more idiomatic. Sadly, it is much slower to
-# do `isinstance(foo, blah)` for reasons I am unable to fathom. As such, we
-# blanket disable the check for this module.
-#
-# pylint: disable=unidiomatic-typecheck
-
-
-# Node()
-#
-# Container for YAML loaded data and its provenance
-#
-# All nodes returned (and all internal lists/strings) have this type (rather
-# than a plain tuple, to distinguish them in things like node_sanitize)
-#
-# Members:
-# value (str/list/dict): The loaded value.
-# file_index (int): Index within _FILE_LIST (a list of loaded file paths).
-# Negative indices indicate synthetic nodes so that
-# they can be referenced.
-# line (int): The line number within the file where the value appears.
-# col (int): The column number within the file where the value appears.
-#
-cdef class Node:
-
- def __init__(self, object value, int file_index, int line, int column):
- self.value = value
- self.file_index = file_index
- self.line = line
- self.column = column
-
- def __contains__(self, what):
- # Delegate to the inner value, though this will likely not work
- # very well if the node is a list or string, it's unlikely that
- # code which has access to such nodes would do this.
- return what in self.value
-
-
-# Metadata container for a yaml toplevel node.
-#
-# This class contains metadata around a yaml node in order to be able
-# to trace back the provenance of a node to the file.
-#
-cdef class FileInfo:
-
- cdef str filename, shortname, displayname
- cdef Node toplevel,
- cdef object project
-
- def __init__(self, str filename, str shortname, str displayname, Node toplevel, object project):
- self.filename = filename
- self.shortname = shortname
- self.displayname = displayname
- self.toplevel = toplevel
- self.project = project
-
-# File name handling
-cdef _FILE_LIST = []
-
-
-# A purely synthetic node will have _SYNTHETIC_FILE_INDEX for the file number, a line number
-# of zero, and a negative column number which comes from inverting the next value
-# out of this counter. Synthetic nodes created with a reference node will
-# have a file number from the reference node, some unknown line number, and
-# a negative column number from this counter.
-cdef int _SYNTHETIC_FILE_INDEX = -1
-cdef int __counter = 0
-
-cdef int next_synthetic_counter():
- global __counter
- __counter -= 1
- return __counter
-
-
-# Returned from node_get_provenance
-cdef class ProvenanceInformation:
-
- def __init__(self, Node nodeish):
- cdef FileInfo fileinfo
-
- self.node = nodeish
- if (nodeish is None) or (nodeish.file_index == _SYNTHETIC_FILE_INDEX):
- self.filename = ""
- self.shortname = ""
- self.displayname = ""
- self.line = 1
- self.col = 0
- self.toplevel = None
- self.project = None
- else:
- fileinfo = <FileInfo> _FILE_LIST[nodeish.file_index]
- self.filename = fileinfo.filename
- self.shortname = fileinfo.shortname
- self.displayname = fileinfo.displayname
- # We add 1 here to convert from computerish to humanish
- self.line = nodeish.line + 1
- self.col = nodeish.column
- self.toplevel = fileinfo.toplevel
- self.project = fileinfo.project
- self.is_synthetic = (self.filename == '') or (self.col < 0)
-
- # Convert a Provenance to a string for error reporting
- def __str__(self):
- if self.is_synthetic:
- return "{} [synthetic node]".format(self.displayname)
- else:
- return "{} [line {:d} column {:d}]".format(self.displayname, self.line, self.col)
+from ._exceptions import LoadError, LoadErrorReason
+from . cimport node
+from .node cimport MappingNode, ScalarNode, SequenceNode
# These exceptions are intended to be caught entirely within
# the BuildStream framework, hence they do not reside in the
# public exceptions.py
-class CompositeError(Exception):
- def __init__(self, path, message):
- super().__init__(message)
- self.path = path
- self.message = message
-
class YAMLLoadError(Exception):
pass
@@ -232,7 +118,7 @@ cdef class Representer:
# Returns:
# (Node or None): Return the Node instance of the top level mapping or
# None if there wasn't one.
- cdef Node get_output(self):
+ cdef MappingNode get_output(self):
if len(self.output):
return self.output[0]
return None
@@ -280,7 +166,7 @@ cdef class Representer:
return RepresenterState.doc
cdef RepresenterState _handle_doc_MappingStartEvent(self, object ev):
- newmap = Node({}, self._file_index, ev.start_mark.line, ev.start_mark.column)
+ newmap = MappingNode.__new__(MappingNode, self._file_index, ev.start_mark.line, ev.start_mark.column, {})
self.output.append(newmap)
return RepresenterState.wait_key
@@ -290,14 +176,14 @@ cdef class Representer:
cdef RepresenterState _handle_wait_value_ScalarEvent(self, object ev):
key = self.keys.pop()
- (<dict> (<Node> self.output[-1]).value)[key] = \
- Node(ev.value, self._file_index, ev.start_mark.line, ev.start_mark.column)
+ (<MappingNode> self.output[-1]).value[key] = \
+ ScalarNode.__new__(ScalarNode, self._file_index, ev.start_mark.line, ev.start_mark.column, ev.value)
return RepresenterState.wait_key
cdef RepresenterState _handle_wait_value_MappingStartEvent(self, object ev):
cdef RepresenterState new_state = self._handle_doc_MappingStartEvent(ev)
key = self.keys.pop()
- (<dict> (<Node> self.output[-2]).value)[key] = self.output[-1]
+ (<MappingNode> self.output[-2]).value[key] = self.output[-1]
return new_state
cdef RepresenterState _handle_wait_key_MappingEndEvent(self, object ev):
@@ -305,7 +191,7 @@ cdef class Representer:
# unless it's the last one in which case we leave it
if len(self.output) > 1:
self.output.pop()
- if type((<Node> self.output[-1]).value) is list:
+ if type(self.output[-1]) is SequenceNode:
return RepresenterState.wait_list_item
else:
return RepresenterState.wait_key
@@ -313,14 +199,16 @@ cdef class Representer:
return RepresenterState.doc
cdef RepresenterState _handle_wait_value_SequenceStartEvent(self, object ev):
- self.output.append(Node([], self._file_index, ev.start_mark.line, ev.start_mark.column))
- (<dict> (<Node> self.output[-2]).value)[self.keys[-1]] = self.output[-1]
+ self.output.append(SequenceNode.__new__(
+ SequenceNode, self._file_index, ev.start_mark.line, ev.start_mark.column, []))
+ (<MappingNode> self.output[-2]).value[self.keys[-1]] = self.output[-1]
return RepresenterState.wait_list_item
cdef RepresenterState _handle_wait_list_item_SequenceStartEvent(self, object ev):
- self.keys.append(len((<Node> self.output[-1]).value))
- self.output.append(Node([], self._file_index, ev.start_mark.line, ev.start_mark.column))
- (<list> (<Node> self.output[-2]).value).append(self.output[-1])
+ self.keys.append(len((<SequenceNode> self.output[-1]).value))
+ self.output.append(SequenceNode.__new__(
+ SequenceNode, self._file_index, ev.start_mark.line, ev.start_mark.column, []))
+ (<SequenceNode> self.output[-2]).value.append(self.output[-1])
return RepresenterState.wait_list_item
cdef RepresenterState _handle_wait_list_item_SequenceEndEvent(self, object ev):
@@ -335,13 +223,13 @@ cdef class Representer:
return RepresenterState.wait_key
cdef RepresenterState _handle_wait_list_item_ScalarEvent(self, object ev):
- (<Node> self.output[-1]).value.append(
- Node(ev.value, self._file_index, ev.start_mark.line, ev.start_mark.column))
+ (<SequenceNode> self.output[-1]).value.append(
+ ScalarNode.__new__(ScalarNode, self._file_index, ev.start_mark.line, ev.start_mark.column, ev.value))
return RepresenterState.wait_list_item
cdef RepresenterState _handle_wait_list_item_MappingStartEvent(self, object ev):
cdef RepresenterState new_state = self._handle_doc_MappingStartEvent(ev)
- (<list> (<Node> self.output[-2]).value).append(self.output[-1])
+ (<SequenceNode> self.output[-2]).value.append(self.output[-1])
return new_state
cdef RepresenterState _handle_doc_DocumentEndEvent(self, object ev):
@@ -366,7 +254,9 @@ cdef class Representer:
#
# Raises: LoadError
#
-cpdef Node load(str filename, str shortname=None, bint copy_tree=False, object project=None):
+cpdef MappingNode load(str filename, str shortname=None, bint copy_tree=False, object project=None):
+ cdef MappingNode data
+
if not shortname:
shortname = filename
@@ -376,10 +266,7 @@ cpdef Node load(str filename, str shortname=None, bint copy_tree=False, object p
else:
displayname = shortname
- cdef Py_ssize_t file_number = len(_FILE_LIST)
- _FILE_LIST.append(FileInfo(filename, shortname, displayname, None, project))
-
- cdef Node data
+ cdef Py_ssize_t file_number = node._create_new_file(filename, shortname, displayname, project)
try:
with open(filename) as f:
@@ -404,9 +291,8 @@ cpdef Node load(str filename, str shortname=None, bint copy_tree=False, object p
# Like load(), but doesn't require the data to be in a file
#
-cpdef Node load_data(str data, int file_index=_SYNTHETIC_FILE_INDEX, str file_name=None, bint copy_tree=False):
+cpdef MappingNode load_data(str data, int file_index=node._SYNTHETIC_FILE_INDEX, str file_name=None, bint copy_tree=False):
cdef Representer rep
- cdef FileInfo f_info
try:
rep = Representer(file_index)
@@ -426,936 +312,54 @@ cpdef Node load_data(str data, int file_index=_SYNTHETIC_FILE_INDEX, str file_na
raise LoadError(LoadErrorReason.INVALID_YAML,
"Severely malformed YAML:\n\n{}\n\n".format(e)) from e
- if type(contents) != Node:
+ if type(contents) != MappingNode:
# Special case allowance for None, when the loaded file has only comments in it.
if contents is None:
- contents = Node({}, file_index, 0, 0)
+ contents = MappingNode.__new__(MappingNode, file_index, 0, 0, {})
else:
raise LoadError(LoadErrorReason.INVALID_YAML,
"YAML file has content of type '{}' instead of expected type 'dict': {}"
.format(type(contents[0]).__name__, file_name))
# Store this away because we'll use it later for "top level" provenance
- if file_index != _SYNTHETIC_FILE_INDEX:
- f_info = <FileInfo> _FILE_LIST[file_index]
-
- _FILE_LIST[file_index] = FileInfo(
- f_info.filename,
- f_info.shortname,
- f_info.displayname,
- contents,
- f_info.project,
- )
+ node._set_root_node_for_file(file_index, contents)
if copy_tree:
- contents = node_copy(contents)
+ contents = contents.clone()
return contents
-# dump()
-#
-# Write a YAML node structure out to disk.
-#
-# This will always call `node_sanitize` on its input, so if you wanted
-# to output something close to what you read in, consider using the
-# `roundtrip_load` and `roundtrip_dump` function pair instead.
-#
-# Args:
-# contents (any): Content to write out
-# filename (str): The (optional) file name to write out to
-def dump(object contents, str filename=None):
- roundtrip_dump(node_sanitize(contents), file=filename)
-
-
-# node_get_provenance()
-#
-# Gets the provenance for a node
-#
-# Args:
-# node (Node): a dictionary
-# key (str): key in the dictionary
-# indices (list of indexes): Index path, in the case of list values
-#
-# Returns: The Provenance of the dict, member or list element
-#
-cpdef ProvenanceInformation node_get_provenance(Node node, str key=None, list indices=None):
- assert type(node.value) is dict
-
- if key is None:
- # Retrieving the provenance for this node directly
- return ProvenanceInformation(node)
-
- if key and not indices:
- return ProvenanceInformation(node.value.get(key))
-
- cdef Node nodeish = <Node> node.value.get(key)
- for idx in indices:
- nodeish = <Node> nodeish.value[idx]
-
- return ProvenanceInformation(nodeish)
-
-
-# A sentinel to be used as a default argument for functions that need
-# to distinguish between a kwarg set to None and an unset kwarg.
-_sentinel = object()
-
-
-# node_get()
-#
-# Fetches a value from a dictionary node and checks it for
-# an expected value. Use default_value when parsing a value
-# which is only optionally supplied.
-#
-# Args:
-# node (dict): The dictionary node
-# expected_type (type): The expected type for the value being searched
-# key (str): The key to get a value for in node
-# indices (list of ints): Optionally descend into lists of lists
-# default_value: Optionally return this value if the key is not found
-# allow_none: (bool): Allow None to be a valid value
-#
-# Returns:
-# The value if found in node, otherwise default_value is returned
-#
-# Raises:
-# LoadError, when the value found is not of the expected type
-#
-# Note:
-# Returned strings are stripped of leading and trailing whitespace
-#
-cpdef object node_get(Node node, object expected_type, str key, list indices=None, object default_value=_sentinel, bint allow_none=False):
- if indices is None:
- value = node.value.get(key, _sentinel)
-
- if value is _sentinel:
- if default_value is _sentinel:
- provenance = node_get_provenance(node)
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Dictionary did not contain expected key '{}'".format(provenance, key))
-
- value = Node(default_value, _SYNTHETIC_FILE_INDEX, 0, next_synthetic_counter())
- else:
- # Implied type check of the element itself
- # No need to synthesise useful node content as we destructure it immediately
- value = Node(node_get(node, list, key), _SYNTHETIC_FILE_INDEX, 0, 0)
- for index in indices:
- value = value.value[index]
- if type(value) is not Node:
- value = Node(value, _SYNTHETIC_FILE_INDEX, 0, 0)
-
- # Optionally allow None as a valid value for any type
- if value.value is None and (allow_none or default_value is None):
- return None
-
- if (expected_type is not None) and (type(value.value) is not expected_type):
- # Attempt basic conversions if possible; typically we want to
- # be able to specify numeric values and convert them to strings,
- # but we don't want to try converting dicts/lists
- try:
- if expected_type == bool and type(value.value) is str:
- # Don't coerce strings to booleans with bool(); that makes "False" strings evaluate to True
- # We don't structure into full nodes since there's no need.
- if value.value in ('True', 'true'):
- value = Node(True, _SYNTHETIC_FILE_INDEX, 0, 0)
- elif value.value in ('False', 'false'):
- value = Node(False, _SYNTHETIC_FILE_INDEX, 0, 0)
- else:
- raise ValueError()
- elif not (expected_type == list or
- expected_type == dict or
- isinstance(value.value, (list, dict))):
- value = Node(expected_type(value.value), _SYNTHETIC_FILE_INDEX, 0, 0)
- else:
- raise ValueError()
- except (ValueError, TypeError):
- provenance = node_get_provenance(node, key=key, indices=indices)
- if indices:
- path = [key, *["[{:d}]".format(i) for i in indices]]
- path = "".join(path)
- else:
- path = key
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Value of '{}' is not of the expected type '{}'"
- .format(provenance, path, expected_type.__name__))
-
- # Now collapse lists, and scalars, to their value, leaving nodes as-is
- if type(value.value) is not dict:
- value = value.value
-
- # Trim it at the bud: all strings loaded from yaml are stripped of leading and trailing whitespace
- if type(value) is str:
- value = value.strip()
-
- elif type(value) is list:
- # Now we create a fresh list which unwraps the str and list types
- # semi-recursively.
- value = __trim_list_provenance(value)
-
- return value
-
-
-cdef list __trim_list_provenance(list value):
- cdef list ret = []
- cdef Node entry
-
- for entry in value:
- if type(entry.value) is list:
- ret.append(__trim_list_provenance(entry.value))
- elif type(entry.value) is dict:
- ret.append(entry)
- else:
- ret.append(entry.value)
- return ret
-
-
-# node_set()
-#
-# Set an item within the node. If using `indices` be aware that the entry must
-# already exist, or else a KeyError will be raised. Use `node_extend_list` to
-# create entries before using `node_set`
-#
-# Args:
-# node (Node): The node
-# key (str): The key name
-# value: The value
-# indices: Any indices to index into the list referenced by key, like in
-# `node_get` (must be a list of integers)
-#
-cpdef void node_set(Node node, object key, object value, list indices=None) except *:
- cdef int idx
-
- if type(value) is list:
- value = __new_node_from_list(value)
-
- if indices:
- node = <Node> (<dict> node.value)[key]
- key = indices.pop()
- for idx in indices:
- node = <Node> (<list> node.value)[idx]
- if type(value) is Node:
- node.value[key] = value
- else:
- try:
- # Need to do this just in case we're modifying a list
- old_value = <Node> node.value[key]
- except KeyError:
- old_value = None
- if old_value is None:
- node.value[key] = Node(value, node.file_index, node.line, next_synthetic_counter())
- else:
- node.value[key] = Node(value, old_value.file_index, old_value.line, old_value.column)
-
-
-# node_extend_list()
-#
-# Extend a list inside a node to a given length, using the passed
-# default value to fill it out.
-#
-# Valid default values are:
-# Any string
-# An empty dict
-# An empty list
-#
-# Args:
-# node (node): The node
-# key (str): The list name in the node
-# length (int): The length to extend the list to
-# default (any): The default value to extend with.
-def node_extend_list(Node node, str key, Py_ssize_t length, object default):
- assert type(default) is str or default in ([], {})
-
- cdef Node list_node = <Node> node.value.get(key)
- if list_node is None:
- list_node = node.value[key] = Node([], node.file_index, node.line, next_synthetic_counter())
-
- cdef list the_list = list_node.value
- def_type = type(default)
-
- file_index = node.file_index
- if the_list:
- line_num = the_list[-1][2]
- else:
- line_num = list_node.line
-
- while length > len(the_list):
- if def_type is str:
- value = default
- elif def_type is list:
- value = []
- else:
- value = {}
-
- line_num += 1
-
- the_list.append(Node(value, file_index, line_num, next_synthetic_counter()))
-
-
-# node_items()
-#
-# A convenience generator for iterating over loaded key/value
-# tuples in a dictionary loaded from project YAML.
-#
-# Args:
-# node (Node): The dictionary node
-#
-# Yields:
-# (str): The key name
-# (anything): The value for the key
-#
-def node_items(Node node):
- cdef str key
- cdef Node value
-
- for key, value in node.value.items():
- if type(value.value) is dict:
- yield (key, value)
- elif type(value.value) is list:
- yield (key, __trim_list_provenance(value.value))
- else:
- yield (key, value.value)
-
-
-# node_keys()
-#
-# A convenience function for listing the loaded keys
-# in a dictionary loaded from project YAML.
-#
-# Args:
-# node (Node): The dictionary node
-#
-# Returns:
-# (list): The key names
-#
-cpdef list node_keys(Node node):
- return list(node.value.keys())
-
-
-# node_del()
-#
-# A convenience function for removing a key from
-# a dictionary loaded from project YAML.
-#
-# Args:
-# node (dict): The dictionary node
-# key (str): The key we want to remove
-# safe (bool): If True, do not raise a KeyError when the key is missing
-#
-cpdef void node_del(Node node, str key, bint safe=False) except *:
- try:
- del node.value[key]
- except KeyError:
- if not safe:
- raise
-
-
-# is_node()
-#
-# A test method which returns whether or not the passed in value
-# is a valid YAML node. It is not valid to call this on a Node
-# object which is not a Mapping.
-#
-# Args:
-# maybenode (any): The object to test for nodeness
-#
-# Returns:
-# (bool): Whether or not maybenode was a Node
-#
-def is_node(maybenode):
- # It's a programming error to give this a Node which isn't a mapping
- # so assert that.
- assert (type(maybenode) is not Node) or (type(maybenode.value) is dict)
- # Now return the type check
- return type(maybenode) is Node
-
-
-# new_synthetic_file()
-#
-# Create a new synthetic mapping node, with an associated file entry
-# (in _FILE_LIST) such that later tracking can correctly determine which
-# file needs writing to in order to persist the changes.
-#
-# Args:
-# filename (str): The name of the synthetic file to create
-# project (Project): The optional project to associate this synthetic file with
-#
-# Returns:
-# (Node): An empty YAML mapping node, whose provenance is to this new
-# synthetic file
-#
-def new_synthetic_file(str filename, object project=None):
- cdef Py_ssize_t file_index = len(_FILE_LIST)
- cdef Node node = Node({}, file_index, 0, 0)
-
- _FILE_LIST.append(FileInfo(filename,
- filename,
- "<synthetic {}>".format(filename),
- node,
- project))
- return node
-
-
-# new_empty_node()
-#
-# Args:
-# ref_node (Node): Optional node whose provenance should be referenced
-#
-# Returns
-# (Node): A new empty YAML mapping node
-#
-def new_empty_node(Node ref_node=None):
- if ref_node is not None:
- return Node({}, ref_node.file_index, ref_node.line, next_synthetic_counter())
- else:
- return Node({}, _SYNTHETIC_FILE_INDEX, 0, 0)
-
-
-# new_node_from_dict()
-#
-# Args:
-# indict (dict): The input dictionary
-#
-# Returns:
-# (Node): A new synthetic YAML tree which represents this dictionary
-#
-cpdef Node new_node_from_dict(dict indict):
- cdef dict ret = {}
- cdef str k
- for k, v in indict.items():
- vtype = type(v)
- if vtype is dict:
- ret[k] = new_node_from_dict(v)
- elif vtype is list:
- ret[k] = __new_node_from_list(v)
- else:
- ret[k] = Node(str(v), _SYNTHETIC_FILE_INDEX, 0, next_synthetic_counter())
- return Node(ret, _SYNTHETIC_FILE_INDEX, 0, next_synthetic_counter())
-
-
-# Internal function to help new_node_from_dict() to handle lists
-cdef Node __new_node_from_list(list inlist):
- cdef list ret = []
- for v in inlist:
- vtype = type(v)
- if vtype is dict:
- ret.append(new_node_from_dict(v))
- elif vtype is list:
- ret.append(__new_node_from_list(v))
- else:
- ret.append(Node(str(v), _SYNTHETIC_FILE_INDEX, 0, next_synthetic_counter()))
- return Node(ret, _SYNTHETIC_FILE_INDEX, 0, next_synthetic_counter())
-
-
-# _is_composite_list
-#
-# Checks if the given node is a Mapping with array composition
-# directives.
-#
-# Args:
-# node (Node): Any node
-#
-# Returns:
-# (bool): True if node was a Mapping containing only
-# list composition directives
-#
-# Raises:
-# (LoadError): If node was a mapping and contained a mix of
-# list composition directives and other keys
-#
-cdef bint _is_composite_list(Node node):
- cdef bint has_directives = False
- cdef bint has_keys = False
- cdef str key
-
- if type(node.value) is dict:
- for key in node_keys(node):
- if key in ['(>)', '(<)', '(=)']: # pylint: disable=simplifiable-if-statement
- has_directives = True
- else:
- has_keys = True
-
- if has_keys and has_directives:
- provenance = node_get_provenance(node)
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Dictionary contains array composition directives and arbitrary keys"
- .format(provenance))
- return has_directives
-
- return False
-
-
-# _compose_composite_list()
-#
-# Composes a composite list (i.e. a dict with list composition directives)
-# on top of a target list which is a composite list itself.
-#
-# Args:
-# target (Node): A composite list
-# source (Node): A composite list
-#
-cdef void _compose_composite_list(Node target, Node source):
- clobber = source.value.get("(=)")
- prefix = source.value.get("(<)")
- suffix = source.value.get("(>)")
- if clobber is not None:
- # We want to clobber the target list
- # which basically means replacing the target list
- # with ourselves
- target.value["(=)"] = clobber
- if prefix is not None:
- target.value["(<)"] = prefix
- elif "(<)" in target.value:
- target.value["(<)"].value.clear()
- if suffix is not None:
- target.value["(>)"] = suffix
- elif "(>)" in target.value:
- target.value["(>)"].value.clear()
- else:
- # Not clobbering, so prefix the prefix and suffix the suffix
- if prefix is not None:
- if "(<)" in target.value:
- for v in reversed(prefix.value):
- target.value["(<)"].value.insert(0, v)
- else:
- target.value["(<)"] = prefix
- if suffix is not None:
- if "(>)" in target.value:
- target.value["(>)"].value.extend(suffix.value)
- else:
- target.value["(>)"] = suffix
-
-
-# _compose_list()
-#
-# Compose a composite list (a dict with composition directives) on top of a
-# simple list.
-#
-# Args:
-# target (Node): The target list to be composed into
-# source (Node): The composition list to be composed from
-#
-cdef void _compose_list(Node target, Node source):
- clobber = source.value.get("(=)")
- prefix = source.value.get("(<)")
- suffix = source.value.get("(>)")
- if clobber is not None:
- target.value.clear()
- target.value.extend(clobber.value)
- if prefix is not None:
- for v in reversed(prefix.value):
- target.value.insert(0, v)
- if suffix is not None:
- target.value.extend(suffix.value)
-
-
-# composite_dict()
-#
-# Compose one mapping node onto another
-#
-# Args:
-# target (Node): The target to compose into
-# source (Node): The source to compose from
-# path (list): The path to the current composition node
-#
-# Raises: CompositeError
-#
-cpdef void composite_dict(Node target, Node source, list path=None) except *:
- cdef str k
- cdef Node v, target_value
-
- if path is None:
- path = []
- for k, v in source.value.items():
- path.append(k)
- if type(v.value) is list:
- # List clobbers anything list-like
- target_value = target.value.get(k)
- if not (target_value is None or
- type(target_value.value) is list or
- _is_composite_list(target_value)):
- raise CompositeError(path,
- "{}: List cannot overwrite {} at: {}"
- .format(node_get_provenance(source, k),
- k,
- node_get_provenance(target, k)))
- # Looks good, clobber it
- target.value[k] = v
- elif _is_composite_list(v):
- if k not in target.value:
- # Composite list clobbers empty space
- target.value[k] = v
- elif type(target.value[k].value) is list:
- # Composite list composes into a list
- _compose_list(target.value[k], v)
- elif _is_composite_list(target.value[k]):
- # Composite list merges into composite list
- _compose_composite_list(target.value[k], v)
- else:
- # Else composing on top of normal dict or a scalar, so raise...
- raise CompositeError(path,
- "{}: Cannot compose lists onto {}".format(
- node_get_provenance(v),
- node_get_provenance(target.value[k])))
- elif type(v.value) is dict:
- # We're composing a dict into target now
- if k not in target.value:
- # Target lacks a dict at that point, make a fresh one with
- # the same provenance as the incoming dict
- target.value[k] = Node({}, v.file_index, v.line, v.column)
- if type(target.value[k].value) is not dict:
- raise CompositeError(path,
- "{}: Cannot compose dictionary onto {}".format(
- node_get_provenance(v),
- node_get_provenance(target.value[k])))
- composite_dict(target.value[k], v, path)
- else:
- target_value = target.value.get(k)
- if target_value is not None and type(target_value.value) is not str:
- raise CompositeError(path,
- "{}: Cannot compose scalar on non-scalar at {}".format(
- node_get_provenance(v),
- node_get_provenance(target.value[k])))
- target.value[k] = v
- path.pop()
-
-
-# Like composite_dict(), but raises an all-purpose LoadError for convenience
-#
-cpdef void composite(Node target, Node source) except *:
- assert type(source.value) is dict
- assert type(target.value) is dict
-
- try:
- composite_dict(target, source)
- except CompositeError as e:
- source_provenance = node_get_provenance(source)
- error_prefix = ""
- if source_provenance:
- error_prefix = "{}: ".format(source_provenance)
- raise LoadError(LoadErrorReason.ILLEGAL_COMPOSITE,
- "{}Failure composing {}: {}"
- .format(error_prefix,
- e.path,
- e.message)) from e
-
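To make the list composition directives above concrete, a hedged sketch; the file names and keys are hypothetical:

.. code-block:: python

   from buildstream import _yaml

   # base.yml:              overlay.yml:
   #   flags:                 flags:
   #   - '-O2'                  (>):
   #                            - '-g'
   base = _yaml.load('base.yml')
   overlay = _yaml.load('overlay.yml')

   # '(>)' appends, '(<)' prepends and '(=)' replaces outright, so
   # after composition base's 'flags' is ['-O2', '-g'].
   _yaml.composite(base, overlay)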
-
-# Like composite(target, source), but where target overrides source instead.
-#
-def composite_and_move(Node target, Node source):
- composite(source, target)
-
- cdef str key
- cdef Node value
- cdef list to_delete = [key for key in target.value.keys() if key not in source.value]
- for key, value in source.value.items():
- target.value[key] = value
- for key in to_delete:
- del target.value[key]
-
-
-# Types we can short-circuit in node_sanitize for speed.
-__SANITIZE_SHORT_CIRCUIT_TYPES = (int, float, str, bool)
-
-
-# node_sanitize()
-#
-# Returns an alphabetically ordered recursive copy
-# of the source node with internal provenance information stripped.
-#
-# Only dicts are sorted; list elements keep their original order.
-#
-cpdef object node_sanitize(object node, object dict_type=OrderedDict):
- node_type = type(node)
-
- # If we have an unwrappable node, unwrap it
- if node_type is Node:
- node = node.value
- node_type = type(node)
-
- # Short-circuit None which occurs ca. twice per element
- if node is None:
- return node
-
- # Next short-circuit integers, floats, strings and booleans
- if node_type in __SANITIZE_SHORT_CIRCUIT_TYPES:
- return node
-
- # Now short-circuit lists.
- elif node_type is list:
- return [node_sanitize(elt, dict_type=dict_type) for elt in node]
-
- # Finally dict, and other Mappings need special handling
- elif node_type is dict:
- result = dict_type()
-
- key_list = [key for key, _ in node.items()]
- for key in sorted(key_list):
- result[key] = node_sanitize(node[key], dict_type=dict_type)
-
- return result
-
- # Sometimes we're handed tuples and we can't be sure what they contain,
- # so we have to recurse into them and sanitize their contents
- elif node_type is tuple:
- return tuple([node_sanitize(v, dict_type=dict_type) for v in node])
-
- # Everything else just gets returned as-is.
- return node
-
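A small sketch of node_sanitize() above, assuming `node` is a loaded node whose plain data is to be serialized:

.. code-block:: python

   import json
   from buildstream import _yaml

   # Provenance is stripped and mappings come back alphabetically
   # ordered, so the serialized output is stable across runs.
   plain = _yaml.node_sanitize(node)
   print(json.dumps(plain))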
-
-# node_validate()
-#
-# Validate the node so as to ensure the user has not specified
-# any keys which are unrecognized by buildstream (usually this
-# means a typo which would otherwise not trigger an error).
-#
-# Args:
-# node (Node): A dictionary loaded from YAML
-# valid_keys (list): A list of valid keys for the specified node
-#
-# Raises:
-# LoadError: In the case that the specified node contained
-# one or more invalid keys
-#
-cpdef void node_validate(Node node, list valid_keys) except *:
-
- # Probably the fastest way to do this: https://stackoverflow.com/a/23062482
- cdef set valid_keys_set = set(valid_keys)
- cdef str key
-
- for key in node.value:
- if key not in valid_keys_set:
- provenance = node_get_provenance(node, key=key)
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Unexpected key: {}".format(provenance, key))
-
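A minimal sketch of node_validate() above; the key list shown is illustrative only:

.. code-block:: python

   from buildstream import _yaml

   # A typo such as 'enviroment' in the YAML raises a LoadError whose
   # message carries the provenance (file, line, column) of the bad key.
   _yaml.node_validate(node, ['kind', 'sources', 'depends', 'environment'])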
-
-# Node copying
-#
-# Unfortunately we copy nodes a *lot* and `isinstance()` is super-slow when
-# things from collections.abc get involved. The result is the following
-# intricate but substantially faster group of tuples and the use of `in`.
-#
-# If any of the {node,list}_copy routines raise a ValueError
-# then it's likely additional types need adding to these tuples.
-
-
-# These types just have their value copied
-__QUICK_TYPES = (str, bool)
-
-# These are the directives used to compose lists, we need this because it's
-# slightly faster during the node_final_assertions checks
-__NODE_ASSERT_COMPOSITION_DIRECTIVES = ('(>)', '(<)', '(=)')
-
-
-# node_copy()
-#
-# Make a deep copy of the given YAML node, preserving provenance.
-#
-# Args:
-# source (Node): The YAML node to copy
-#
-# Returns:
-# (Node): A deep copy of source with provenance preserved.
-#
-cpdef Node node_copy(Node source):
- cdef dict copy = {}
- cdef str key
- cdef Node value
-
- for key, value in source.value.items():
- value_type = type(value.value)
- if value_type is dict:
- copy[key] = node_copy(value)
- elif value_type is list:
- copy[key] = _list_copy(value)
- elif value_type in __QUICK_TYPES:
- copy[key] = value
- else:
- raise ValueError("Unable to be quick about node_copy of {}".format(value_type))
-
- return Node(copy, source.file_index, source.line, source.column)
-
-
-# Internal function to help node_copy() but for lists.
-cdef Node _list_copy(Node source):
- cdef list copy = []
- cdef Node item
-
- for item in source.value:
- item_type = type(item.value)
-
- if item_type is dict:
- copy.append(node_copy(item))
- elif item_type is list:
- copy.append(_list_copy(item))
- elif item_type in __QUICK_TYPES:
- copy.append(item)
- else:
- raise ValueError("Unable to be quick about list_copy of {}".format(item_type))
-
- return Node(copy, source.file_index, source.line, source.column)
-
-
-# node_final_assertions()
-#
-# This must be called on a fully loaded and composited node,
-# after all composition has completed.
-#
-# Args:
-# node (Mapping): The final composited node
-#
-# Raises:
-# (LoadError): If any assertions fail
-#
-cpdef void node_final_assertions(Node node) except *:
- cdef str key
- cdef Node value
-
- for key, value in node.value.items():
-
- # Assert that list composition directives don't remain; this
- # indicates that the user intended to override a list which
- # never existed in the underlying data
- #
- if key in __NODE_ASSERT_COMPOSITION_DIRECTIVES:
- provenance = node_get_provenance(node, key)
- raise LoadError(LoadErrorReason.TRAILING_LIST_DIRECTIVE,
- "{}: Attempt to override non-existing list".format(provenance))
- value_type = type(value.value)
- if value_type is dict:
- node_final_assertions(value)
- elif value_type is list:
- _list_final_assertions(value)
-
-
-# Helper function for node_final_assertions(), but for lists.
-def _list_final_assertions(Node values):
- for value in values.value:
- value_type = type(value.value)
-
- if value_type is dict:
- node_final_assertions(value)
- elif value_type is list:
- _list_final_assertions(value)
-
-
-# assert_symbol_name()
-#
-# A helper function to check if a loaded string is a valid symbol
-# name and to raise a consistent LoadError if not. For strings which
-# are required to be symbols.
-#
-# Args:
-# provenance (Provenance): The provenance of the loaded symbol, or None
-# symbol_name (str): The loaded symbol name
-# purpose (str): The purpose of the string, for an error message
-# allow_dashes (bool): Whether dashes are allowed for this symbol
-#
-# Raises:
-# LoadError: If the symbol_name is invalid
-#
-# Note that dashes are generally preferred for variable names and
-# usage in YAML, but things such as option names which will be
-# evaluated with jinja2 cannot use dashes.
-def assert_symbol_name(ProvenanceInformation provenance, str symbol_name, str purpose, *, bint allow_dashes=True):
- cdef str valid_chars = string.digits + string.ascii_letters + '_'
- if allow_dashes:
- valid_chars += '-'
-
- cdef bint valid = True
- if not symbol_name:
- valid = False
- elif any(x not in valid_chars for x in symbol_name):
- valid = False
- elif symbol_name[0] in string.digits:
- valid = False
-
- if not valid:
- detail = "Symbol names must contain only alphanumeric characters, " + \
- "may not start with a digit, and may contain underscores"
- if allow_dashes:
- detail += " or dashes"
-
- message = "Invalid symbol name for {}: '{}'".format(purpose, symbol_name)
- if provenance is not None:
- message = "{}: {}".format(provenance, message)
-
- raise LoadError(LoadErrorReason.INVALID_SYMBOL_NAME,
- message, detail=detail)
-
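A hedged sketch of assert_symbol_name() above; it assumes `node` has a 'name' key to take provenance from:

.. code-block:: python

   from buildstream import _yaml

   provenance = _yaml.node_get_provenance(node, key='name')

   # 'my-element_1' passes; 'my element' or '1element' would raise a
   # LoadError with reason INVALID_SYMBOL_NAME. Pass allow_dashes=False
   # for symbols that must later survive jinja2 evaluation.
   _yaml.assert_symbol_name(provenance, 'my-element_1', 'element name')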
-
-# node_find_target()
-#
-# Searches the given node tree for the given target node.
-#
-# This is typically used when trying to walk a path to a given node
-# for the purpose of then modifying a similar tree of objects elsewhere
-#
-# If the key is provided, then we actually hunt for the node represented by
-# target[key] and return its container, rather than hunting for target directly
-#
-# Args:
-# node (Node): The node at the root of the tree to search
-# target (Node): The node you are looking for in that tree
-# key (str): Optional string key within target node
-#
-# Returns:
-# (list): A path from `node` to `target` or None if `target` is not in the subtree
-cpdef list node_find_target(Node node, Node target, str key=None):
- if key is not None:
- target = target.value[key]
-
- cdef list path = []
- if _walk_find_target(node, path, target):
- if key:
- # Remove key from end of path
- path = path[:-1]
- return path
- return None
-
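A brief sketch of node_find_target() above; `root` and `target` are hypothetical nodes from the same loaded tree:

.. code-block:: python

   from buildstream import _yaml

   # The result is a path of mapping keys and list indices leading from
   # 'root' down to 'target', e.g. ['sources', 0, 'url'], or None.
   path = _yaml.node_find_target(root, target)
   if path is not None:
       print('found at', path)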
-
-# Helper for node_find_target() which walks a value
-cdef bint _walk_find_target(Node node, list path, Node target):
- if node.file_index == target.file_index and node.line == target.line and node.column == target.column:
- return True
- elif type(node.value) is dict:
- return _walk_dict_node(node, path, target)
- elif type(node.value) is list:
- return _walk_list_node(node, path, target)
- return False
-
-
-# Helper for node_find_target() which walks a list
-cdef bint _walk_list_node(Node node, list path, Node target):
- cdef int i
- cdef Node v
-
- for i, v in enumerate(node.value):
- path.append(i)
- if _walk_find_target(v, path, target):
- return True
- del path[-1]
- return False
-
-
-# Helper for node_find_target() which walks a mapping
-cdef bint _walk_dict_node(Node node, list path, Node target):
- cdef str k
- cdef Node v
-
- for k, v in node.value.items():
- path.append(k)
- if _walk_find_target(v, path, target):
- return True
- del path[-1]
- return False
-
-
-###############################################################################
-
-# Roundtrip code
+###############################################################################
+
+# Roundtrip code
+
+# Represent Nodes automatically
+def represent_mapping(self, MappingNode mapping):
+ return self.represent_dict(mapping.value)
+
+
+def represent_scalar(self, ScalarNode scalar):
+ return self.represent_str(scalar.value)
+
+
+def represent_sequence(self, SequenceNode sequence):
+ return self.represent_list(sequence.value)
+
+
+yaml.RoundTripRepresenter.add_representer(MappingNode, represent_mapping)
+yaml.RoundTripRepresenter.add_representer(ScalarNode, represent_scalar)
+yaml.RoundTripRepresenter.add_representer(SequenceNode, represent_sequence)
+
+
+# Represent simple types as strings
+def represent_as_str(self, value):
+ return self.represent_str(str(value))
+
+
+yaml.RoundTripRepresenter.add_representer(type(None), represent_as_str)
+yaml.RoundTripRepresenter.add_representer(int, represent_as_str)
+yaml.RoundTripRepresenter.add_representer(float, represent_as_str)
+yaml.RoundTripRepresenter.add_representer(bool, represent_as_str)
+yaml.RoundTripRepresenter.add_representer(datetime.datetime, represent_as_str)
+yaml.RoundTripRepresenter.add_representer(datetime.date, represent_as_str)
# Always represent things consistently:
@@ -1487,33 +491,6 @@ def roundtrip_load_data(contents, *, filename=None):
# file (any): The file to write to
#
def roundtrip_dump(contents, file=None):
- assert type(contents) is not Node
-
- def stringify_dict(thing):
- for k, v in thing.items():
- if type(v) is str:
- pass
- elif isinstance(v, Mapping):
- stringify_dict(v)
- elif isinstance(v, Sequence):
- stringify_list(v)
- else:
- thing[k] = str(v)
-
- def stringify_list(thing):
- for i, v in enumerate(thing):
- if type(v) is str:
- pass
- elif isinstance(v, Mapping):
- stringify_dict(v)
- elif isinstance(v, Sequence):
- stringify_list(v)
- else:
- thing[i] = str(v)
-
- contents = deepcopy(contents)
- stringify_dict(contents)
-
with ExitStack() as stack:
if type(file) is str:
from . import utils
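With the representers registered above, roundtrip_dump() no longer needs the manual stringification that this hunk removes. A hedged sketch with illustrative data:

.. code-block:: python

   from buildstream import _yaml

   # The int and bool values are emitted as strings by the representers
   # registered on RoundTripRepresenter; no deepcopy pass is required.
   _yaml.roundtrip_dump({'version': 1, 'strict': False}, file='out.yml')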
diff --git a/src/buildstream/buildelement.py b/src/buildstream/buildelement.py
index 158f5fc11..b79876843 100644
--- a/src/buildstream/buildelement.py
+++ b/src/buildstream/buildelement.py
@@ -169,7 +169,7 @@ class BuildElement(Element):
# FIXME: Currently this forcefully validates configurations
# for all BuildElement subclasses so they are unable to
# extend the configuration
- self.node_validate(node, _command_steps)
+ node.validate_keys(_command_steps)
for command_name in _legacy_command_steps:
if command_name in _command_steps:
@@ -281,14 +281,11 @@ class BuildElement(Element):
# Private Local Methods #
#############################################################
def __get_commands(self, node, name):
- list_node = self.node_get_member(node, list, name, [])
- commands = []
-
- for i in range(len(list_node)):
- command = self.node_subst_list_element(node, name, [i])
- commands.append(command)
-
- return commands
+ raw_commands = node.get_sequence(name, []).as_str_list()
+ return [
+ self.substitute_variables(command)
+ for command in raw_commands
+ ]
def __run_command(self, sandbox, cmd):
# Note the -e switch to 'sh' means to exit with an error
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index 9e6e7a81f..570e473e9 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -97,6 +97,7 @@ from . import _cachekey
from . import _signals
from . import _site
from ._platform import Platform
+from .node import Node, _sentinel as _node_sentinel
from .plugin import Plugin
from .sandbox import SandboxFlags, SandboxCommandError
from .sandbox._config import SandboxConfig
@@ -253,7 +254,7 @@ class Element(Plugin):
# Collect the composited variables and resolve them
variables = self.__extract_variables(project, meta)
- _yaml.node_set(variables, 'element-name', self.name)
+ variables['element-name'] = self.name
self.__variables = Variables(variables)
# Collect the composited environment now that we have variables
@@ -484,12 +485,15 @@ class Element(Plugin):
return None
- def node_subst_member(self, node, member_name, default=_yaml._sentinel):
+ def substitute_variables(self, value):
+ return self.__variables.subst(value)
+
+ def node_subst_member(self, node, member_name, default=_node_sentinel):
"""Fetch the value of a string node member, substituting any variables
in the loaded value with the element contextual variables.
Args:
- node (dict): A dictionary loaded from YAML
+ node (:class:`MappingNode <buildstream.node.MappingNode>`): A MappingNode loaded from YAML
member_name (str): The name of the member to fetch
default (str): A value to return when *member_name* is not specified in *node*
@@ -499,10 +503,6 @@ class Element(Plugin):
Raises:
:class:`.LoadError`: When *member_name* is not found and no *default* was provided
- This is essentially the same as :func:`~buildstream.plugin.Plugin.node_get_member`
- except that it assumes the expected type is a string and will also perform variable
- substitutions.
-
**Example:**
.. code:: python
@@ -511,18 +511,18 @@ class Element(Plugin):
# variables in the returned string
name = self.node_subst_member(node, 'name')
"""
- value = self.node_get_member(node, str, member_name, default)
+ value = node.get_str(member_name, default)
try:
return self.__variables.subst(value)
except LoadError as e:
- provenance = _yaml.node_get_provenance(node, key=member_name)
+ provenance = node.get_scalar(member_name).get_provenance()
raise LoadError(e.reason, '{}: {}'.format(provenance, e), detail=e.detail) from e
def node_subst_list(self, node, member_name):
"""Fetch a list from a node member, substituting any variables in the list
Args:
- node (dict): A dictionary loaded from YAML
+ node (:class:`MappingNode <buildstream.node.MappingNode>`): A MappingNode loaded from YAML
member_name (str): The name of the member to fetch (a list)
Returns:
@@ -531,61 +531,16 @@ class Element(Plugin):
Raises:
:class:`.LoadError`
- This is essentially the same as :func:`~buildstream.plugin.Plugin.node_get_member`
- except that it assumes the expected type is a list of strings and will also
- perform variable substitutions.
"""
- value = self.node_get_member(node, list, member_name)
ret = []
- for index, x in enumerate(value):
+ for value in node.get_sequence(member_name):
try:
- ret.append(self.__variables.subst(x))
+ ret.append(self.__variables.subst(value.as_str()))
except LoadError as e:
- provenance = _yaml.node_get_provenance(node, key=member_name, indices=[index])
+ provenance = value.get_provenance()
raise LoadError(e.reason, '{}: {}'.format(provenance, e), detail=e.detail) from e
return ret
- def node_subst_list_element(self, node, member_name, indices):
- """Fetch the value of a list element from a node member, substituting any variables
- in the loaded value with the element contextual variables.
-
- Args:
- node (dict): A dictionary loaded from YAML
- member_name (str): The name of the member to fetch
- indices (list of int): List of indices to search, in case of nested lists
-
- Returns:
- The value of the list element in *member_name* at the specified *indices*
-
- Raises:
- :class:`.LoadError`
-
- This is essentially the same as :func:`~buildstream.plugin.Plugin.node_get_list_element`
- except that it assumes the expected type is a string and will also perform variable
- substitutions.
-
- **Example:**
-
- .. code:: python
-
- # Fetch the list itself
- strings = self.node_get_member(node, list, 'strings')
-
- # Iterate over the list indices
- for i in range(len(strings)):
-
- # Fetch the strings in this list, substituting content
- # with our element's variables if needed
- string = self.node_subst_list_element(
- node, 'strings', [ i ])
- """
- value = self.node_get_list_element(node, str, member_name, indices)
- try:
- return self.__variables.subst(value)
- except LoadError as e:
- provenance = _yaml.node_get_provenance(node, key=member_name, indices=indices)
- raise LoadError(e.reason, '{}: {}'.format(provenance, e), detail=e.detail) from e
-
def compute_manifest(self, *, include=None, exclude=None, orphans=True):
"""Compute and return this element's selective manifest
@@ -853,9 +808,9 @@ class Element(Plugin):
if bstdata is not None:
with sandbox.batch(SandboxFlags.NONE):
- commands = self.node_get_member(bstdata, list, 'integration-commands', [])
- for i in range(len(commands)):
- cmd = self.node_subst_list_element(bstdata, 'integration-commands', [i])
+ commands = bstdata.get_sequence('integration-commands', []).as_str_list()
+ for command in commands:
+ cmd = self.substitute_variables(command)
sandbox.run(['sh', '-e', '-c', cmd], 0, env=environment, cwd='/',
label=cmd)
@@ -884,7 +839,7 @@ class Element(Plugin):
domain (str): A public domain name to fetch data for
Returns:
- (dict): The public data dictionary for the given domain
+ :class:`MappingNode <buildstream.node.MappingNode>`: The public data dictionary for the given domain
.. note::
@@ -895,9 +850,9 @@ class Element(Plugin):
if self.__dynamic_public is None:
self.__load_public_data()
- data = _yaml.node_get(self.__dynamic_public, dict, domain, default_value=None)
+ data = self.__dynamic_public.get_mapping(domain, default=None)
if data is not None:
- data = _yaml.node_copy(data)
+ data = data.clone()
return data
@@ -906,7 +861,7 @@ class Element(Plugin):
Args:
domain (str): A public domain name to fetch data for
- data (dict): The public data dictionary for the given domain
+ data (:class:`MappingNode <buildstream.node.MappingNode>`): The public data dictionary for the given domain
This allows an element to dynamically mutate public data of
        elements or add new domains as the result of successful completion
@@ -917,9 +872,9 @@ class Element(Plugin):
self.__load_public_data()
if data is not None:
- data = _yaml.node_copy(data)
+ data = data.clone()
- _yaml.node_set(self.__dynamic_public, domain, data)
+ self.__dynamic_public[domain] = data
def get_environment(self):
"""Fetch the environment suitable for running in the sandbox
@@ -928,7 +883,7 @@ class Element(Plugin):
(dict): A dictionary of string key/values suitable for passing
to :func:`Sandbox.run() <buildstream.sandbox.Sandbox.run>`
"""
- return _yaml.node_sanitize(self.__environment)
+ return self.__environment
def get_variable(self, varname):
"""Fetch the value of a variable resolved for this element.
@@ -1677,7 +1632,7 @@ class Element(Plugin):
# By default, the dynamic public data is the same as the static public data.
# The plugin's assemble() method may modify this, though.
- self.__dynamic_public = _yaml.node_copy(self.__public)
+ self.__dynamic_public = self.__public.clone()
# Call the abstract plugin methods
@@ -2208,7 +2163,7 @@ class Element(Plugin):
'environment': cache_env,
'sources': [s._get_unique_key(workspace is None) for s in self.__sources],
'workspace': '' if workspace is None else workspace.get_key(self._get_project()),
- 'public': self.__public
+ 'public': self.__public._strip_node_info(),
}
self.__cache_key_dict['fatal-warnings'] = sorted(project._fatal_warnings)
@@ -2534,29 +2489,29 @@ class Element(Plugin):
@classmethod
def __compose_default_splits(cls, project, defaults, is_junction):
- element_public = _yaml.node_get(defaults, dict, 'public', default_value={})
- element_bst = _yaml.node_get(element_public, dict, 'bst', default_value={})
- element_splits = _yaml.node_get(element_bst, dict, 'split-rules', default_value={})
+ element_public = defaults.get_mapping("public", default={})
+ element_bst = element_public.get_mapping("bst", default={})
+ element_splits = element_bst.get_mapping("split-rules", default={})
if is_junction:
- splits = _yaml.node_copy(element_splits)
+ splits = element_splits.clone()
else:
assert project._splits is not None
- splits = _yaml.node_copy(project._splits)
+ splits = project._splits.clone()
# Extend project wide split rules with any split rules defined by the element
- _yaml.composite(splits, element_splits)
+ element_splits._composite(splits)
- _yaml.node_set(element_bst, 'split-rules', splits)
- _yaml.node_set(element_public, 'bst', element_bst)
- _yaml.node_set(defaults, 'public', element_public)
+ element_bst['split-rules'] = splits
+ element_public['bst'] = element_bst
+ defaults['public'] = element_public
@classmethod
def __init_defaults(cls, project, plugin_conf, kind, is_junction):
# Defaults are loaded once per class and then reused
#
if cls.__defaults is None:
- defaults = _yaml.new_empty_node()
+ defaults = Node.from_dict({})
if plugin_conf is not None:
# Load the plugin's accompanying .yaml file if one was provided
@@ -2576,9 +2531,9 @@ class Element(Plugin):
else:
elements = project.element_overrides
- overrides = _yaml.node_get(elements, dict, kind, default_value=None)
+ overrides = elements.get_mapping(kind, default=None)
if overrides:
- _yaml.composite(defaults, overrides)
+ overrides._composite(defaults)
# Set the data class wide
cls.__defaults = defaults
@@ -2588,16 +2543,16 @@ class Element(Plugin):
#
@classmethod
def __extract_environment(cls, project, meta):
- default_env = _yaml.node_get(cls.__defaults, dict, 'environment', default_value={})
+ default_env = cls.__defaults.get_mapping("environment", default={})
if meta.is_junction:
- environment = _yaml.new_empty_node()
+ environment = Node.from_dict({})
else:
- environment = _yaml.node_copy(project.base_environment)
+ environment = project.base_environment.clone()
- _yaml.composite(environment, default_env)
- _yaml.composite(environment, meta.environment)
- _yaml.node_final_assertions(environment)
+ default_env._composite(environment)
+ meta.environment._composite(environment)
+ environment._assert_fully_composited()
return environment
@@ -2607,7 +2562,7 @@ class Element(Plugin):
def __expand_environment(self, environment):
# Resolve variables in environment value strings
final_env = {}
- for key, _ in self.node_items(environment):
+ for key in environment.keys():
final_env[key] = self.node_subst_member(environment, key)
return final_env
@@ -2619,7 +2574,7 @@ class Element(Plugin):
else:
project_nocache = project.base_env_nocache
- default_nocache = _yaml.node_get(cls.__defaults, list, 'environment-nocache', default_value=[])
+ default_nocache = cls.__defaults.get_sequence('environment-nocache', default=[]).as_str_list()
element_nocache = meta.env_nocache
# Accumulate values from the element default, the project and the element
@@ -2634,21 +2589,25 @@ class Element(Plugin):
#
@classmethod
def __extract_variables(cls, project, meta):
- default_vars = _yaml.node_get(cls.__defaults, dict, 'variables',
- default_value={})
+ default_vars = cls.__defaults.get_mapping('variables', default={})
if meta.is_junction:
- variables = _yaml.node_copy(project.first_pass_config.base_variables)
+ variables = project.first_pass_config.base_variables.clone()
else:
- variables = _yaml.node_copy(project.base_variables)
+ variables = project.base_variables.clone()
- _yaml.composite(variables, default_vars)
- _yaml.composite(variables, meta.variables)
- _yaml.node_final_assertions(variables)
+ default_vars._composite(variables)
+ meta.variables._composite(variables)
+ variables._assert_fully_composited()
for var in ('project-name', 'element-name', 'max-jobs'):
- provenance = _yaml.node_get_provenance(variables, var)
- if provenance and not provenance.is_synthetic:
+ node = variables.get_node(var, allow_none=True)
+
+ if node is None:
+ continue
+
+ provenance = node.get_provenance()
+ if not provenance._is_synthetic:
raise LoadError(LoadErrorReason.PROTECTED_VARIABLE_REDEFINED,
"{}: invalid redefinition of protected variable '{}'"
.format(provenance, var))
@@ -2662,11 +2621,11 @@ class Element(Plugin):
def __extract_config(cls, meta):
# The default config is already composited with the project overrides
- config = _yaml.node_get(cls.__defaults, dict, 'config', default_value={})
- config = _yaml.node_copy(config)
+ config = cls.__defaults.get_mapping('config', default={})
+ config = config.clone()
- _yaml.composite(config, meta.config)
- _yaml.node_final_assertions(config)
+ meta.config._composite(config)
+ config._assert_fully_composited()
return config
@@ -2675,12 +2634,12 @@ class Element(Plugin):
@classmethod
def __extract_sandbox_config(cls, project, meta):
if meta.is_junction:
- sandbox_config = _yaml.new_node_from_dict({
+ sandbox_config = Node.from_dict({
'build-uid': 0,
'build-gid': 0
})
else:
- sandbox_config = _yaml.node_copy(project._sandbox)
+ sandbox_config = project._sandbox.clone()
# Get the platform to ask for host architecture
platform = Platform.get_platform()
@@ -2688,26 +2647,26 @@ class Element(Plugin):
host_os = platform.get_host_os()
# The default config is already composited with the project overrides
- sandbox_defaults = _yaml.node_get(cls.__defaults, dict, 'sandbox', default_value={})
- sandbox_defaults = _yaml.node_copy(sandbox_defaults)
+ sandbox_defaults = cls.__defaults.get_mapping('sandbox', default={})
+ sandbox_defaults = sandbox_defaults.clone()
- _yaml.composite(sandbox_config, sandbox_defaults)
- _yaml.composite(sandbox_config, meta.sandbox)
- _yaml.node_final_assertions(sandbox_config)
+ sandbox_defaults._composite(sandbox_config)
+ meta.sandbox._composite(sandbox_config)
+ sandbox_config._assert_fully_composited()
# Sandbox config, unlike others, has fixed members so we should validate them
- _yaml.node_validate(sandbox_config, ['build-uid', 'build-gid', 'build-os', 'build-arch'])
+ sandbox_config.validate_keys(['build-uid', 'build-gid', 'build-os', 'build-arch'])
- build_arch = _yaml.node_get(sandbox_config, str, 'build-arch', default_value=None)
+ build_arch = sandbox_config.get_str('build-arch', default=None)
if build_arch:
build_arch = Platform.canonicalize_arch(build_arch)
else:
build_arch = host_arch
return SandboxConfig(
- _yaml.node_get(sandbox_config, int, 'build-uid'),
- _yaml.node_get(sandbox_config, int, 'build-gid'),
- _yaml.node_get(sandbox_config, str, 'build-os', default_value=host_os),
+ sandbox_config.get_int('build-uid'),
+ sandbox_config.get_int('build-gid'),
+ sandbox_config.get_str('build-os', default=host_os),
build_arch)
# This makes a special exception for the split rules, which
@@ -2715,48 +2674,48 @@ class Element(Plugin):
#
@classmethod
def __extract_public(cls, meta):
- base_public = _yaml.node_get(cls.__defaults, dict, 'public', default_value={})
- base_public = _yaml.node_copy(base_public)
+ base_public = cls.__defaults.get_mapping('public', default={})
+ base_public = base_public.clone()
- base_bst = _yaml.node_get(base_public, dict, 'bst', default_value={})
- base_splits = _yaml.node_get(base_bst, dict, 'split-rules', default_value={})
+ base_bst = base_public.get_mapping('bst', default={})
+ base_splits = base_bst.get_mapping('split-rules', default={})
- element_public = _yaml.node_copy(meta.public)
- element_bst = _yaml.node_get(element_public, dict, 'bst', default_value={})
- element_splits = _yaml.node_get(element_bst, dict, 'split-rules', default_value={})
+ element_public = meta.public.clone()
+ element_bst = element_public.get_mapping('bst', default={})
+ element_splits = element_bst.get_mapping('split-rules', default={})
# Allow elements to extend the default splits defined in their project or
# element specific defaults
- _yaml.composite(base_splits, element_splits)
+ element_splits._composite(base_splits)
- _yaml.node_set(element_bst, 'split-rules', base_splits)
- _yaml.node_set(element_public, 'bst', element_bst)
+ element_bst['split-rules'] = base_splits
+ element_public['bst'] = element_bst
- _yaml.node_final_assertions(element_public)
+ element_public._assert_fully_composited()
return element_public
# Expand the splits in the public data using the Variables in the element
def __expand_splits(self, element_public):
- element_bst = _yaml.node_get(element_public, dict, 'bst', default_value={})
- element_splits = _yaml.node_get(element_bst, dict, 'split-rules', default_value={})
+ element_bst = element_public.get_mapping('bst', default={})
+ element_splits = element_bst.get_mapping('split-rules', default={})
# Resolve any variables in the public split rules directly
- for domain, splits in self.node_items(element_splits):
+ for domain, splits in element_splits.items():
splits = [
self.__variables.subst(split.strip())
- for split in splits
+ for split in splits.as_str_list()
]
- _yaml.node_set(element_splits, domain, splits)
+ element_splits[domain] = splits
return element_public
def __init_splits(self):
bstdata = self.get_public_data('bst')
- splits = self.node_get_member(bstdata, dict, 'split-rules')
+ splits = bstdata.get_mapping('split-rules')
self.__splits = {
- domain: re.compile('^(?:' + '|'.join([utils._glob2re(r) for r in rules]) + ')$')
- for domain, rules in self.node_items(splits)
+ domain: re.compile('^(?:' + '|'.join([utils._glob2re(r) for r in rules.as_str_list()]) + ')$')
+ for domain, rules in splits.items()
}
# __split_filter():
@@ -2857,7 +2816,7 @@ class Element(Plugin):
# If this ever changes, things will go wrong unexpectedly.
if not self.__whitelist_regex:
bstdata = self.get_public_data('bst')
- whitelist = _yaml.node_get(bstdata, list, 'overlap-whitelist', default_value=[])
+ whitelist = bstdata.get_sequence('overlap-whitelist', default=[]).as_str_list()
whitelist_expressions = [utils._glob2re(self.__variables.subst(exp.strip())) for exp in whitelist]
expression = ('^(?:' + '|'.join(whitelist_expressions) + ')$')
self.__whitelist_regex = re.compile(expression)
diff --git a/src/buildstream/node.pxd b/src/buildstream/node.pxd
new file mode 100644
index 000000000..fdfa06c70
--- /dev/null
+++ b/src/buildstream/node.pxd
@@ -0,0 +1,112 @@
+#
+# Copyright (C) 2019 Bloomberg L.P.
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+# Authors:
+# Benjamin Schubert <bschubert@bloomberg.net>
+
+# Documentation for each class and method here can be found in the adjacent
+# implementation file (_yaml.pyx)
+
+cdef class Node:
+
+ cdef int file_index
+ cdef int line
+ cdef int column
+
+ # Public Methods
+ cpdef Node clone(self)
+ cpdef ProvenanceInformation get_provenance(self)
+
+ # Private Methods used in BuildStream
+ cpdef void _assert_fully_composited(self) except *
+ cpdef object _strip_node_info(self)
+
+ # Protected Methods
+ cdef void _compose_on(self, str key, MappingNode target, list path) except *
+ cdef bint _is_composite_list(self) except *
+ cdef bint _shares_position_with(self, Node target)
+ cdef bint _walk_find(self, Node target, list path) except *
+
+
+cdef class MappingNode(Node):
+ cdef dict value
+
+ # Public Methods
+ cpdef bint get_bool(self, str key, default=*) except *
+ cpdef int get_int(self, str key, default=*) except *
+ cpdef MappingNode get_mapping(self, str key, default=*)
+ cpdef Node get_node(self, str key, list allowed_types=*, bint allow_none=*)
+ cpdef ScalarNode get_scalar(self, str key, default=*)
+ cpdef SequenceNode get_sequence(self, str key, object default=*)
+ cpdef str get_str(self, str key, object default=*)
+ cpdef object items(self)
+ cpdef list keys(self)
+ cpdef void safe_del(self, str key)
+ cpdef void validate_keys(self, list valid_keys) except *
+ cpdef object values(self)
+
+ # Private Methods used in BuildStream
+ cpdef void _composite(self, MappingNode target) except *
+ cpdef void _composite_under(self, MappingNode target) except *
+ cpdef list _find(self, Node target)
+
+ # Protected Methods
+ cdef void _compose_on_composite_dict(self, MappingNode target)
+ cdef void _compose_on_list(self, SequenceNode target)
+
+ # Private Methods
+ cdef void __composite(self, MappingNode target, list path=*) except *
+ cdef Node _get(self, str key, default, default_constructor)
+
+
+cdef class ScalarNode(Node):
+ cdef str value
+
+ # Public Methods
+ cpdef bint as_bool(self) except *
+ cpdef int as_int(self) except *
+ cpdef str as_str(self)
+ cpdef bint is_none(self)
+
+
+cdef class SequenceNode(Node):
+ cdef list value
+
+ # Public Methods
+ cpdef void append(self, object value)
+ cpdef list as_str_list(self)
+ cpdef MappingNode mapping_at(self, int index)
+ cpdef Node node_at(self, int index, list allowed_types=*)
+ cpdef ScalarNode scalar_at(self, int index)
+ cpdef SequenceNode sequence_at(self, int index)
+
+
+cdef class ProvenanceInformation:
+
+ cdef readonly Node _node
+ cdef readonly Node _toplevel
+ cdef readonly _project
+ cdef readonly bint _is_synthetic
+ cdef readonly str _filename
+ cdef readonly str _displayname
+ cdef readonly str _shortname
+ cdef readonly int _col
+ cdef readonly int _line
+
+
+cdef int _SYNTHETIC_FILE_INDEX
+cdef Py_ssize_t _create_new_file(str filename, str shortname, str displayname, object project)
+cdef void _set_root_node_for_file(Py_ssize_t file_index, MappingNode contents) except *
diff --git a/src/buildstream/node.pyx b/src/buildstream/node.pyx
new file mode 100644
index 000000000..b76d88bb5
--- /dev/null
+++ b/src/buildstream/node.pyx
@@ -0,0 +1,1556 @@
+#
+# Copyright (C) 2018 Codethink Limited
+# Copyright (C) 2019 Bloomberg LLP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+# Authors:
+# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
+# Daniel Silverstone <daniel.silverstone@codethink.co.uk>
+# James Ennis <james.ennis@codethink.co.uk>
+# Benjamin Schubert <bschubert@bloomberg.net>
+
+"""
+Node - Parsed YAML configuration
+================================
+
+This module contains the building blocks for handling YAML configuration.
+
+Everything that is loaded from YAML is encapsulated in such nodes, which
+provide helper methods to validate configuration on access.
+
+Using node methods when reading configuration will ensure that errors
+are always coherently notified to the user.
+
+
+Node types
+----------
+
+The most important classes defined here are:
+
+* :class:`.MappingNode`: represents a YAML Mapping (dictionary)
+* :class:`.ScalarNode`: represents a YAML Scalar (string, boolean, integer)
+* :class:`.SequenceNode`: represents a YAML Sequence (list)
+
+
+Class Reference
+---------------
+"""
+
+import string
+
+from ._exceptions import LoadError, LoadErrorReason
+
+
+# A sentinel to be used as a default argument for functions that need
+# to distinguish between a kwarg set to None and an unset kwarg.
+_sentinel = object()
+
+
+cdef class Node:
+ """This is the base class for YAML document nodes.
+
+ YAML Nodes contain information to describe the provenance of the YAML
+ which resulted in the Node, allowing mapping back from a Node to the place
+ in the file it came from.
+
+ .. note:: You should never need to create a :class:`.Node` manually.
+ If you do, you can create :class:`.Node` from dictionaries with
+ :func:`Node.from_dict() <buildstream.node.Node.from_dict>`.
+ If something else is needed, please open an issue.
+ """
+
+ def __init__(self):
+ raise NotImplementedError("Please do not construct nodes like this. Use Node.from_dict(dict) instead.")
+
+ def __cinit__(self, int file_index, int line, int column, *args):
+ self.file_index = file_index
+ self.line = line
+ self.column = column
+
+ # This is in order to ensure we never add a `Node` to a cache key
+ # as ujson will try to convert objects if they have a `__json__`
+ # attribute.
+ def __json__(self):
+ raise ValueError("Nodes should not be allowed when jsonify-ing data", self)
+
+ #############################################################
+ # Abstract Public Methods #
+ #############################################################
+
+ cpdef Node clone(self):
+ """Clone the node and return the copy.
+
+ Returns:
+ :class:`.Node`: a clone of the current node
+ """
+ raise NotImplementedError()
+
+ #############################################################
+ # Public Methods #
+ #############################################################
+
+ @classmethod
+ def from_dict(cls, dict value):
+ """from_dict(value)
+
+ Create a new node from the given dictionary.
+
+ This is a recursive operation, and will transform every value in the
+ dictionary to a :class:`.Node` instance
+
+ Valid keys are `str`.
+ Valid values are `list`, `dict`, `str`, `int`, `bool` or None;
+ `list` and `dict` values may themselves only contain such types.
+
+ Args:
+ value (dict): dictionary from which to create a node.
+
+ Raises:
+ :class:`TypeError`: when the value cannot be converted to a :class:`Node`
+
+ Returns:
+ :class:`.MappingNode`: a new mapping containing the value
+ """
+ if value:
+ return __new_node_from_dict(value, MappingNode.__new__(
+ MappingNode, __SYNTHETIC_FILE_INDEX, 0, __next_synthetic_counter(), {}))
+ else:
+ # We got an empty dict, we can shortcut
+ return MappingNode.__new__(MappingNode, __SYNTHETIC_FILE_INDEX, 0, __next_synthetic_counter(), {})
+
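A short sketch of the constructor just described; the dictionary contents are illustrative only:

.. code-block:: python

   from buildstream.node import Node

   # Every value is recursively wrapped in the appropriate Node
   # subclass with synthetic provenance, ready for composition.
   node = Node.from_dict({
       'kind': 'manual',
       'depends': ['base.bst'],
       'variables': {'strip-binaries': 'true'},
   })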
+ cpdef ProvenanceInformation get_provenance(self):
+ """A convenience accessor to obtain the node's :class:`.ProvenanceInformation`
+
+ The provenance information allows you to inform the user of where
+ a node came from. Converting the information to a string will show the file, line and column
+ in the file where the node is.
+
+ An example usage would be:
+
+ .. code-block:: python
+
+ # With `config` being your node
+ max_jobs_node = config.get_node('max-jobs')
+ max_jobs = max_jobs_node.as_int()
+
+ if max_jobs < 1: # We can't get a negative number of jobs
+ raise LoadError("Error at {}: Max jobs needs to be >= 1".format(
+ max_jobs_node.get_provenance()
+ ))
+
+ # Will print something like:
+ # element.bst [line 4, col 7]: Max jobs needs to be >= 1
+
+ Returns:
+ :class:`.ProvenanceInformation`: the provenance information for the node.
+ """
+ return ProvenanceInformation(self)
+
+ #############################################################
+ # Abstract Private Methods used in BuildStream #
+ #############################################################
+
+ # _assert_fully_composited()
+ #
+ # This must be called on a fully loaded and composited node,
+ # after all composition has completed.
+ #
+ # This checks that no more composition directives are present
+ # in the data.
+ #
+ # Raises:
+ # (LoadError): If any assertions fail
+ #
+ cpdef void _assert_fully_composited(self) except *:
+ raise NotImplementedError()
+
+ # _strip_node_info()
+ #
+ # Remove all the node information (provenance) and return
+ # the underlying data as plain python objects (list, dict, str, None)
+ #
+ cpdef object _strip_node_info(self):
+ raise NotImplementedError()
+
+ #############################################################
+ # Abstract Protected Methods #
+ #############################################################
+
+ # _compose_on(key, target, path)
+ #
+ # Compose the current node on the given target.
+ #
+ # Args:
+ # key (str): key on the target on which to compose the current value
+ # target (.Node): target node on which to compose
+ # path (list): path from the root of the target when composing recursively
+ # in order to give accurate error reporting.
+ #
+ # Raises:
+ # (_CompositeError): if an error is encountered during composition
+ #
+ cdef void _compose_on(self, str key, MappingNode target, list path) except *:
+ raise NotImplementedError()
+
+ # _is_composite_list
+ #
+ # Checks if the node is a Mapping with array composition
+ # directives.
+ #
+ # Returns:
+ # (bool): True if node was a Mapping containing only
+ # list composition directives
+ #
+ # Raises:
+ # (LoadError): If node was a mapping and contained a mix of
+ # list composition directives and other keys
+ #
+ cdef bint _is_composite_list(self) except *:
+ raise NotImplementedError()
+
+ # _walk_find(target, path)
+ #
+ # Walk the node to search for `target`.
+ #
+ # When this returns `True`, the `path` argument will contain the full path
+ # to the target from the root node.
+ #
+ # Args:
+ # target (.Node): target to find in the node tree
+ # path (list): current path from the root
+ #
+ # Returns:
+ # (bool): whether the target was found in the tree or not
+ #
+ cdef bint _walk_find(self, Node target, list path) except *:
+ raise NotImplementedError()
+
+ #############################################################
+ # Protected Methods #
+ #############################################################
+
+ # _shares_position_with(target)
+ #
+ # Check whether the current node is at the same position in its tree as the target.
+ #
+ # This is useful when we want to know if two nodes are 'identical', that is they
+ # are at the exact same position in each respective tree, but do not necessarily
+ # have the same content.
+ #
+ # Args:
+ # target (.Node): the target to compare with the current node.
+ #
+ # Returns:
+ # (bool): whether the two nodes share the same position
+ #
+ cdef bint _shares_position_with(self, Node target):
+ return (self.file_index == target.file_index and
+ self.line == target.line and
+ self.column == target.column)
+
+
+cdef class ScalarNode(Node):
+ """This class represents a Scalar (int, str, bool, None) in a YAML document.
+
+ .. note:: If you need to store another type of scalar, please open an issue
+ on the project.
+
+ .. note:: You should never have to create a :class:`.ScalarNode` directly
+ """
+
+ def __cinit__(self, int file_index, int line, int column, object value):
+ cdef value_type = type(value)
+
+ if value_type is str:
+ value = value.strip()
+ elif value_type is bool:
+ if value:
+ value = "True"
+ else:
+ value = "False"
+ elif value_type is int:
+ value = str(value)
+ elif value is None:
+ pass
+ else:
+ raise ValueError("ScalarNode can only hold str, int, bool or None objects")
+
+ self.value = value
+
+ #############################################################
+ # Public Methods #
+ #############################################################
+
+ cpdef bint as_bool(self) except *:
+ """Get the value of the node as a boolean.
+
+ .. note:: BuildStream treats the values 'True' and 'true' as True,
+ and the values 'False' and 'false' as False. Any other
+ string values (such as the otherwise valid YAML 'TRUE'
+ or 'FALSE') will be considered an error.
+
+ Raises:
+ :class:`buildstream._exceptions.LoadError`: if the value cannot be coerced to
+ a bool correctly.
+
+ Returns:
+ :class:`bool`: the value contained in the node, as a boolean
+ """
+ if type(self.value) is bool:
+ return self.value
+
+ # Don't coerce with bool(): that would make the string "False" evaluate to True
+ if self.value in ('True', 'true'):
+ return True
+ elif self.value in ('False', 'false'):
+ return False
+ else:
+ provenance = self.get_provenance()
+ path = provenance._toplevel._find(self)[-1]
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Value of '{}' is not of the expected type '{}'"
+ .format(provenance, path, bool.__name__))
+
+ cpdef int as_int(self) except *:
+ """Get the value of the node as an integer.
+
+ Raises:
+ :class:`buildstream._exceptions.LoadError`: if the value cannot be coerced to
+ an integer correctly.
+
+ Returns:
+ :class:`int`: the value contained in the node, as an integer
+ """
+ try:
+ return int(self.value)
+ except ValueError:
+ provenance = self.get_provenance()
+ path = provenance._toplevel._find(self)[-1]
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Value of '{}' is not of the expected type '{}'"
+ .format(provenance, path, int.__name__))
+
+ cpdef str as_str(self):
+ """Get the value of the node as a string.
+
+ Returns:
+ :class:`str`: the value contained in the node, as a string, or `None` if the content
+ is `None`.
+ """
+ # We keep 'None' as 'None' to simplify the API's usage and allow chaining for users
+ if self.value is None:
+ return None
+ return str(self.value)
+
+ cpdef bint is_none(self):
+ """Determine whether the current scalar is `None`.
+
+ Returns:
+ :class:`bool`: `True` if the value of the scalar is `None`, else `False`
+ """
+ return self.value is None
+
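A brief usage sketch for the scalar accessors above; `mapping` is a hypothetical MappingNode whose YAML contained `enabled: true`, `jobs: '4'` and an empty `note:` key:

.. code-block:: python

   assert mapping.get_scalar('enabled').as_bool()
   assert mapping.get_scalar('jobs').as_int() == 4
   assert mapping.get_scalar('note').is_none()

   # as_str() deliberately preserves None to allow chaining
   assert mapping.get_scalar('note').as_str() is None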
+ #############################################################
+ # Public Methods implementations #
+ #############################################################
+
+ cpdef ScalarNode clone(self):
+ return self
+
+ #############################################################
+ # Private Methods implementations #
+ #############################################################
+
+ cpdef void _assert_fully_composited(self) except *:
+ pass
+
+ cpdef object _strip_node_info(self):
+ return self.value
+
+ #############################################################
+ # Protected Methods #
+ #############################################################
+
+ cdef void _compose_on(self, str key, MappingNode target, list path) except *:
+ cdef Node target_value = target.value.get(key)
+
+ if target_value is not None and type(target_value) is not ScalarNode:
+ raise __CompositeError(path,
+ "{}: Cannot compose scalar on non-scalar at {}".format(
+ self.get_provenance(),
+ target_value.get_provenance()))
+
+ target.value[key] = self
+
+ cdef bint _is_composite_list(self) except *:
+ return False
+
+ cdef bint _walk_find(self, Node target, list path) except *:
+ return self._shares_position_with(target)
+
+
+cdef class MappingNode(Node):
+ """This class represents a Mapping (dict) in a YAML document.
+
+ It behaves mostly like a :class:`dict`, but doesn't allow untyped value access
+ (nothing of the form :code:`my_dict[my_value]`).
+
+ It also doesn't allow anything other than :class:`str` as keys, to align with YAML.
+
+ You can however use common dict operations in it:
+
+ .. code-block:: python
+
+ # Assign a new value to a key
+ my_mapping[key] = my_value
+
+ # Delete an entry
+ del my_mapping[key]
+
+ When assigning a key/value pair, the key must be a string,
+ and the value can be any of:
+
+ * a :class:`Node`, in which case the node is assigned as-is
+ * a :class:`list`, :class:`dict`, :class:`int`, :class:`str`, :class:`bool` or :class:`None`,
+ in which case the value will be converted to a :class:`Node` for you.
+
+ Therefore, all values in a :class:`.MappingNode` will be :class:`Node` instances.
+
+ .. note:: You should never create an instance directly. Use :func:`Node.from_dict() <buildstream.node.Node.from_dict>`
+ instead, which will ensure your node is correctly formatted.
+ """
+
+ def __cinit__(self, int file_index, int line, int column, dict value):
+ self.value = value
+
+ def __contains__(self, what):
+ return what in self.value
+
+ def __delitem__(self, str key):
+ del self.value[key]
+
+ def __setitem__(self, str key, object value):
+ cdef Node old_value
+
+ if type(value) in [MappingNode, ScalarNode, SequenceNode]:
+ self.value[key] = value
+ else:
+ node = __create_node_recursive(value, self)
+
+ # FIXME: Do we really want to override provenance?
+ #
+ # Related to https://gitlab.com/BuildStream/buildstream/issues/1058
+ #
+ # There are only two cases where nodes are set in the code (hence without provenance):
+ # - When automatic variables are set by the core (e.g. max-jobs)
+ # - When plugins call Element.set_public_data
+ #
+ # The first case should never throw errors, so it is of limited interest.
+ #
+ # The second is more important. What should probably be done here is to have 'set_public_data'
+ # capable of creating a fake provenance with the name of the plugin, the project and probably the
+ # element name.
+ #
+ # We would therefore have much better error messages, and would be able to get rid of most synthetic
+ # nodes.
+ old_value = self.value.get(key)
+ if old_value:
+ node.file_index = old_value.file_index
+ node.line = old_value.line
+ node.column = old_value.column
+
+ self.value[key] = node
+
+ #############################################################
+ # Public Methods #
+ #############################################################
+
+ cpdef bint get_bool(self, str key, object default=_sentinel) except *:
+ """get_bool(key, default=sentinel)
+
+ Get the value of the node for `key` as a boolean.
+
+ This is equivalent to: :code:`mapping.get_scalar(my_key, my_default).as_bool()`.
+
+ Args:
+ key (str): key for which to get the value
+ default (bool): default value to return if `key` is not in the mapping
+
+ Raises:
+ :class:`buildstream._exceptions.LoadError`: if the value at `key` is not a
+ :class:`.ScalarNode` or isn't a
+ valid `boolean`
+
+ Returns:
+ :class:`bool`: the value at `key` or the default
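+
+ **Example** (a sketch; ``node`` stands for any :class:`.MappingNode` and the
+ ``strict`` key is illustrative):
+
+ .. code-block:: python
+
+ # A missing key falls back to the default instead of raising
+ strict = node.get_bool('strict', default=True)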
+ """
+ cdef ScalarNode scalar = self.get_scalar(key, default)
+ return scalar.as_bool()
+
+ cpdef int get_int(self, str key, object default=_sentinel) except *:
+ """get_int(key, default=sentinel)
+
+ Get the value of the node for `key` as an integer.
+
+ This is equivalent to: :code:`mapping.get_scalar(my_key, my_default).as_int()`.
+
+ Args:
+ key (str): key for which to get the value
+ default (int): default value to return if `key` is not in the mapping
+
+ Raises:
+ :class:`buildstream._exceptions.LoadError`: if the value at `key` is not a
+ :class:`.ScalarNode` or isn't a
+ valid `integer`
+
+ Returns:
+ :class:`int`: the value at `key` or the default
+ """
+ cdef ScalarNode scalar = self.get_scalar(key, default)
+ return scalar.as_int()
+
+ cpdef MappingNode get_mapping(self, str key, object default=_sentinel):
+ """get_mapping(key, default=sentinel)
+
+ Get the value of the node for `key` as a :class:`.MappingNode`.
+
+ Args:
+ key (str): key for which to get the value
+ default (dict): default value to return if `key` is not in the mapping. It will be converted
+ to a :class:`.MappingNode` before being returned
+
+ Raises:
+ :class:`buildstream._exceptions.LoadError`: if the value at `key` is not a
+ :class:`.MappingNode`
+
+ Returns:
+ :class:`.MappingNode`: the value at `key` or the default
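+
+ **Example** (a sketch; ``node`` stands for any :class:`.MappingNode`):
+
+ .. code-block:: python
+
+ # The dict default is converted to a synthetic MappingNode
+ options = node.get_mapping('options', default={})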
+ """
+ value = self._get(key, default, MappingNode)
+
+ if type(value) is not MappingNode and value is not None:
+ provenance = value.get_provenance()
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Value of '{}' is not of the expected type 'dict'"
+ .format(provenance, key))
+
+ return value
+
+ cpdef Node get_node(self, str key, list allowed_types = None, bint allow_none = False):
+ """get_node(key, allowed_types=None, allow_none=False)
+
+ Get the value of the node for `key` as a :class:`.Node`.
+
+ This is useful if you have configuration that can be either a :class:`.ScalarNode` or
+ a :class:`.MappingNode` for example.
+
+ This method will validate that the value is indeed exactly one of those types (not a subclass)
+ and raise an exception accordingly.
+
+ Args:
+ key (str): key for which to get the value
+ allowed_types (list): list of subclasses of :class:`.Node` that are acceptable as return values.
+ If this is `None`, no checks are done on the return value.
+ allow_none (bool): whether to allow the return value to be `None` or not
+
+ Raises:
+ :class:`buildstream._exceptions.LoadError`: if the value at `key` is not one
+ of the expected types or if it doesn't
+ exist.
+
+ Returns:
+ :class:`.Node`: the value at `key` or `None`
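+
+ **Example** (a sketch; ``node`` stands for any :class:`.MappingNode` and the
+ ``ref`` key is illustrative):
+
+ .. code-block:: python
+
+ # Accept either a single string or a list of strings for 'ref'
+ ref = node.get_node('ref', allowed_types=[ScalarNode, SequenceNode], allow_none=True)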
+ """
+ cdef value = self.value.get(key, _sentinel)
+
+ if value is _sentinel:
+ if allow_none:
+ return None
+
+ provenance = self.get_provenance()
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Dictionary did not contain expected key '{}'".format(provenance, key))
+
+ if allowed_types and type(value) not in allowed_types:
+ provenance = self.get_provenance()
+ human_types = []
+ if MappingNode in allowed_types:
+ human_types.append("dict")
+ if SequenceNode in allowed_types:
+ human_types.append('list')
+ if ScalarNode in allowed_types:
+ human_types.append('scalar')
+
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Value of '{}' is not one of the following: {}.".format(
+ provenance, key, ", ".join(human_types)))
+
+ return value
+
+ cpdef ScalarNode get_scalar(self, str key, object default=_sentinel):
+ """get_scalar(key, default=sentinel)
+
+ Get the value of the node for `key` as a :class:`.ScalarNode`.
+
+ Args:
+ key (str): key for which to get the value
+ default (str, int, bool, None): default value to return if `key` is not in the mapping.
+ It will be converted to a :class:`.ScalarNode` before being
+ returned.
+
+ Raises:
+ :class:`buildstream._exceptions.LoadError`: if the value at `key` is not a
+ :class:`.ScalarNode`
+
+ Returns:
+ :class:`.ScalarNode`: the value at `key` or the default
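+
+ **Example** (a sketch; ``node`` stands for any :class:`.MappingNode` and the
+ ``ref`` key is illustrative):
+
+ .. code-block:: python
+
+ # Scalar accessors chain, even when the key is absent
+ ref = node.get_scalar('ref', default=None).as_str()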
+ """
+ value = self._get(key, default, ScalarNode)
+
+ if type(value) is not ScalarNode:
+ if value is None:
+ value = ScalarNode.__new__(ScalarNode, self.file_index, 0, __next_synthetic_counter(), None)
+ else:
+ provenance = value.get_provenance()
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Value of '{}' is not of the expected type 'scalar'"
+ .format(provenance, key))
+
+ return value
+
+ cpdef SequenceNode get_sequence(self, str key, object default=_sentinel):
+ """get_sequence(key, default=sentinel)
+
+ Get the value of the node for `key` as a :class:`.SequenceNode`.
+
+ Args:
+ key (str): key for which to get the value
+ default (list): default value to return if `key` is not in the mapping. It will be converted
+ to a :class:`.SequenceNode` before being returned
+
+ Raises:
+ :class:`buildstream._exceptions.LoadError`: if the value at `key` is not a
+ :class:`.SequenceNode`
+
+ Returns:
+ :class:`.SequenceNode`: the value at `key` or the default
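+
+ **Example** (a sketch; ``node`` stands for any :class:`.MappingNode` and the
+ ``include`` key is illustrative):
+
+ .. code-block:: python
+
+ # Fetch a list of strings, tolerating an absent key
+ include = node.get_sequence('include', default=[]).as_str_list()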
+ """
+ value = self._get(key, default, SequenceNode)
+
+ if type(value) is not SequenceNode and value is not None:
+ provenance = value.get_provenance()
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Value of '{}' is not of the expected type 'list'"
+ .format(provenance, key))
+
+ return value
+
+ cpdef str get_str(self, str key, object default=_sentinel):
+ """get_str(key, default=sentinel)
+
+ Get the value of the node for `key` as a string.
+
+ This is equivalent to: :code:`mapping.get_scalar(my_key, my_default).as_str()`.
+
+ Args:
+ key (str): key for which to get the value
+ default (str): default value to return if `key` is not in the mapping
+
+ Raises:
+ :class:`buildstream._exceptions.LoadError`: if the value at `key` is not a
+ :class:`.ScalarNode` or isn't a
+ valid `str`
+
+ Returns:
+ :class:`str`: the value at `key` or the default
+ """
+ cdef ScalarNode scalar = self.get_scalar(key, default)
+ return scalar.as_str()
+
+ cpdef object items(self):
+ """Get a new view of the mapping items ((key, value) pairs).
+
+ This is equivalent to running :code:`my_dict.items()` on a `dict`.
+
+ Returns:
+ :class:`dict_items`: a view on the underlying dictionary
+ """
+ return self.value.items()
+
+ cpdef list keys(self):
+ """Get the list of all keys in the mapping.
+
+ This is equivalent to running :code:`my_dict.keys()` on a `dict`.
+
+ Returns:
+ :class:`list`: a list of all keys in the mapping
+ """
+ return list(self.value.keys())
+
+ cpdef void safe_del(self, str key):
+ """safe_del(key)
+
+ Remove the entry at `key` in the dictionary if it exists.
+
+ This method is a safe equivalent to :code:`del mapping[key]`, which doesn't
+ raise an error if the key doesn't exist.
+
+ Args:
+ key (str): key to remove from the mapping
+ """
+ self.value.pop(key, None)
+
+ cpdef void validate_keys(self, list valid_keys) except *:
+ """validate_keys(valid_keys)
+
+ Validate that the node doesn't contain extra keys.
+
+ This validates the node so as to ensure the user has not specified
+ any keys which are unrecognized by BuildStream (usually this
+ means a typo which would otherwise not trigger an error).
+
+ Args:
+ valid_keys (list): A list of valid keys for the specified node
+
+ Raises:
+ :class:`buildstream._exceptions.LoadError`: In the case that the specified node contained
+ one or more invalid keys
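+
+ **Example** (a sketch, using the filter element's configuration keys):
+
+ .. code-block:: python
+
+ # Ensure the node only contains the config keys we understand
+ node.validate_keys(['include', 'exclude', 'include-orphans'])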
+ """
+
+ # Probably the fastest way to do this: https://stackoverflow.com/a/23062482
+ cdef set valid_keys_set = set(valid_keys)
+ cdef str key
+
+ for key in self.value:
+ if key not in valid_keys_set:
+ provenance = self.get_node(key).get_provenance()
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Unexpected key: {}".format(provenance, key))
+
+ cpdef object values(self):
+ """Get the values in the mapping.
+
+ This is equivalent to running :code:`my_dict.values()` on a `dict`.
+
+ Returns:
+ :class:`dict_values`: a view of all values in the mapping
+ """
+ return self.value.values()
+
+ #############################################################
+ # Public Methods implementations #
+ #############################################################
+
+ cpdef MappingNode clone(self):
+ cdef dict copy = {}
+ cdef str key
+ cdef Node value
+
+ for key, value in self.value.items():
+ copy[key] = value.clone()
+
+ return MappingNode.__new__(MappingNode, self.file_index, self.line, self.column, copy)
+
+ #############################################################
+ # Private Methods used in BuildStream #
+ #############################################################
+
+ # _composite()
+ #
+ # Compose one mapping node onto another
+ #
+ # Args:
+ # target (Node): The target to compose into
+ #
+ # Raises: LoadError
+ #
+ cpdef void _composite(self, MappingNode target) except *:
+ try:
+ self.__composite(target, [])
+ except __CompositeError as e:
+ source_provenance = self.get_provenance()
+ error_prefix = ""
+ if source_provenance:
+ error_prefix = "{}: ".format(source_provenance)
+ raise LoadError(LoadErrorReason.ILLEGAL_COMPOSITE,
+ "{}Failure composing {}: {}"
+ .format(error_prefix,
+ e.path,
+ e.message)) from e
+
+ # Like self._composite(target), but where values in the target don't get overridden by values in self.
+ #
+ cpdef void _composite_under(self, MappingNode target) except *:
+ target._composite(self)
+
+ cdef str key
+ cdef Node value
+ cdef list to_delete = [key for key in target.value.keys() if key not in self.value]
+
+ for key, value in self.value.items():
+ target.value[key] = value
+ for key in to_delete:
+ del target.value[key]
+
+ # _find()
+ #
+ # Searches the given node tree for the given target node.
+ #
+ # This is typically used when walking a path to a given node
+ # in order to then modify a similar tree of objects elsewhere
+ #
+ # Args:
+ # target (Node): The node you are looking for in that tree
+ #
+ # Returns:
+ # (list): A path from this node to `target`, or None if `target` is not in the subtree
+ cpdef list _find(self, Node target):
+ cdef list path = []
+ if self._walk_find(target, path):
+ return path
+ return None
+
+ #############################################################
+ # Private Methods implementations #
+ #############################################################
+
+ cpdef void _assert_fully_composited(self) except *:
+ cdef str key
+ cdef Node value
+
+ for key, value in self.value.items():
+ # Assert that list composition directives don't remain; this
+ # indicates that the user intended to override a list which
+ # never existed in the underlying data
+ #
+ if key in ('(>)', '(<)', '(=)'):
+ provenance = value.get_provenance()
+ raise LoadError(LoadErrorReason.TRAILING_LIST_DIRECTIVE,
+ "{}: Attempt to override non-existing list".format(provenance))
+
+ value._assert_fully_composited()
+
+ cpdef object _strip_node_info(self):
+ cdef str key
+ cdef Node value
+
+ return {key: value._strip_node_info() for key, value in self.value.items()}
+
+ #############################################################
+ # Protected Methods #
+ #############################################################
+
+ cdef void _compose_on(self, str key, MappingNode target, list path) except *:
+ cdef Node target_value
+
+ if self._is_composite_list():
+ if key not in target.value:
+ # Composite list clobbers empty space
+ target.value[key] = self
+ else:
+ target_value = target.value[key]
+
+ if type(target_value) is SequenceNode:
+ # Composite list composes into a list
+ self._compose_on_list(target_value)
+ elif target_value._is_composite_list():
+ # Composite list merges into composite list
+ self._compose_on_composite_dict(target_value)
+ else:
+ # Else composing on top of normal dict or a scalar, so raise...
+ raise __CompositeError(path,
+ "{}: Cannot compose lists onto {}".format(
+ self.get_provenance(),
+ target_value.get_provenance()))
+ else:
+ # We're composing a dict into target now
+ if key not in target.value:
+ # Target lacks a dict at that point, make a fresh one with
+ # the same provenance as the incoming dict
+ target.value[key] = MappingNode.__new__(MappingNode, self.file_index, self.line, self.column, {})
+
+ self.__composite(target.value[key], path)
+
+ # _compose_on_list(target)
+ #
+ # Compose the current node on the given sequence.
+ #
+ # Args:
+ # target (.SequenceNode): sequence on which to compose the current composite dict
+ #
+ cdef void _compose_on_list(self, SequenceNode target):
+ cdef SequenceNode clobber = self.value.get("(=)")
+ cdef SequenceNode prefix = self.value.get("(<)")
+ cdef SequenceNode suffix = self.value.get("(>)")
+
+ if clobber is not None:
+ target.value.clear()
+ target.value.extend(clobber.value)
+ if prefix is not None:
+ for v in reversed(prefix.value):
+ target.value.insert(0, v)
+ if suffix is not None:
+ target.value.extend(suffix.value)
+
+ # _compose_on_composite_dict(target)
+ #
+ # Compose the current node on the given composite dict.
+ #
+ # A composite dict is a dict that contains composition directives.
+ #
+ # Args:
+ # target (.MappingNode): composite dict on which to compose the current composite dict
+ #
+ cdef void _compose_on_composite_dict(self, MappingNode target):
+ cdef SequenceNode clobber = self.value.get("(=)")
+ cdef SequenceNode prefix = self.value.get("(<)")
+ cdef SequenceNode suffix = self.value.get("(>)")
+
+ if clobber is not None:
+ # We want to clobber the target list
+ # which basically means replacing the target list
+ # with ourselves
+ target.value["(=)"] = clobber
+ if prefix is not None:
+ target.value["(<)"] = prefix
+ elif "(<)" in target.value:
+ (<SequenceNode> target.value["(<)"]).value.clear()
+ if suffix is not None:
+ target.value["(>)"] = suffix
+ elif "(>)" in target.value:
+ (<SequenceNode> target.value["(>)"]).value.clear()
+ else:
+ # Not clobbering, so prefix the prefix and suffix the suffix
+ if prefix is not None:
+ if "(<)" in target.value:
+ for v in reversed(prefix.value):
+ (<SequenceNode> target.value["(<)"]).value.insert(0, v)
+ else:
+ target.value["(<)"] = prefix
+ if suffix is not None:
+ if "(>)" in target.value:
+ (<SequenceNode> target.value["(>)"]).value.extend(suffix.value)
+ else:
+ target.value["(>)"] = suffix
+
+ cdef bint _is_composite_list(self) except *:
+ cdef bint has_directives = False
+ cdef bint has_keys = False
+ cdef str key
+
+ for key in self.value.keys():
+ if key in ['(>)', '(<)', '(=)']:
+ has_directives = True
+ else:
+ has_keys = True
+
+ if has_keys and has_directives:
+ provenance = self.get_provenance()
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Dictionary contains array composition directives and arbitrary keys"
+ .format(provenance))
+
+ return has_directives
+
+ cdef bint _walk_find(self, Node target, list path) except *:
+ cdef str k
+ cdef Node v
+
+ if self._shares_position_with(target):
+ return True
+
+ for k, v in self.value.items():
+ path.append(k)
+ if v._walk_find(target, path):
+ return True
+ del path[-1]
+
+ return False
+
+ #############################################################
+ # Private Methods #
+ #############################################################
+
+ # __composite(target, path)
+ #
+ # Helper method to compose the current node on another.
+ #
+ # Args:
+ # target (.MappingNode): target on which to compose the current node
+ # path (list): path from the root of the target when composing recursively
+ # in order to give accurate error reporting.
+ #
+ cdef void __composite(self, MappingNode target, list path=None) except *:
+ cdef str key
+ cdef Node value
+
+ for key, value in self.value.items():
+ path.append(key)
+ value._compose_on(key, target, path)
+ path.pop()
+
+ # _get(key, default, default_constructor)
+ #
+ # Internal helper method to get an entry from the underlying dictionary.
+ #
+ # Args:
+ # key (str): the key for which to retrieve the entry
+ # default (object): default value if the entry is not present
+ # default_constructor (object): method to transform the `default` into a Node
+ # if the entry is not present
+ #
+ # Raises:
+ # (LoadError): if the key is not present and no default has been given.
+ #
+ cdef Node _get(self, str key, object default, object default_constructor):
+ value = self.value.get(key, _sentinel)
+
+ if value is _sentinel:
+ if default is _sentinel:
+ provenance = self.get_provenance()
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Dictionary did not contain expected key '{}'".format(provenance, key))
+
+ if default is None:
+ value = None
+ else:
+ value = default_constructor.__new__(
+ default_constructor, __SYNTHETIC_FILE_INDEX, 0, __next_synthetic_counter(), default)
+
+ return value
+
+
+cdef class SequenceNode(Node):
+ """This class represents a Sequence (list) in a YAML document.
+
+ It behaves mostly like a :class:`list`, but doesn't allow untyped value access
+ (Nothing of the form :code:`my_list[my_value]`).
+
+ You can however perform common list operations on it:
+
+ .. code-block:: python
+
+ # Assign a value
+ my_sequence[index] = value
+
+ # Get the length
+ len(my_sequence)
+
+ # Reverse it
+ reversed(my_sequence)
+
+ # And iterate over it
+ for value in my_sequence:
+ print(value)
+
+ All values in a :class:`SequenceNode` will be :class:`Node`.
+ """
+
+ def __cinit__(self, int file_index, int line, int column, list value):
+ self.value = value
+
+ def __iter__(self):
+ return iter(self.value)
+
+ def __len__(self):
+ return len(self.value)
+
+ def __reversed__(self):
+ return reversed(self.value)
+
+ def __setitem__(self, int key, object value):
+ cdef Node old_value
+
+ if type(value) in [MappingNode, ScalarNode, SequenceNode]:
+ self.value[key] = value
+ else:
+ node = __create_node_recursive(value, self)
+
+ # FIXME: Do we really want to override provenance?
+ # See __setitem__ on 'MappingNode' for more context
+ old_value = self.value[key]
+ if old_value:
+ node.file_index = old_value.file_index
+ node.line = old_value.line
+ node.column = old_value.column
+
+ self.value[key] = node
+
+ #############################################################
+ # Public Methods #
+ #############################################################
+
+ cpdef void append(self, object value):
+ """append(value)
+
+ Append the given object to the sequence.
+
+ Args:
+ value (object): the value to append to the list. This can either be:
+
+ - a :class:`Node`
+ - a :class:`int`, :class:`bool`, :class:`str`, :class:`None`,
+ :class:`dict` or :class:`list`. In which case, this will be
+ converted into a :class:`Node` beforehand
+
+ Raises:
+ :class:`TypeError`: when the value cannot be converted to a :class:`Node`
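+
+ **Example** (a sketch; ``sequence`` stands for any :class:`.SequenceNode`):
+
+ .. code-block:: python
+
+ # Plain values are converted to synthetic Nodes on append
+ sequence.append('new-item')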
+ """
+ if type(value) in [MappingNode, ScalarNode, SequenceNode]:
+ self.value.append(value)
+ else:
+ node = __create_node_recursive(value, self)
+ self.value.append(node)
+
+ cpdef list as_str_list(self):
+ """Get the values of the sequence as a list of strings.
+
+ Raises:
+ :class:`buildstream._exceptions.LoadError`: if the sequence contains anything
+ other than :class:`ScalarNode` objects
+
+ Returns:
+ :class:`list`: the content of the sequence as a list of strings
+ """
+ return [node.as_str() for node in self.value]
+
+ cpdef MappingNode mapping_at(self, int index):
+ """mapping_at(index)
+
+ Retrieve the entry at `index` as a :class:`.MappingNode`.
+
+ Args:
+ index (int): index for which to get the value
+
+ Raises:
+ :class:`buildstream._exceptions.LoadError`: if the value at `index` is not a
+ :class:`.MappingNode`
+ :class:`IndexError`: if no value exists at this index
+
+ Returns:
+ :class:`.MappingNode`: the value at `index`
+ """
+ value = self.value[index]
+
+ if type(value) is not MappingNode:
+ provenance = self.get_provenance()
+ path = ["[{}]".format(p) for p in provenance._toplevel._find(self)] + ["[{}]".format(index)]
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Value of '{}' is not of the expected type '{}'"
+ .format(provenance, path, MappingNode.__name__))
+ return value
+
+ cpdef Node node_at(self, int index, list allowed_types = None):
+ """node_at(index, allowed_types=None)
+
+ Retrieve the entry at `index` as a :class:`.Node`.
+
+ This is useful if you have configuration that can be either a :class:`.ScalarNode` or
+ a :class:`.MappingNode` for example.
+
+ This method will validate that the value is indeed exactly one of those types (not a subclass)
+ and raise an exception accordingly.
+
+ Args:
+ index (int): index for which to get the value
+ allowed_types (list): list of valid subtypes of :class:`.Node` that are valid return values.
+ If this is `None`, no checks are done on the return value.
+
+ Raises:
+ :class:`buildstream._exceptions.LoadError`: if the value at `index` is not of one of the
+ expected types
+ :class:`IndexError`: if no value exists at this index
+
+ Returns:
+ :class:`.Node`: the value at `index`
+ """
+ cdef value = self.value[index]
+
+ if allowed_types and type(value) not in allowed_types:
+ provenance = self.get_provenance()
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Value of '{}' is not one of the following: {}.".format(
+ provenance, index, ", ".join(t.__name__ for t in allowed_types)))
+
+ return value
+
+ cpdef ScalarNode scalar_at(self, int index):
+ """scalar_at(index)
+
+ Retrieve the entry at `index` as a :class:`.ScalarNode`.
+
+ Args:
+ index (int): index for which to get the value
+
+ Raises:
+ :class:`buildstream._exceptions.LoadError`: if the value at `index` is not a
+ :class:`.ScalarNode`
+ :class:`IndexError`: if no value exists at this index
+
+ Returns:
+ :class:`.ScalarNode`: the value at `index`
+ """
+ value = self.value[index]
+
+ if type(value) is not ScalarNode:
+ provenance = self.get_provenance()
+ path = ["[{}]".format(p) for p in provenance._toplevel._find(self)] + ["[{}]".format(index)]
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Value of '{}' is not of the expected type '{}'"
+ .format(provenance, path, ScalarNode.__name__))
+ return value
+
+ cpdef SequenceNode sequence_at(self, int index):
+ """sequence_at(index)
+
+ Retrieve the entry at `index` as a :class:`.SequenceNode`.
+
+ Args:
+ index (int): index for which to get the value
+
+ Raises:
+ :class:`buildstream._exceptions.LoadError`: if the value at `index` is not a
+ :class:`.SequenceNode`
+ :class:`IndexError`: if no value exists at this index
+
+ Returns:
+ :class:`.SequenceNode`: the value at `index`
+ """
+ value = self.value[index]
+
+ if type(value) is not SequenceNode:
+ provenance = self.get_provenance()
+ path = ["[{}]".format(p) for p in provenance._toplevel._find(self)] + ["[{}]".format(index)]
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Value of '{}' is not of the expected type '{}'"
+ .format(provenance, path, SequenceNode.__name__))
+
+ return value
+
+ #############################################################
+ # Public Methods implementations #
+ #############################################################
+
+ cpdef SequenceNode clone(self):
+ cdef list copy = []
+ cdef Node entry
+
+ for entry in self.value:
+ copy.append(entry.clone())
+
+ return SequenceNode.__new__(SequenceNode, self.file_index, self.line, self.column, copy)
+
+ #############################################################
+ # Private Methods implementations #
+ #############################################################
+
+ cpdef void _assert_fully_composited(self) except *:
+ cdef Node value
+ for value in self.value:
+ value._assert_fully_composited()
+
+ cpdef object _strip_node_info(self):
+ cdef Node value
+ return [value._strip_node_info() for value in self.value]
+
+ #############################################################
+ # Protected Methods #
+ #############################################################
+
+ cdef void _compose_on(self, str key, MappingNode target, list path) except *:
+ # List clobbers anything list-like
+ cdef Node target_value = target.value.get(key)
+
+ if not (target_value is None or
+ type(target_value) is SequenceNode or
+ target_value._is_composite_list()):
+ raise __CompositeError(path,
+ "{}: List cannot overwrite {} at: {}"
+ .format(self.get_provenance(),
+ key,
+ target_value.get_provenance()))
+ # Looks good, clobber it
+ target.value[key] = self
+
+ cdef bint _is_composite_list(self) except *:
+ return False
+
+ cdef bint _walk_find(self, Node target, list path) except *:
+ cdef int i
+ cdef Node v
+
+ if self._shares_position_with(target):
+ return True
+
+ for i, v in enumerate(self.value):
+ path.append(i)
+ if v._walk_find(target, path):
+ return True
+ del path[-1]
+
+ return False
+
+
+# Returned from Node.get_provenance
+cdef class ProvenanceInformation:
+ """Represents the location of a YAML node in a file.
+
+ It can effectively be pretty-printed to display this information consistently
+ in error messages.
+
+ You can retrieve this information for a :class:`Node` with
+ :func:`Node.get_provenance() <buildstream.node.Node.get_provenance()>`
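+
+ **Example** (a sketch; ``node`` stands for any :class:`Node`):
+
+ .. code-block:: python
+
+ # The provenance pretty-prints as 'file [line X column Y]'
+ provenance = node.get_provenance()
+ message = "{}: invalid configuration".format(provenance)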
+ """
+
+ def __init__(self, Node nodeish):
+ cdef __FileInfo fileinfo
+
+ self._node = nodeish
+ if (nodeish is None) or (nodeish.file_index == __SYNTHETIC_FILE_INDEX):
+ self._filename = ""
+ self._shortname = ""
+ self._displayname = ""
+ self._line = 1
+ self._col = 0
+ self._toplevel = None
+ self._project = None
+ else:
+ fileinfo = <__FileInfo> __FILE_LIST[nodeish.file_index]
+ self._filename = fileinfo.filename
+ self._shortname = fileinfo.shortname
+ self._displayname = fileinfo.displayname
+ # We add 1 here to convert from computerish to humanish
+ self._line = nodeish.line + 1
+ self._col = nodeish.column
+ self._toplevel = fileinfo.toplevel
+ self._project = fileinfo.project
+ self._is_synthetic = (self._filename == '') or (self._col < 0)
+
+ # Convert a Provenance to a string for error reporting
+ def __str__(self):
+ if self._is_synthetic:
+ return "{} [synthetic node]".format(self._displayname)
+ else:
+ return "{} [line {:d} column {:d}]".format(self._displayname, self._line, self._col)
+
+
+#############################################################
+# BuildStream Private methods #
+#############################################################
+
+# _assert_symbol_name()
+#
+# A helper function to check if a loaded string is a valid symbol
+# name and to raise a consistent LoadError if not. For strings which
+# are required to be symbols.
+#
+# Args:
+# symbol_name (str): The loaded symbol name
+# purpose (str): The purpose of the string, for an error message
+# ref_node (Node): The node of the loaded symbol, or None
+# allow_dashes (bool): Whether dashes are allowed for this symbol
+#
+# Raises:
+# LoadError: If the symbol_name is invalid
+#
+# Note that dashes are generally preferred for variable names and
+# usage in YAML, but things such as option names which will be
+# evaluated with jinja2 cannot use dashes.
+def _assert_symbol_name(str symbol_name, str purpose, *, Node ref_node=None, bint allow_dashes=True):
+ cdef str valid_chars = string.digits + string.ascii_letters + '_'
+ if allow_dashes:
+ valid_chars += '-'
+
+ cdef bint valid = True
+ if not symbol_name:
+ valid = False
+ elif any(x not in valid_chars for x in symbol_name):
+ valid = False
+ elif symbol_name[0] in string.digits:
+ valid = False
+
+ if not valid:
+ detail = "Symbol names must contain only alphanumeric characters, " + \
+ "may not start with a digit, and may contain underscores"
+ if allow_dashes:
+ detail += " or dashes"
+
+ message = "Invalid symbol name for {}: '{}'".format(purpose, symbol_name)
+ if ref_node:
+ provenance = ref_node.get_provenance()
+ if provenance is not None:
+ message = "{}: {}".format(provenance, message)
+
+ raise LoadError(LoadErrorReason.INVALID_SYMBOL_NAME,
+ message, detail=detail)
+
+
+# _create_new_file(filename, shortname, displayname, project)
+#
+# Create a new synthetic file and return its index in the `._FILE_LIST`.
+#
+# Args:
+# filename (str): the name to give to the file
+# shortname (str): a shorter name used when showing information on the screen
+# displayname (str): the name to give when reporting errors
+# project (object): project with which to associate the current file (when dealing with junctions)
+#
+# Returns:
+# (int): the index in the `._FILE_LIST` that identifies the new file
+#
+cdef Py_ssize_t _create_new_file(str filename, str shortname, str displayname, object project):
+ cdef Py_ssize_t file_number = len(__FILE_LIST)
+ __FILE_LIST.append(__FileInfo(filename, shortname, displayname, None, project))
+
+ return file_number
+
+
+# _set_root_node_for_file(file_index, contents)
+#
+# Set the root node for the given file
+#
+# Args:
+# file_index (int): the index in the `._FILE_LIST` for the file for which to set the root
+# contents (.MappingNode): node that should be the root for the file
+#
+cdef void _set_root_node_for_file(Py_ssize_t file_index, MappingNode contents) except *:
+ cdef __FileInfo f_info
+
+ if file_index != __SYNTHETIC_FILE_INDEX:
+ f_info = <__FileInfo> __FILE_LIST[file_index]
+ f_info.toplevel = contents
+
+
+# _new_synthetic_file()
+#
+# Create a new synthetic mapping node, with an associated file entry
+# (in _FILE_LIST) such that later tracking can correctly determine which
+# file needs writing to in order to persist the changes.
+#
+# Args:
+# filename (str): The name of the synthetic file to create
+# project (Project): The optional project to associate this synthetic file with
+#
+# Returns:
+# (Node): An empty YAML mapping node, whose provenance is to this new
+# synthetic file
+#
+def _new_synthetic_file(str filename, object project=None):
+ cdef Py_ssize_t file_index = len(__FILE_LIST)
+ cdef Node node = MappingNode.__new__(MappingNode, file_index, 0, 0, {})
+
+ __FILE_LIST.append(__FileInfo(filename,
+ filename,
+ "<synthetic {}>".format(filename),
+ node,
+ project))
+ return node
+
+
+#############################################################
+# Module local helper Methods #
+#############################################################
+
+# Purely synthetic nodes will have _SYNTHETIC_FILE_INDEX for the file number, have line number
+# zero, and a negative column number which comes from inverting the next value
+# out of this counter. Synthetic nodes created with a reference node will
+# have a file number from the reference node, some unknown line number, and
+# a negative column number from this counter.
+cdef int __SYNTHETIC_FILE_INDEX = -1
+
+# File name handling
+cdef list __FILE_LIST = []
+
+# Counter for synthetic nodes
+cdef int __counter = 0
+
+
+class __CompositeError(Exception):
+ def __init__(self, path, message):
+ super().__init__(message)
+ self.path = path
+ self.message = message
+
+
+# Metadata container for a yaml toplevel node.
+#
+# This class contains metadata around a yaml node in order to be able
+# to trace back the provenance of a node to the file.
+#
+cdef class __FileInfo:
+
+ cdef str filename, shortname, displayname
+ cdef MappingNode toplevel
+ cdef object project
+
+ def __init__(self, str filename, str shortname, str displayname, MappingNode toplevel, object project):
+ self.filename = filename
+ self.shortname = shortname
+ self.displayname = displayname
+ self.toplevel = toplevel
+ self.project = project
+
+
+cdef int __next_synthetic_counter():
+ global __counter
+ __counter -= 1
+ return __counter
+
+
+cdef Node __create_node_recursive(object value, Node ref_node):
+ cdef value_type = type(value)
+
+ if value_type is list:
+ node = __new_node_from_list(value, ref_node)
+ elif value_type in [int, str, bool]:
+ node = ScalarNode.__new__(ScalarNode, ref_node.file_index, ref_node.line, __next_synthetic_counter(), value)
+ elif value_type is dict:
+ node = __new_node_from_dict(value, ref_node)
+ else:
+ raise TypeError(
+ "Unable to assign a value of type {} to a Node.".format(value_type))
+
+ return node
+
+
+# __new_node_from_dict()
+#
+# Args:
+# indict (dict): The input dictionary
+# ref_node (Node): The dictionary to take as reference for position
+#
+# Returns:
+# (Node): A new synthetic YAML tree which represents this dictionary
+#
+cdef Node __new_node_from_dict(dict indict, Node ref_node):
+ cdef MappingNode ret = MappingNode.__new__(
+ MappingNode, ref_node.file_index, ref_node.line, __next_synthetic_counter(), {})
+ cdef str k
+
+ for k, v in indict.items():
+ ret.value[k] = __create_node_recursive(v, ref_node)
+
+ return ret
+
+
+# Internal helper for __new_node_from_dict() to handle lists
+cdef Node __new_node_from_list(list inlist, Node ref_node):
+ cdef SequenceNode ret = SequenceNode.__new__(
+ SequenceNode, ref_node.file_index, ref_node.line, __next_synthetic_counter(), [])
+
+ for v in inlist:
+ ret.value.append(__create_node_recursive(v, ref_node))
+
+ return ret
diff --git a/src/buildstream/plugin.py b/src/buildstream/plugin.py
index de969c267..9a322ab81 100644
--- a/src/buildstream/plugin.py
+++ b/src/buildstream/plugin.py
@@ -116,7 +116,6 @@ import sys
from contextlib import contextmanager
from weakref import WeakValueDictionary
-from . import _yaml
from . import utils
from ._exceptions import PluginError, ImplError
from ._message import Message, MessageType
@@ -271,7 +270,7 @@ class Plugin():
"""Configure the Plugin from loaded configuration data
Args:
- node (dict): The loaded configuration dictionary
+ node (:class:`MappingNode <buildstream.node.MappingNode>`): The loaded configuration dictionary
Raises:
:class:`.SourceError`: If it's a :class:`.Source` implementation
@@ -280,13 +279,7 @@ class Plugin():
Plugin implementors should implement this method to read configuration
data and store it.
- Plugins should use the :func:`Plugin.node_get_member() <buildstream.plugin.Plugin.node_get_member>`
- and :func:`Plugin.node_get_list_element() <buildstream.plugin.Plugin.node_get_list_element>`
- methods to fetch values from the passed `node`. This will ensure that a nice human readable error
- message will be raised if the expected configuration is not found, indicating the filename,
- line and column numbers.
-
- Further the :func:`Plugin.node_validate() <buildstream.plugin.Plugin.node_validate>` method
+ The :func:`MappingNode.validate_keys() <buildstream.node.MappingNode.validate_keys>` method
should be used to ensure that the user has not specified keys in `node` which are unsupported
by the plugin.
@@ -294,8 +287,7 @@ class Plugin():
For Elements, when variable substitution is desirable, the
:func:`Element.node_subst_member() <buildstream.element.Element.node_subst_member>`
- and :func:`Element.node_subst_list_element() <buildstream.element.Element.node_subst_list_element>`
- methods can be used.
+ method can be used.
"""
raise ImplError("{tag} plugin '{kind}' does not implement configure()".format(
tag=self.__type_tag, kind=self.get_kind()))
@@ -354,114 +346,7 @@ class Plugin():
"""
return self.__kind
- def node_items(self, node):
- """Iterate over a dictionary loaded from YAML
-
- Args:
- node (Node): The YAML loaded dictionary object
-
- Returns:
- list: List of key/value tuples to iterate over
-
- BuildStream holds some private data in dictionaries loaded from
- the YAML in order to preserve information to report in errors.
-
- This convenience function should be used instead of the dict.items()
- builtin function provided by python.
- """
- yield from _yaml.node_items(node)
-
- def node_provenance(self, node, member_name=None):
- """Gets the provenance for `node` and `member_name`
-
- This reports a string with file, line and column information suitable
- for reporting an error or warning.
-
- Args:
- node (Node): The YAML loaded dictionary object
- member_name (str): The name of the member to check, or None for the node itself
-
- Returns:
- (str): A string describing the provenance of the node and member
- """
- provenance = _yaml.node_get_provenance(node, key=member_name)
- return str(provenance)
-
- def node_get_member(self, node, expected_type, member_name, default=_yaml._sentinel, *, allow_none=False):
- """Fetch the value of a node member, raising an error if the value is
- missing or incorrectly typed.
-
- Args:
- node (Node): A dictionary loaded from YAML
- expected_type (type): The expected type of the node member
- member_name (str): The name of the member to fetch
- default (expected_type): A value to return when *member_name* is not specified in *node*
- allow_none (bool): Allow explicitly set None values in the YAML (*Since: 1.4*)
-
- Returns:
- The value of *member_name* in *node*, otherwise *default*
-
- Raises:
- :class:`.LoadError`: When *member_name* is not found and no *default* was provided
-
- Note:
- Returned strings are stripped of leading and trailing whitespace
-
- **Example:**
-
- .. code:: python
-
- # Expect a string 'name' in 'node'
- name = self.node_get_member(node, str, 'name')
-
- # Fetch an optional integer
- level = self.node_get_member(node, int, 'level', -1)
- """
- return _yaml.node_get(node, expected_type, member_name, default_value=default, allow_none=allow_none)
-
- def node_set_member(self, node, key, value):
- """Set the value of a node member
- Args:
- node (node): A dictionary loaded from YAML
- key (str): The key name
- value: The value
-
- Returns:
- None
-
- Raises:
- None
-
- **Example:**
-
- .. code:: python
-
- # Set a string 'tomjon' in node[name]
- self.node_set_member(node, 'name', 'tomjon')
- """
- _yaml.node_set(node, key, value)
-
- def new_empty_node(self):
- """Create an empty 'Node' object to be handled by BuildStream's core
- Args:
- None
-
- Returns:
- Node: An empty Node object
-
- Raises:
- None
-
- **Example:**
-
- .. code:: python
-
- # Create an empty Node object to store metadata information
- metadata = self.new_empty_node()
- """
- return _yaml.new_empty_node()
-
- def node_get_project_path(self, node, key, *,
+ def node_get_project_path(self, node, *,
check_is_file=False, check_is_dir=False):
"""Fetches a project path from a dictionary node and validates it
@@ -475,8 +360,7 @@ class Plugin():
``True``.
Args:
- node (dict): A dictionary loaded from YAML
- key (str): The key whose value contains a path to validate
+ node (ScalarNode): A Node loaded from YAML containing the path to validate
check_is_file (bool): If ``True`` an error will also be raised
if path does not point to a regular file.
Defaults to ``False``
@@ -501,70 +385,10 @@ class Plugin():
"""
- return self.__project.get_path_from_node(node, key,
+ return self.__project.get_path_from_node(node,
check_is_file=check_is_file,
check_is_dir=check_is_dir)
- def node_validate(self, node, valid_keys):
- """This should be used in :func:`~buildstream.plugin.Plugin.configure`
- implementations to assert that users have only entered
- valid configuration keys.
-
- Args:
- node (dict): A dictionary loaded from YAML
- valid_keys (iterable): A list of valid keys for the node
-
- Raises:
- :class:`.LoadError`: When an invalid key is found
-
- **Example:**
-
- .. code:: python
-
- # Ensure our node only contains valid autotools config keys
- self.node_validate(node, [
- 'configure-commands', 'build-commands',
- 'install-commands', 'strip-commands'
- ])
-
- """
- _yaml.node_validate(node, valid_keys)
-
- def node_get_list_element(self, node, expected_type, member_name, indices):
- """Fetch the value of a list element from a node member, raising an error if the
- value is incorrectly typed.
-
- Args:
- node (dict): A dictionary loaded from YAML
- expected_type (type): The expected type of the node member
- member_name (str): The name of the member to fetch
- indices (list of int): List of indices to search, in case of nested lists
-
- Returns:
- The value of the list element in *member_name* at the specified *indices*
-
- Raises:
- :class:`.LoadError`
-
- Note:
- Returned strings are stripped of leading and trailing whitespace
-
- **Example:**
-
- .. code:: python
-
- # Fetch the list itself
- things = self.node_get_member(node, list, 'things')
-
- # Iterate over the list indices
- for i in range(len(things)):
-
- # Fetch dict things
- thing = self.node_get_list_element(
- node, dict, 'things', [ i ])
- """
- return _yaml.node_get(node, expected_type, member_name, indices=indices)
-
def debug(self, brief, *, detail=None):
"""Print a debugging message
@@ -812,7 +636,7 @@ class Plugin():
# _get_configuring() state is up to date.
#
# Args:
- # node (dict): The loaded configuration dictionary
+ # node (buildstream.node.MappingNode): The loaded configuration dictionary
#
def _configure(self, node):
self.__configuring = True
@@ -893,11 +717,11 @@ class Plugin():
silenced_warnings = set()
project = self.__project
- for key, value in self.node_items(project.element_overrides):
- if _yaml.node_get(value, bool, 'suppress-deprecation-warnings', default_value=False):
+ for key, value in project.element_overrides.items():
+ if value.get_bool('suppress-deprecation-warnings', default=False):
silenced_warnings.add(key)
- for key, value in self.node_items(project.source_overrides):
- if _yaml.node_get(value, bool, 'suppress-deprecation-warnings', default_value=False):
+ for key, value in project.source_overrides.items():
+ if value.get_bool('suppress-deprecation-warnings', default=False):
silenced_warnings.add(key)
return self.get_kind() in silenced_warnings
diff --git a/src/buildstream/plugins/elements/compose.py b/src/buildstream/plugins/elements/compose.py
index b672cde0c..83501d817 100644
--- a/src/buildstream/plugins/elements/compose.py
+++ b/src/buildstream/plugins/elements/compose.py
@@ -59,16 +59,16 @@ class ComposeElement(Element):
BST_VIRTUAL_DIRECTORY = True
def configure(self, node):
- self.node_validate(node, [
+ node.validate_keys([
'integrate', 'include', 'exclude', 'include-orphans'
])
# We name this variable 'integration' only to avoid
# collision with the Element.integrate() method.
- self.integration = self.node_get_member(node, bool, 'integrate')
- self.include = self.node_get_member(node, list, 'include')
- self.exclude = self.node_get_member(node, list, 'exclude')
- self.include_orphans = self.node_get_member(node, bool, 'include-orphans')
+ self.integration = node.get_bool('integrate')
+ self.include = node.get_sequence('include').as_str_list()
+ self.exclude = node.get_sequence('exclude').as_str_list()
+ self.include_orphans = node.get_bool('include-orphans')
def preflight(self):
pass
diff --git a/src/buildstream/plugins/elements/filter.py b/src/buildstream/plugins/elements/filter.py
index 45847e685..c2c2e0125 100644
--- a/src/buildstream/plugins/elements/filter.py
+++ b/src/buildstream/plugins/elements/filter.py
@@ -167,15 +167,16 @@ class FilterElement(Element):
BST_RUN_COMMANDS = False
def configure(self, node):
- self.node_validate(node, [
+ node.validate_keys([
'include', 'exclude', 'include-orphans'
])
- self.include = self.node_get_member(node, list, 'include')
- self.exclude = self.node_get_member(node, list, 'exclude')
- self.include_orphans = self.node_get_member(node, bool, 'include-orphans')
- self.include_provenance = self.node_provenance(node, member_name='include')
- self.exclude_provenance = self.node_provenance(node, member_name='exclude')
+ self.include_node = node.get_sequence('include')
+ self.exclude_node = node.get_sequence('exclude')
+
+ self.include = self.include_node.as_str_list()
+ self.exclude = self.exclude_node.as_str_list()
+ self.include_orphans = node.get_bool('include-orphans')
def preflight(self):
# Exactly one build-depend is permitted
@@ -217,7 +218,7 @@ class FilterElement(Element):
for dep in self.dependencies(Scope.BUILD, recurse=False):
# Check that all the included/excluded domains exist
pub_data = dep.get_public_data('bst')
- split_rules = self.node_get_member(pub_data, dict, 'split-rules', {})
+ split_rules = pub_data.get_mapping('split-rules', {})
unfound_includes = []
for domain in self.include:
if domain not in split_rules:
@@ -229,11 +230,11 @@ class FilterElement(Element):
detail = []
if unfound_includes:
- detail.append("Unknown domains were used in {}".format(self.include_provenance))
+ detail.append("Unknown domains were used in {}".format(self.include_node.get_provenance()))
detail.extend([' - {}'.format(domain) for domain in unfound_includes])
if unfound_excludes:
- detail.append("Unknown domains were used in {}".format(self.exclude_provenance))
+ detail.append("Unknown domains were used in {}".format(self.exclude_node.get_provenance()))
detail.extend([' - {}'.format(domain) for domain in unfound_excludes])
if detail:
diff --git a/src/buildstream/plugins/elements/import.py b/src/buildstream/plugins/elements/import.py
index 61e353dbc..6ae8cef46 100644
--- a/src/buildstream/plugins/elements/import.py
+++ b/src/buildstream/plugins/elements/import.py
@@ -45,7 +45,7 @@ class ImportElement(Element):
BST_RUN_COMMANDS = False
def configure(self, node):
- self.node_validate(node, [
+ node.validate_keys([
'source', 'target'
])
diff --git a/src/buildstream/plugins/elements/junction.py b/src/buildstream/plugins/elements/junction.py
index 4222de360..b21ef0777 100644
--- a/src/buildstream/plugins/elements/junction.py
+++ b/src/buildstream/plugins/elements/junction.py
@@ -175,9 +175,9 @@ class JunctionElement(Element):
BST_FORBID_RDEPENDS = True
def configure(self, node):
- self.path = self.node_get_member(node, str, 'path', default='')
- self.options = self.node_get_member(node, dict, 'options', default={})
- self.target = self.node_get_member(node, str, 'target', default=None)
+ self.path = node.get_str('path', default='')
+ self.options = node.get_mapping('options', default={})
+ self.target = node.get_str('target', default=None)
self.target_element = None
self.target_junction = None
@@ -188,7 +188,7 @@ class JunctionElement(Element):
# 3. config['path']
if self.target and any(self.sources()):
raise ElementError("junction elements cannot define both 'sources' and 'target' config option")
- if self.target and any(self.node_items(self.options)):
+ if self.target and any(self.options.items()):
raise ElementError("junction elements cannot define both 'options' and 'target'")
if self.target and self.path:
raise ElementError("junction elements cannot define both 'path' and 'target'")
diff --git a/src/buildstream/plugins/elements/script.py b/src/buildstream/plugins/elements/script.py
index 0d194dcc1..a7b53e422 100644
--- a/src/buildstream/plugins/elements/script.py
+++ b/src/buildstream/plugins/elements/script.py
@@ -46,12 +46,12 @@ class ScriptElement(buildstream.ScriptElement):
BST_VIRTUAL_DIRECTORY = True
def configure(self, node):
- for n in self.node_get_member(node, list, 'layout', []):
+ for n in node.get_sequence('layout', []):
dst = self.node_subst_member(n, 'destination')
elm = self.node_subst_member(n, 'element', None)
self.layout_add(elm, dst)
- self.node_validate(node, [
+ node.validate_keys([
'commands', 'root-read-only', 'layout'
])
@@ -60,8 +60,7 @@ class ScriptElement(buildstream.ScriptElement):
self.set_work_dir()
self.set_install_root()
- self.set_root_read_only(self.node_get_member(node, bool,
- 'root-read-only', False))
+ self.set_root_read_only(node.get_bool('root-read-only', default=False))
# Plugin entry point
diff --git a/src/buildstream/plugins/sources/_downloadablefilesource.py b/src/buildstream/plugins/sources/_downloadablefilesource.py
index b9b15e268..10418691e 100644
--- a/src/buildstream/plugins/sources/_downloadablefilesource.py
+++ b/src/buildstream/plugins/sources/_downloadablefilesource.py
@@ -77,8 +77,8 @@ class DownloadableFileSource(Source):
__urlopener = None
def configure(self, node):
- self.original_url = self.node_get_member(node, str, 'url')
- self.ref = self.node_get_member(node, str, 'ref', None)
+ self.original_url = node.get_str('url')
+ self.ref = node.get_str('ref', None)
self.url = self.translate_url(self.original_url)
self._warn_deprecated_etag(node)
@@ -99,7 +99,7 @@ class DownloadableFileSource(Source):
return Consistency.RESOLVED
def load_ref(self, node):
- self.ref = self.node_get_member(node, str, 'ref', None)
+ self.ref = node.get_str('ref', None)
self._warn_deprecated_etag(node)
def get_ref(self):
@@ -143,9 +143,9 @@ class DownloadableFileSource(Source):
.format(self.url, sha256, self.ref))
def _warn_deprecated_etag(self, node):
- etag = self.node_get_member(node, str, 'etag', None)
+ etag = node.get_str('etag', None)
if etag:
- provenance = self.node_provenance(node, member_name='etag')
+ provenance = node.get_scalar('etag').get_provenance()
self.warn('{} "etag" is deprecated and ignored.'.format(provenance))
def _get_etag(self, ref):
diff --git a/src/buildstream/plugins/sources/bzr.py b/src/buildstream/plugins/sources/bzr.py
index e59986da6..6fccf1e8b 100644
--- a/src/buildstream/plugins/sources/bzr.py
+++ b/src/buildstream/plugins/sources/bzr.py
@@ -67,11 +67,11 @@ class BzrSource(Source):
# pylint: disable=attribute-defined-outside-init
def configure(self, node):
- self.node_validate(node, ['url', 'track', 'ref', *Source.COMMON_CONFIG_KEYS])
+ node.validate_keys(['url', 'track', 'ref', *Source.COMMON_CONFIG_KEYS])
- self.original_url = self.node_get_member(node, str, 'url')
- self.tracking = self.node_get_member(node, str, 'track')
- self.ref = self.node_get_member(node, str, 'ref', None)
+ self.original_url = node.get_str('url')
+ self.tracking = node.get_str('track')
+ self.ref = node.get_str('ref', None)
self.url = self.translate_url(self.original_url)
def preflight(self):
@@ -93,7 +93,7 @@ class BzrSource(Source):
return Consistency.RESOLVED
def load_ref(self, node):
- self.ref = self.node_get_member(node, str, 'ref', None)
+ self.ref = node.get_str('ref', None)
def get_ref(self):
return self.ref
diff --git a/src/buildstream/plugins/sources/deb.py b/src/buildstream/plugins/sources/deb.py
index e45994951..cc88cf53c 100644
--- a/src/buildstream/plugins/sources/deb.py
+++ b/src/buildstream/plugins/sources/deb.py
@@ -61,7 +61,7 @@ class DebSource(TarSource):
def configure(self, node):
super().configure(node)
- self.base_dir = self.node_get_member(node, str, 'base-dir', None)
+ self.base_dir = node.get_str('base-dir', None)
def preflight(self):
return
diff --git a/src/buildstream/plugins/sources/local.py b/src/buildstream/plugins/sources/local.py
index fba8af604..e28098c38 100644
--- a/src/buildstream/plugins/sources/local.py
+++ b/src/buildstream/plugins/sources/local.py
@@ -54,8 +54,8 @@ class LocalSource(Source):
self.__unique_key = None
def configure(self, node):
- self.node_validate(node, ['path', *Source.COMMON_CONFIG_KEYS])
- self.path = self.node_get_project_path(node, 'path')
+ node.validate_keys(['path', *Source.COMMON_CONFIG_KEYS])
+ self.path = self.node_get_project_path(node.get_scalar('path'))
self.fullpath = os.path.join(self.get_project_directory(), self.path)
def preflight(self):
diff --git a/src/buildstream/plugins/sources/patch.py b/src/buildstream/plugins/sources/patch.py
index e42868264..1e70039bd 100644
--- a/src/buildstream/plugins/sources/patch.py
+++ b/src/buildstream/plugins/sources/patch.py
@@ -55,9 +55,9 @@ class PatchSource(Source):
BST_REQUIRES_PREVIOUS_SOURCES_STAGE = True
def configure(self, node):
- self.path = self.node_get_project_path(node, 'path',
+ self.path = self.node_get_project_path(node.get_scalar('path'),
check_is_file=True)
- self.strip_level = self.node_get_member(node, int, "strip-level", 1)
+ self.strip_level = node.get_int("strip-level", default=1)
self.fullpath = os.path.join(self.get_project_directory(), self.path)
def preflight(self):
diff --git a/src/buildstream/plugins/sources/pip.py b/src/buildstream/plugins/sources/pip.py
index 9d6c40d74..78c11fd89 100644
--- a/src/buildstream/plugins/sources/pip.py
+++ b/src/buildstream/plugins/sources/pip.py
@@ -109,19 +109,19 @@ class PipSource(Source):
BST_REQUIRES_PREVIOUS_SOURCES_TRACK = True
def configure(self, node):
- self.node_validate(node, ['url', 'packages', 'ref', 'requirements-files'] +
+ node.validate_keys(['url', 'packages', 'ref', 'requirements-files'] +
Source.COMMON_CONFIG_KEYS)
- self.ref = self.node_get_member(node, str, 'ref', None)
- self.original_url = self.node_get_member(node, str, 'url', _PYPI_INDEX_URL)
+ self.ref = node.get_str('ref', None)
+ self.original_url = node.get_str('url', _PYPI_INDEX_URL)
self.index_url = self.translate_url(self.original_url)
- self.packages = self.node_get_member(node, list, 'packages', [])
- self.requirements_files = self.node_get_member(node, list, 'requirements-files', [])
+ self.packages = node.get_sequence('packages', []).as_str_list()
+ self.requirements_files = node.get_sequence('requirements-files', []).as_str_list()
if not (self.packages or self.requirements_files):
raise SourceError("{}: Either 'packages' or 'requirements-files' must be specified". format(self))
def preflight(self):
- # Try to find a pip version that supports download command
+ # Try to find a pip version that supports the download command
self.host_pip = None
for python in reversed(_PYTHON_VERSIONS):
try:
@@ -150,7 +150,7 @@ class PipSource(Source):
return self.ref
def load_ref(self, node):
- self.ref = self.node_get_member(node, str, 'ref', None)
+ self.ref = node.get_str('ref', None)
def set_ref(self, ref, node):
node['ref'] = self.ref = ref
diff --git a/src/buildstream/plugins/sources/remote.py b/src/buildstream/plugins/sources/remote.py
index 562a8f226..68aa577fc 100644
--- a/src/buildstream/plugins/sources/remote.py
+++ b/src/buildstream/plugins/sources/remote.py
@@ -62,13 +62,13 @@ class RemoteSource(DownloadableFileSource):
def configure(self, node):
super().configure(node)
- self.filename = self.node_get_member(node, str, 'filename', os.path.basename(self.url))
- self.executable = self.node_get_member(node, bool, 'executable', False)
+ self.filename = node.get_str('filename', os.path.basename(self.url))
+ self.executable = node.get_bool('executable', default=False)
if os.sep in self.filename:
raise SourceError('{}: filename parameter cannot contain directories'.format(self),
reason="filename-contains-directory")
- self.node_validate(node, DownloadableFileSource.COMMON_CONFIG_KEYS + ['filename', 'executable'])
+ node.validate_keys(DownloadableFileSource.COMMON_CONFIG_KEYS + ['filename', 'executable'])
def get_unique_key(self):
return super().get_unique_key() + [self.filename, self.executable]
diff --git a/src/buildstream/plugins/sources/tar.py b/src/buildstream/plugins/sources/tar.py
index e9e0fda27..3e9018bba 100644
--- a/src/buildstream/plugins/sources/tar.py
+++ b/src/buildstream/plugins/sources/tar.py
@@ -72,9 +72,8 @@ class TarSource(DownloadableFileSource):
def configure(self, node):
super().configure(node)
- self.base_dir = self.node_get_member(node, str, 'base-dir', '*')
-
- self.node_validate(node, DownloadableFileSource.COMMON_CONFIG_KEYS + ['base-dir'])
+ self.base_dir = node.get_str('base-dir', '*')
+ node.validate_keys(DownloadableFileSource.COMMON_CONFIG_KEYS + ['base-dir'])
def preflight(self):
self.host_lzip = None
diff --git a/src/buildstream/plugins/sources/zip.py b/src/buildstream/plugins/sources/zip.py
index 9981f1260..322be58d7 100644
--- a/src/buildstream/plugins/sources/zip.py
+++ b/src/buildstream/plugins/sources/zip.py
@@ -72,9 +72,8 @@ class ZipSource(DownloadableFileSource):
def configure(self, node):
super().configure(node)
- self.base_dir = self.node_get_member(node, str, 'base-dir', '*')
-
- self.node_validate(node, DownloadableFileSource.COMMON_CONFIG_KEYS + ['base-dir'])
+ self.base_dir = node.get_str('base-dir', '*')
+ node.validate_keys(DownloadableFileSource.COMMON_CONFIG_KEYS + ['base-dir'])
def get_unique_key(self):
return super().get_unique_key() + [self.base_dir]
diff --git a/src/buildstream/sandbox/_sandboxremote.py b/src/buildstream/sandbox/_sandboxremote.py
index 075a69a2b..20298c1ce 100644
--- a/src/buildstream/sandbox/_sandboxremote.py
+++ b/src/buildstream/sandbox/_sandboxremote.py
@@ -27,6 +27,7 @@ from functools import partial
import grpc
from .. import utils
+from ..node import Node
from .._message import Message, MessageType
from .sandbox import Sandbox, SandboxCommandError, _SandboxBatch
from ..storage.directory import VirtualDirectoryError
@@ -112,41 +113,41 @@ class SandboxRemote(Sandbox):
def specs_from_config_node(config_node, basedir=None):
def require_node(config, keyname):
- val = _yaml.node_get(config, dict, keyname, default_value=None)
+ val = config.get_mapping(keyname, default=None)
if val is None:
- provenance = _yaml.node_get_provenance(remote_config, key=keyname)
+ provenance = remote_config.get_provenance()
raise _yaml.LoadError(_yaml.LoadErrorReason.INVALID_DATA,
"{}: '{}' was not present in the remote "
"execution configuration (remote-execution). "
.format(str(provenance), keyname))
return val
- remote_config = _yaml.node_get(config_node, dict, 'remote-execution', default_value=None)
+ remote_config = config_node.get_mapping('remote-execution', default=None)
if remote_config is None:
return None
service_keys = ['execution-service', 'storage-service', 'action-cache-service']
- _yaml.node_validate(remote_config, ['url', *service_keys])
+ remote_config.validate_keys(['url', *service_keys])
exec_config = require_node(remote_config, 'execution-service')
storage_config = require_node(remote_config, 'storage-service')
- action_config = _yaml.node_get(remote_config, dict, 'action-cache-service', default_value={})
+ action_config = remote_config.get_mapping('action-cache-service', default={})
tls_keys = ['client-key', 'client-cert', 'server-cert']
- _yaml.node_validate(exec_config, ['url', 'instance-name', *tls_keys])
- _yaml.node_validate(storage_config, ['url', 'instance-name', *tls_keys])
+ exec_config.validate_keys(['url', 'instance-name', *tls_keys])
+ storage_config.validate_keys(['url', 'instance-name', *tls_keys])
if action_config:
- _yaml.node_validate(action_config, ['url', 'instance-name', *tls_keys])
+ action_config.validate_keys(['url', 'instance-name', *tls_keys])
# Maintain some backwards compatibility with older configs, in which
# 'url' was the only valid key for remote-execution:
if 'url' in remote_config:
if 'execution-service' not in remote_config:
- exec_config = _yaml.new_node_from_dict({'url': remote_config['url']})
+ exec_config = Node.from_dict({'url': remote_config['url']})
else:
- provenance = _yaml.node_get_provenance(remote_config, key='url')
+ provenance = remote_config.get_node('url').get_provenance()
raise _yaml.LoadError(_yaml.LoadErrorReason.INVALID_DATA,
"{}: 'url' and 'execution-service' keys were found in the remote "
"execution configuration (remote-execution). "
@@ -164,7 +165,7 @@ class SandboxRemote(Sandbox):
for config_key, config in zip(service_keys, service_configs):
# Either both or none of the TLS client key/cert pair must be specified:
if ('client-key' in config) != ('client-cert' in config):
- provenance = _yaml.node_get_provenance(remote_config, key=config_key)
+ provenance = remote_config.get_node(config_key).get_provenance()
raise _yaml.LoadError(_yaml.LoadErrorReason.INVALID_DATA,
"{}: TLS client key/cert pair is incomplete. "
"You must specify both 'client-key' and 'client-cert' "
@@ -173,9 +174,10 @@ class SandboxRemote(Sandbox):
for tls_key in tls_keys:
if tls_key in config:
- _yaml.node_set(config, tls_key, resolve_path(_yaml.node_get(config, str, tls_key)))
+ config[tls_key] = resolve_path(config.get_str(tls_key))
- return RemoteExecutionSpec(*[_yaml.node_sanitize(conf) for conf in service_configs])
+ # TODO: we should probably not be stripping node info here, but rather load the files the safe way
+ return RemoteExecutionSpec(*[conf._strip_node_info() for conf in service_configs])
def run_remote_command(self, channel, action_digest):
# Sends an execution request to the remote execution server.
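
# A sketch of the backwards-compatibility path in specs_from_config_node()
# above, assuming a config that only carries the old 'url' key; the URL is
# invented:
from buildstream.node import Node

config_node = Node.from_dict({
    'remote-execution': {'url': 'http://remote.example.com:8080'},
})
remote_config = config_node.get_mapping('remote-execution', default=None)
if 'url' in remote_config and 'execution-service' not in remote_config:
    # Synthesize the execution-service node from the bare URL, as the code above does.
    exec_config = Node.from_dict({'url': remote_config.get_str('url')})
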
diff --git a/src/buildstream/source.py b/src/buildstream/source.py
index f8b5d3f88..59b6d3644 100644
--- a/src/buildstream/source.py
+++ b/src/buildstream/source.py
@@ -258,8 +258,7 @@ class Source(Plugin):
All Sources derive from this class, this interface defines how
the core will be interacting with Sources.
"""
- __defaults = {} # The defaults from the project
- __defaults_set = False # Flag, in case there are not defaults at all
+ __defaults = None # The defaults from the project
BST_REQUIRES_PREVIOUS_SOURCES_TRACK = False
"""Whether access to previous sources is required during track
@@ -308,7 +307,7 @@ class Source(Plugin):
"""
def __init__(self, context, project, meta, *, alias_override=None, unique_id=None):
- provenance = _yaml.node_get_provenance(meta.config)
+ provenance = meta.config.get_provenance()
super().__init__("{}-{}".format(meta.element_name, meta.element_index),
context, project, provenance, "source", unique_id=unique_id)
@@ -337,7 +336,7 @@ class Source(Plugin):
"""Common source config keys
Source config keys that must not be accessed in configure(), and
- should be checked for using node_validate().
+ should be checked for using node.validate_keys().
"""
#############################################################
@@ -355,7 +354,7 @@ class Source(Plugin):
"""Loads the *ref* for this Source from the specified *node*.
Args:
- node (dict): The YAML node to load the ref from
+ node (:class:`MappingNode <buildstream.node.MappingNode>`): The YAML node to load the ref from
.. note::
@@ -392,8 +391,8 @@ class Source(Plugin):
Args:
ref (simple object): The internal source reference to set, or ``None``
- node (dict): The same dictionary which was previously passed
- to :func:`Plugin.configure() <buildstream.plugin.Plugin.configure>`
+ node (:class:`MappingNode <buildstream.node.MappingNode>`): The same dictionary which was previously passed
+ to :func:`Plugin.configure() <buildstream.plugin.Plugin.configure>`
See :func:`Source.get_ref() <buildstream.source.Source.get_ref>`
for a discussion on the *ref* parameter.
@@ -869,27 +868,28 @@ class Source(Plugin):
node = toplevel_refs.lookup_ref(project.name, element_name, element_idx, write=True)
if project is toplevel and not node:
- node = provenance.node
+ node = provenance._node
# Ensure the node is not from a junction
- if not toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS and provenance.project is not toplevel:
- if provenance.project is project:
+ if not toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS and provenance._project is not toplevel:
+ if provenance._project is project:
self.warn("{}: Not persisting new reference in junctioned project".format(self))
- elif provenance.project is None:
- assert provenance.filename == ""
- assert provenance.shortname == ""
+ elif provenance._project is None:
+ assert provenance._filename == ""
+ assert provenance._shortname == ""
raise SourceError("{}: Error saving source reference to synthetic node."
.format(self))
else:
raise SourceError("{}: Cannot track source in a fragment from a junction"
- .format(provenance.shortname),
+ .format(provenance._shortname),
reason="tracking-junction-fragment")
#
# Step 2 - Set the ref in memory, and determine changed state
#
- clean = _yaml.node_sanitize(node, dict_type=dict)
- to_modify = _yaml.node_sanitize(node, dict_type=dict)
+ # TODO: we are working on dictionaries here; it would be nicer to just work on the nodes themselves
+ clean = node._strip_node_info()
+ to_modify = node._strip_node_info()
current_ref = self.get_ref() # pylint: disable=assignment-from-no-return
@@ -955,22 +955,24 @@ class Source(Plugin):
for key, action in actions.items():
# Obtain the top level node and its file
if action == 'add':
- provenance = _yaml.node_get_provenance(node)
+ provenance = node.get_provenance()
else:
- provenance = _yaml.node_get_provenance(node, key=key)
+ provenance = node.get_node(key).get_provenance()
- toplevel_node = provenance.toplevel
+ toplevel_node = provenance._toplevel
# Get the path to whatever changed
if action == 'add':
- path = _yaml.node_find_target(toplevel_node, node)
+ path = toplevel_node._find(node)
else:
- path = _yaml.node_find_target(toplevel_node, node, key=key)
+ full_path = toplevel_node._find(node.get_node(key))
+ # We want the path to the node containing the key, not to the key
+ path = full_path[:-1]
- roundtrip_file = roundtrip_cache.get(provenance.filename)
+ roundtrip_file = roundtrip_cache.get(provenance._filename)
if not roundtrip_file:
- roundtrip_file = roundtrip_cache[provenance.filename] = _yaml.roundtrip_load(
- provenance.filename,
+ roundtrip_file = roundtrip_cache[provenance._filename] = _yaml.roundtrip_load(
+ provenance._filename,
allow_missing=True
)
@@ -1267,24 +1269,23 @@ class Source(Plugin):
@classmethod
def __init_defaults(cls, project, meta):
- if not cls.__defaults_set:
+ if cls.__defaults is None:
if meta.first_pass:
sources = project.first_pass_config.source_overrides
else:
sources = project.source_overrides
- cls.__defaults = _yaml.node_get(sources, dict, meta.kind, default_value={})
- cls.__defaults_set = True
+ cls.__defaults = sources.get_mapping(meta.kind, default={})
# This will resolve the final configuration to be handed
# off to source.configure()
#
@classmethod
def __extract_config(cls, meta):
- config = _yaml.node_get(cls.__defaults, dict, 'config', default_value={})
- config = _yaml.node_copy(config)
+ config = cls.__defaults.get_mapping('config', default={})
+ config = config.clone()
- _yaml.composite(config, meta.config)
- _yaml.node_final_assertions(config)
+ meta.config._composite(config)
+ config._assert_fully_composited()
return config
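
# The composition flow in __extract_config() condenses to the following sketch;
# _composite() and _assert_fully_composited() are internal methods, and the
# dictionaries are invented:
from buildstream.node import Node

defaults = Node.from_dict({'config': {'track-tags': False}})
meta_config = Node.from_dict({'ref': 'deadbeef'})

config = defaults.get_mapping('config', default={}).clone()  # clone so shared defaults stay untouched
meta_config._composite(config)       # the overlay composites itself onto its argument
config._assert_fully_composited()    # raises if any '(>)' / '(=)' directives remain
assert config.get_str('ref') == 'deadbeef'
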
diff --git a/src/buildstream/testing/_sourcetests/build_checkout.py b/src/buildstream/testing/_sourcetests/build_checkout.py
index 3619d2b7e..e2842e0e0 100644
--- a/src/buildstream/testing/_sourcetests/build_checkout.py
+++ b/src/buildstream/testing/_sourcetests/build_checkout.py
@@ -63,9 +63,8 @@ def test_fetch_build_checkout(cli, tmpdir, datafiles, strict, kind):
repo.source_config(ref=ref)
]
}
- _yaml.dump(element,
- os.path.join(element_path,
- element_name))
+ _yaml.roundtrip_dump(element,
+ os.path.join(element_path, element_name))
assert cli.get_element_state(project, element_name) == 'fetch needed'
result = cli.run(project=project, args=strict_args(['build', element_name], strict))
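
# roundtrip_dump(), used throughout the test hunks below, still takes plain
# dicts; a minimal usage sketch with an invented element:
from buildstream import _yaml

element = {'kind': 'import', 'sources': []}
_yaml.roundtrip_dump(element, 'element.bst')  # dicts in, round-trippable YAML out
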
diff --git a/src/buildstream/testing/_sourcetests/fetch.py b/src/buildstream/testing/_sourcetests/fetch.py
index aaf92a14d..d9b0876c6 100644
--- a/src/buildstream/testing/_sourcetests/fetch.py
+++ b/src/buildstream/testing/_sourcetests/fetch.py
@@ -53,9 +53,8 @@ def test_fetch(cli, tmpdir, datafiles, kind):
repo.source_config(ref=ref)
]
}
- _yaml.dump(element,
- os.path.join(element_path,
- element_name))
+ _yaml.roundtrip_dump(element,
+ os.path.join(element_path, element_name))
# Assert that a fetch is needed
assert cli.get_element_state(project, element_name) == 'fetch needed'
@@ -89,7 +88,7 @@ def test_fetch_cross_junction(cli, tmpdir, datafiles, ref_storage, kind):
repo.source_config(ref=(ref if ref_storage == 'inline' else None))
]
}
- _yaml.dump(element, import_etc_path)
+ _yaml.roundtrip_dump(element, import_etc_path)
configure_project(project, {
'ref-storage': ref_storage
diff --git a/src/buildstream/testing/_sourcetests/mirror.py b/src/buildstream/testing/_sourcetests/mirror.py
index d682bb2ef..f532049dd 100644
--- a/src/buildstream/testing/_sourcetests/mirror.py
+++ b/src/buildstream/testing/_sourcetests/mirror.py
@@ -66,7 +66,7 @@ def test_mirror_fetch(cli, tmpdir, datafiles, kind):
full_mirror = mirror_repo.source_config()['url']
mirror_map, _ = os.path.split(full_mirror)
os.makedirs(element_dir)
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
project = {
'name': 'test',
@@ -84,7 +84,7 @@ def test_mirror_fetch(cli, tmpdir, datafiles, kind):
]
}
project_file = os.path.join(project_dir, 'project.conf')
- _yaml.dump(project, project_file)
+ _yaml.roundtrip_dump(project, project_file)
# There is no obvious way of checking that the mirror has been fetched,
# but at least we can be sure the fetch succeeds
@@ -124,7 +124,7 @@ def test_mirror_fetch_upstream_absent(cli, tmpdir, datafiles, kind):
full_mirror = mirror_repo.source_config()['url']
mirror_map, _ = os.path.split(full_mirror)
os.makedirs(element_dir)
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
project = {
'name': 'test',
@@ -142,7 +142,7 @@ def test_mirror_fetch_upstream_absent(cli, tmpdir, datafiles, kind):
]
}
project_file = os.path.join(project_dir, 'project.conf')
- _yaml.dump(project, project_file)
+ _yaml.roundtrip_dump(project, project_file)
result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
result.assert_success()
@@ -179,14 +179,14 @@ def test_mirror_from_includes(cli, tmpdir, datafiles, kind):
full_mirror = mirror_repo.source_config()['url']
mirror_map, _ = os.path.split(full_mirror)
os.makedirs(element_dir)
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
config_project_dir = str(tmpdir.join('config'))
os.makedirs(config_project_dir, exist_ok=True)
config_project = {
'name': 'config'
}
- _yaml.dump(config_project, os.path.join(config_project_dir, 'project.conf'))
+ _yaml.roundtrip_dump(config_project, os.path.join(config_project_dir, 'project.conf'))
extra_mirrors = {
'mirrors': [
{
@@ -197,7 +197,7 @@ def test_mirror_from_includes(cli, tmpdir, datafiles, kind):
}
]
}
- _yaml.dump(extra_mirrors, os.path.join(config_project_dir, 'mirrors.yml'))
+ _yaml.roundtrip_dump(extra_mirrors, os.path.join(config_project_dir, 'mirrors.yml'))
generate_junction(str(tmpdir.join('config_repo')),
config_project_dir,
os.path.join(element_dir, 'config.bst'))
@@ -213,7 +213,7 @@ def test_mirror_from_includes(cli, tmpdir, datafiles, kind):
]
}
project_file = os.path.join(project_dir, 'project.conf')
- _yaml.dump(project, project_file)
+ _yaml.roundtrip_dump(project, project_file)
# Now make the upstream unavailable.
os.rename(upstream_repo.repo, '{}.bak'.format(upstream_repo.repo))
@@ -252,14 +252,14 @@ def test_mirror_junction_from_includes(cli, tmpdir, datafiles, kind):
full_mirror = mirror_repo.source_config()['url']
mirror_map, _ = os.path.split(full_mirror)
os.makedirs(element_dir)
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
config_project_dir = str(tmpdir.join('config'))
os.makedirs(config_project_dir, exist_ok=True)
config_project = {
'name': 'config'
}
- _yaml.dump(config_project, os.path.join(config_project_dir, 'project.conf'))
+ _yaml.roundtrip_dump(config_project, os.path.join(config_project_dir, 'project.conf'))
extra_mirrors = {
'mirrors': [
{
@@ -270,7 +270,7 @@ def test_mirror_junction_from_includes(cli, tmpdir, datafiles, kind):
}
]
}
- _yaml.dump(extra_mirrors, os.path.join(config_project_dir, 'mirrors.yml'))
+ _yaml.roundtrip_dump(extra_mirrors, os.path.join(config_project_dir, 'mirrors.yml'))
generate_junction(str(tmpdir.join('config_repo')),
config_project_dir,
os.path.join(element_dir, 'config.bst'))
@@ -286,7 +286,7 @@ def test_mirror_junction_from_includes(cli, tmpdir, datafiles, kind):
]
}
project_file = os.path.join(project_dir, 'project.conf')
- _yaml.dump(project, project_file)
+ _yaml.roundtrip_dump(project, project_file)
# Now make the upstream unavailable.
os.rename(upstream_repo.repo, '{}.bak'.format(upstream_repo.repo))
@@ -332,7 +332,7 @@ def test_mirror_track_upstream_present(cli, tmpdir, datafiles, kind):
full_mirror = mirror_repo.source_config()['url']
mirror_map, _ = os.path.split(full_mirror)
os.makedirs(element_dir)
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
project = {
'name': 'test',
@@ -350,16 +350,16 @@ def test_mirror_track_upstream_present(cli, tmpdir, datafiles, kind):
]
}
project_file = os.path.join(project_dir, 'project.conf')
- _yaml.dump(project, project_file)
+ _yaml.roundtrip_dump(project, project_file)
result = cli.run(project=project_dir, args=['source', 'track', element_name])
result.assert_success()
# Tracking tries upstream first. Check the ref is from upstream.
new_element = _yaml.load(element_path)
- source = _yaml.node_get(new_element, dict, 'sources', [0])
+ source = new_element.get_sequence('sources').mapping_at(0)
if 'ref' in source:
- assert _yaml.node_get(source, str, 'ref') == upstream_ref
+ assert source.get_str('ref') == upstream_ref
@pytest.mark.datafiles(DATA_DIR)
@@ -397,7 +397,7 @@ def test_mirror_track_upstream_absent(cli, tmpdir, datafiles, kind):
full_mirror = mirror_repo.source_config()['url']
mirror_map, _ = os.path.split(full_mirror)
os.makedirs(element_dir)
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
project = {
'name': 'test',
@@ -415,13 +415,13 @@ def test_mirror_track_upstream_absent(cli, tmpdir, datafiles, kind):
]
}
project_file = os.path.join(project_dir, 'project.conf')
- _yaml.dump(project, project_file)
+ _yaml.roundtrip_dump(project, project_file)
result = cli.run(project=project_dir, args=['source', 'track', element_name])
result.assert_success()
# Check that tracking fell back to the mirror
new_element = _yaml.load(element_path)
- source = _yaml.node_get(new_element, dict, 'sources', [0])
+ source = new_element.get_sequence('sources').mapping_at(0)
if 'ref' in source:
- assert _yaml.node_get(source, str, 'ref') == mirror_ref
+ assert source.get_str('ref') == mirror_ref
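
# Reading a tracked ref back now goes through typed node accessors rather than
# _yaml.node_get(); a sketch assuming an element.bst whose sources list holds
# at least one entry:
from buildstream import _yaml

new_element = _yaml.load('element.bst')                     # now returns a MappingNode
source = new_element.get_sequence('sources').mapping_at(0)  # typed indexing into the sequence
if 'ref' in source:
    print(source.get_str('ref'))
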
diff --git a/src/buildstream/testing/_sourcetests/source_determinism.py b/src/buildstream/testing/_sourcetests/source_determinism.py
index 3a5c264d9..fc8ad9893 100644
--- a/src/buildstream/testing/_sourcetests/source_determinism.py
+++ b/src/buildstream/testing/_sourcetests/source_determinism.py
@@ -91,7 +91,7 @@ def test_deterministic_source_umask(cli, tmpdir, datafiles, kind):
]
}
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
def get_value_for_umask(umask):
checkoutdir = os.path.join(str(tmpdir), 'checkout-{}'.format(umask))
diff --git a/src/buildstream/testing/_sourcetests/track.py b/src/buildstream/testing/_sourcetests/track.py
index 668ea29e5..01a39951f 100644
--- a/src/buildstream/testing/_sourcetests/track.py
+++ b/src/buildstream/testing/_sourcetests/track.py
@@ -43,7 +43,7 @@ def generate_element(repo, element_path, dep_name=None):
if dep_name:
element['depends'] = [dep_name]
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
@pytest.mark.datafiles(DATA_DIR)
@@ -290,8 +290,8 @@ def test_track_include(cli, tmpdir, datafiles, ref_storage, kind):
]
}
- _yaml.dump(element, os.path.join(element_path, element_name))
- _yaml.dump(sources, os.path.join(element_path, 'sources.yml'))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
+ _yaml.roundtrip_dump(sources, os.path.join(element_path, 'sources.yml'))
# Assert that a fetch is needed
assert cli.get_element_state(project, element_name) == 'no reference'
@@ -321,13 +321,13 @@ def test_track_include(cli, tmpdir, datafiles, ref_storage, kind):
# Get all of the sources
assert 'sources' in new_sources
- sources_list = _yaml.node_get(new_sources, list, 'sources')
+ sources_list = new_sources.get_sequence('sources')
assert len(sources_list) == 1
# Get the first source from the sources list
- new_source = _yaml.node_get(new_sources, dict, 'sources', indices=[0])
+ new_source = sources_list.mapping_at(0)
assert 'ref' in new_source
- assert ref == _yaml.node_get(new_source, str, 'ref')
+ assert ref == new_source.get_str('ref')
@pytest.mark.datafiles(DATA_DIR)
@@ -363,8 +363,8 @@ def test_track_include_junction(cli, tmpdir, datafiles, ref_storage, kind):
]
}
- _yaml.dump(element, os.path.join(element_path, element_name))
- _yaml.dump(sources, os.path.join(sub_element_path, 'sources.yml'))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
+ _yaml.roundtrip_dump(sources, os.path.join(sub_element_path, 'sources.yml'))
generate_junction(str(tmpdir.join('junction_repo')),
subproject_path, junction_path, store_ref=True)
diff --git a/src/buildstream/testing/_sourcetests/track_cross_junction.py b/src/buildstream/testing/_sourcetests/track_cross_junction.py
index ece3e0b8f..31443bdf9 100644
--- a/src/buildstream/testing/_sourcetests/track_cross_junction.py
+++ b/src/buildstream/testing/_sourcetests/track_cross_junction.py
@@ -42,7 +42,7 @@ def generate_element(repo, element_path, dep_name=None):
if dep_name:
element['depends'] = [dep_name]
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
def generate_import_element(tmpdir, kind, project, name):
@@ -75,7 +75,7 @@ def generate_project(tmpdir, name, config=None):
'element-path': 'elements'
}
project_conf.update(config)
- _yaml.dump(project_conf, os.path.join(subproject_path, 'project.conf'))
+ _yaml.roundtrip_dump(project_conf, os.path.join(subproject_path, 'project.conf'))
return project_name, subproject_path
@@ -87,7 +87,7 @@ def generate_simple_stack(project, name, dependencies):
'kind': 'stack',
'depends': dependencies
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
return element_name
diff --git a/src/buildstream/testing/_sourcetests/workspace.py b/src/buildstream/testing/_sourcetests/workspace.py
index 5218f8f1e..5ceab5108 100644
--- a/src/buildstream/testing/_sourcetests/workspace.py
+++ b/src/buildstream/testing/_sourcetests/workspace.py
@@ -73,9 +73,8 @@ class WorkspaceCreator():
}
if element_attrs:
element = {**element, **element_attrs}
- _yaml.dump(element,
- os.path.join(element_path,
- element_name))
+ _yaml.roundtrip_dump(element,
+ os.path.join(element_path, element_name))
return element_name, element_path, workspace_dir
def create_workspace_elements(self, kinds, track, suffixs=None, workspace_dir_usr=None,
diff --git a/src/buildstream/testing/_utils/__init__.py b/src/buildstream/testing/_utils/__init__.py
index b419d72b7..575226e22 100644
--- a/src/buildstream/testing/_utils/__init__.py
+++ b/src/buildstream/testing/_utils/__init__.py
@@ -7,4 +7,4 @@ from .junction import generate_junction
def configure_project(path, config):
config['name'] = 'test'
config['element-path'] = 'elements'
- _yaml.dump(config, os.path.join(path, 'project.conf'))
+ _yaml.roundtrip_dump(config, os.path.join(path, 'project.conf'))
diff --git a/src/buildstream/testing/_utils/junction.py b/src/buildstream/testing/_utils/junction.py
index 2bf53ac7c..98d23b0a2 100644
--- a/src/buildstream/testing/_utils/junction.py
+++ b/src/buildstream/testing/_utils/junction.py
@@ -34,7 +34,7 @@ def generate_junction(tmpdir, subproject_path, junction_path, *, store_ref=True)
repo.source_config(ref=source_ref)
]
}
- _yaml.dump(element, junction_path)
+ _yaml.roundtrip_dump(element, junction_path)
return ref
diff --git a/src/buildstream/testing/runcli.py b/src/buildstream/testing/runcli.py
index 02334aa53..95bf83eff 100644
--- a/src/buildstream/testing/runcli.py
+++ b/src/buildstream/testing/runcli.py
@@ -573,9 +573,9 @@ class CliIntegration(Cli):
project_config = _yaml.load(temp_project)
- _yaml.composite_dict(base_config, project_config)
+ project_config._composite(base_config)
- _yaml.dump(base_config, project_filename)
+ _yaml.roundtrip_dump(base_config, project_filename)
else:
@@ -883,6 +883,6 @@ def configured(directory, config=None):
# Dump it and yield the filename for test scripts to feed it
# to buildstream as an argument
filename = os.path.join(directory, "buildstream.conf")
- _yaml.dump(config, filename)
+ _yaml.roundtrip_dump(config, filename)
yield filename
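
# Note the flipped call direction in the runcli.py hunk: the old helper was
# _yaml.composite_dict(target, source), while the method form is
# source._composite(target). A sketch with invented configs:
from buildstream.node import Node

base_config = Node.from_dict({'name': 'base'})
project_config = Node.from_dict({'name': 'project'})
project_config._composite(base_config)          # project_config's keys win in base_config
assert base_config.get_str('name') == 'project'
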
diff --git a/tests/artifactcache/cache_size.py b/tests/artifactcache/cache_size.py
index 3a2956c5c..fb34b5fad 100644
--- a/tests/artifactcache/cache_size.py
+++ b/tests/artifactcache/cache_size.py
@@ -22,7 +22,7 @@ def create_project(project_dir):
project_conf = {
"name": "test"
}
- _yaml.dump(project_conf, project_file)
+ _yaml.roundtrip_dump(project_conf, project_file)
element_name = "test.bst"
create_element_size(element_name, project_dir, ".", [], 1024)
@@ -77,7 +77,7 @@ def test_quota_over_1024T(cli, tmpdir):
})
project = tmpdir.join("main")
os.makedirs(str(project))
- _yaml.dump({'name': 'main'}, str(project.join("project.conf")))
+ _yaml.roundtrip_dump({'name': 'main'}, str(project.join("project.conf")))
volume_space_patch = mock.patch(
"buildstream._cas.CASQuota._get_cache_volume_size",
diff --git a/tests/artifactcache/config.py b/tests/artifactcache/config.py
index 928a6c524..db51d196c 100644
--- a/tests/artifactcache/config.py
+++ b/tests/artifactcache/config.py
@@ -101,11 +101,11 @@ def test_artifact_cache_precedence(tmpdir, override_caches, project_caches, user
project_config['name'] = 'test'
user_config_file = str(tmpdir.join('buildstream.conf'))
- _yaml.dump(user_config, filename=user_config_file)
+ _yaml.roundtrip_dump(user_config, file=user_config_file)
project_dir = tmpdir.mkdir('project')
project_config_file = str(project_dir.join('project.conf'))
- _yaml.dump(project_config, filename=project_config_file)
+ _yaml.roundtrip_dump(project_config, file=project_config_file)
context = Context()
context.load(config=user_config_file)
@@ -141,7 +141,7 @@ def test_missing_certs(cli, datafiles, config_key, config_value):
}
}
project_conf_file = os.path.join(project, 'project.conf')
- _yaml.dump(project_conf, project_conf_file)
+ _yaml.roundtrip_dump(project_conf, project_conf_file)
# Use `pull` here to ensure we try to initialize the remotes, triggering the error
#
diff --git a/tests/artifactcache/junctions.py b/tests/artifactcache/junctions.py
index 2798f032c..34d6916e8 100644
--- a/tests/artifactcache/junctions.py
+++ b/tests/artifactcache/junctions.py
@@ -20,11 +20,11 @@ DATA_DIR = os.path.join(
def project_set_artifacts(project, url):
project_conf_file = os.path.join(project, 'project.conf')
project_config = _yaml.load(project_conf_file)
- _yaml.node_set(project_config, 'artifacts', {
+ project_config['artifacts'] = {
'url': url,
'push': True
- })
- _yaml.dump(project_config, filename=project_conf_file)
+ }
+ _yaml.roundtrip_dump(project_config._strip_node_info(), file=project_conf_file)
@pytest.mark.datafiles(DATA_DIR)
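
# The junctions.py hunk shows both halves of the write path: __setitem__ wraps
# plain values into nodes, and _strip_node_info() unwraps them again for
# dumping. A hedged sketch; the URL is invented and _strip_node_info() is
# internal:
from buildstream import _yaml

project_config = _yaml.load('project.conf')
project_config['artifacts'] = {'url': 'https://cache.example.com', 'push': True}
_yaml.roundtrip_dump(project_config._strip_node_info(), file='project.conf')
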
diff --git a/tests/artifactcache/pull.py b/tests/artifactcache/pull.py
index a4ea74633..72f3103f0 100644
--- a/tests/artifactcache/pull.py
+++ b/tests/artifactcache/pull.py
@@ -73,7 +73,7 @@ def test_pull(cli, tmpdir, datafiles):
}
# Write down the user configuration file
- _yaml.dump(user_config, filename=user_config_file)
+ _yaml.roundtrip_dump(user_config, file=user_config_file)
# Ensure CLI calls will use it
cli.configure(user_config)
@@ -182,7 +182,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
}
# Write down the user configuration file
- _yaml.dump(user_config, filename=user_config_file)
+ _yaml.roundtrip_dump(user_config, file=user_config_file)
# Ensure CLI calls will use it
cli.configure(user_config)
diff --git a/tests/artifactcache/push.py b/tests/artifactcache/push.py
index a54c1df09..9b976c490 100644
--- a/tests/artifactcache/push.py
+++ b/tests/artifactcache/push.py
@@ -66,7 +66,7 @@ def test_push(cli, tmpdir, datafiles):
}
# Write down the user configuration file
- _yaml.dump(user_config, filename=user_config_file)
+ _yaml.roundtrip_dump(user_config, file=user_config_file)
# Fake minimal context
context = Context()
@@ -164,7 +164,7 @@ def test_push_message(tmpdir, datafiles):
}
# Write down the user configuration file
- _yaml.dump(user_config, filename=user_config_file)
+ _yaml.roundtrip_dump(user_config, file=user_config_file)
queue = multiprocessing.Queue()
# Use subprocess to avoid creation of gRPC threads in main BuildStream process
diff --git a/tests/cachekey/cachekey.py b/tests/cachekey/cachekey.py
index bbececb8c..acfe97947 100644
--- a/tests/cachekey/cachekey.py
+++ b/tests/cachekey/cachekey.py
@@ -198,11 +198,11 @@ def test_cache_key_fatal_warnings(cli, tmpdir, first_warnings, second_warnings,
project_dir = tmpdir.mkdir(project_name)
project_config_file = str(project_dir.join('project.conf'))
- _yaml.dump(config, filename=project_config_file)
+ _yaml.roundtrip_dump(config, file=project_config_file)
elem_dir = project_dir.mkdir('elements')
element_file = str(elem_dir.join('stack.bst'))
- _yaml.dump({'kind': 'stack'}, filename=element_file)
+ _yaml.roundtrip_dump({'kind': 'stack'}, file=element_file)
result = cli.run(project=str(project_dir), args=[
'show',
diff --git a/tests/elements/filter.py b/tests/elements/filter.py
index d89c834e0..db20529bc 100644
--- a/tests/elements/filter.py
+++ b/tests/elements/filter.py
@@ -202,7 +202,7 @@ def test_filter_track(datafiles, cli, tmpdir):
"element-path": "elements",
}
project_file = os.path.join(str(tmpdir), "project.conf")
- _yaml.dump(project_config, project_file)
+ _yaml.roundtrip_dump(project_config, project_file)
input_config = {
"kind": "import",
@@ -210,7 +210,7 @@ def test_filter_track(datafiles, cli, tmpdir):
}
input_file = os.path.join(elements_dir, input_name)
- _yaml.dump(input_config, input_file)
+ _yaml.roundtrip_dump(input_config, input_file)
filter1_config = {
"kind": "filter",
@@ -219,7 +219,7 @@ def test_filter_track(datafiles, cli, tmpdir):
]
}
filter1_file = os.path.join(elements_dir, "filter1.bst")
- _yaml.dump(filter1_config, filter1_file)
+ _yaml.roundtrip_dump(filter1_config, filter1_file)
filter2_config = {
"kind": "filter",
@@ -228,7 +228,7 @@ def test_filter_track(datafiles, cli, tmpdir):
]
}
filter2_file = os.path.join(elements_dir, "filter2.bst")
- _yaml.dump(filter2_config, filter2_file)
+ _yaml.roundtrip_dump(filter2_config, filter2_file)
# Assert that a fetch is needed
assert cli.get_element_state(project, input_name) == 'no reference'
@@ -239,8 +239,8 @@ def test_filter_track(datafiles, cli, tmpdir):
# Now check that a ref field exists
new_input = _yaml.load(input_file)
- source_node = _yaml.node_get(new_input, dict, 'sources', indices=[0])
- new_input_ref = _yaml.node_get(source_node, str, 'ref')
+ source_node = new_input.get_sequence('sources').mapping_at(0)
+ new_input_ref = source_node.get_str('ref')
assert new_input_ref == ref
@@ -257,7 +257,7 @@ def test_filter_track_excepted(datafiles, cli, tmpdir):
"element-path": "elements",
}
project_file = os.path.join(str(tmpdir), "project.conf")
- _yaml.dump(project_config, project_file)
+ _yaml.roundtrip_dump(project_config, project_file)
input_config = {
"kind": "import",
@@ -265,7 +265,7 @@ def test_filter_track_excepted(datafiles, cli, tmpdir):
}
input_file = os.path.join(elements_dir, input_name)
- _yaml.dump(input_config, input_file)
+ _yaml.roundtrip_dump(input_config, input_file)
filter1_config = {
"kind": "filter",
@@ -274,7 +274,7 @@ def test_filter_track_excepted(datafiles, cli, tmpdir):
]
}
filter1_file = os.path.join(elements_dir, "filter1.bst")
- _yaml.dump(filter1_config, filter1_file)
+ _yaml.roundtrip_dump(filter1_config, filter1_file)
filter2_config = {
"kind": "filter",
@@ -283,7 +283,7 @@ def test_filter_track_excepted(datafiles, cli, tmpdir):
]
}
filter2_file = os.path.join(elements_dir, "filter2.bst")
- _yaml.dump(filter2_config, filter2_file)
+ _yaml.roundtrip_dump(filter2_config, filter2_file)
# Assert that a fetch is needed
assert cli.get_element_state(project, input_name) == 'no reference'
@@ -294,7 +294,7 @@ def test_filter_track_excepted(datafiles, cli, tmpdir):
# Now check that a ref field exists
new_input = _yaml.load(input_file)
- source_node = _yaml.node_get(new_input, dict, 'sources', indices=[0])
+ source_node = new_input.get_sequence('sources').mapping_at(0)
assert 'ref' not in source_node
@@ -311,7 +311,7 @@ def test_filter_track_multi_to_one(datafiles, cli, tmpdir):
"element-path": "elements",
}
project_file = os.path.join(str(tmpdir), "project.conf")
- _yaml.dump(project_config, project_file)
+ _yaml.roundtrip_dump(project_config, project_file)
input_config = {
"kind": "import",
@@ -319,7 +319,7 @@ def test_filter_track_multi_to_one(datafiles, cli, tmpdir):
}
input_file = os.path.join(elements_dir, input_name)
- _yaml.dump(input_config, input_file)
+ _yaml.roundtrip_dump(input_config, input_file)
filter1_config = {
"kind": "filter",
@@ -328,7 +328,7 @@ def test_filter_track_multi_to_one(datafiles, cli, tmpdir):
]
}
filter1_file = os.path.join(elements_dir, "filter1.bst")
- _yaml.dump(filter1_config, filter1_file)
+ _yaml.roundtrip_dump(filter1_config, filter1_file)
filter2_config = {
"kind": "filter",
@@ -337,7 +337,7 @@ def test_filter_track_multi_to_one(datafiles, cli, tmpdir):
]
}
filter2_file = os.path.join(elements_dir, "filter2.bst")
- _yaml.dump(filter2_config, filter2_file)
+ _yaml.roundtrip_dump(filter2_config, filter2_file)
# Assert that a fetch is needed
assert cli.get_element_state(project, input_name) == 'no reference'
@@ -348,8 +348,8 @@ def test_filter_track_multi_to_one(datafiles, cli, tmpdir):
# Now check that a ref field exists
new_input = _yaml.load(input_file)
- source_node = _yaml.node_get(new_input, dict, 'sources', indices=[0])
- new_ref = _yaml.node_get(source_node, str, 'ref')
+ source_node = new_input.get_sequence('sources').mapping_at(0)
+ new_ref = source_node.get_str('ref')
assert new_ref == ref
@@ -367,7 +367,7 @@ def test_filter_track_multi(datafiles, cli, tmpdir):
"element-path": "elements",
}
project_file = os.path.join(str(tmpdir), "project.conf")
- _yaml.dump(project_config, project_file)
+ _yaml.roundtrip_dump(project_config, project_file)
input_config = {
"kind": "import",
@@ -375,11 +375,11 @@ def test_filter_track_multi(datafiles, cli, tmpdir):
}
input_file = os.path.join(elements_dir, input_name)
- _yaml.dump(input_config, input_file)
+ _yaml.roundtrip_dump(input_config, input_file)
input2_config = dict(input_config)
input2_file = os.path.join(elements_dir, input2_name)
- _yaml.dump(input2_config, input2_file)
+ _yaml.roundtrip_dump(input2_config, input2_file)
filter1_config = {
"kind": "filter",
@@ -388,7 +388,7 @@ def test_filter_track_multi(datafiles, cli, tmpdir):
]
}
filter1_file = os.path.join(elements_dir, "filter1.bst")
- _yaml.dump(filter1_config, filter1_file)
+ _yaml.roundtrip_dump(filter1_config, filter1_file)
filter2_config = {
"kind": "filter",
@@ -397,7 +397,7 @@ def test_filter_track_multi(datafiles, cli, tmpdir):
]
}
filter2_file = os.path.join(elements_dir, "filter2.bst")
- _yaml.dump(filter2_config, filter2_file)
+ _yaml.roundtrip_dump(filter2_config, filter2_file)
# Assert that a fetch is needed
states = cli.get_element_states(project, [input_name, input2_name])
@@ -413,13 +413,13 @@ def test_filter_track_multi(datafiles, cli, tmpdir):
# Now check that a ref field exists
new_input = _yaml.load(input_file)
- source_node = _yaml.node_get(new_input, dict, 'sources', indices=[0])
- new_ref = _yaml.node_get(source_node, str, 'ref')
+ source_node = new_input.get_sequence('sources').mapping_at(0)
+ new_ref = source_node.get_str('ref')
assert new_ref == ref
new_input2 = _yaml.load(input2_file)
- source_node2 = _yaml.node_get(new_input2, dict, 'sources', indices=[0])
- new_ref2 = _yaml.node_get(source_node2, str, 'ref')
+ source_node2 = new_input2.get_sequence('sources').mapping_at(0)
+ new_ref2 = source_node2.get_str('ref')
assert new_ref2 == ref
@@ -437,7 +437,7 @@ def test_filter_track_multi_exclude(datafiles, cli, tmpdir):
"element-path": "elements",
}
project_file = os.path.join(str(tmpdir), "project.conf")
- _yaml.dump(project_config, project_file)
+ _yaml.roundtrip_dump(project_config, project_file)
input_config = {
"kind": "import",
@@ -445,11 +445,11 @@ def test_filter_track_multi_exclude(datafiles, cli, tmpdir):
}
input_file = os.path.join(elements_dir, input_name)
- _yaml.dump(input_config, input_file)
+ _yaml.roundtrip_dump(input_config, input_file)
input2_config = dict(input_config)
input2_file = os.path.join(elements_dir, input2_name)
- _yaml.dump(input2_config, input2_file)
+ _yaml.roundtrip_dump(input2_config, input2_file)
filter1_config = {
"kind": "filter",
@@ -458,7 +458,7 @@ def test_filter_track_multi_exclude(datafiles, cli, tmpdir):
]
}
filter1_file = os.path.join(elements_dir, "filter1.bst")
- _yaml.dump(filter1_config, filter1_file)
+ _yaml.roundtrip_dump(filter1_config, filter1_file)
filter2_config = {
"kind": "filter",
@@ -467,7 +467,7 @@ def test_filter_track_multi_exclude(datafiles, cli, tmpdir):
]
}
filter2_file = os.path.join(elements_dir, "filter2.bst")
- _yaml.dump(filter2_config, filter2_file)
+ _yaml.roundtrip_dump(filter2_config, filter2_file)
# Assert that a fetch is needed
states = cli.get_element_states(project, [input_name, input2_name])
@@ -482,12 +482,12 @@ def test_filter_track_multi_exclude(datafiles, cli, tmpdir):
# Now check that a ref field exists
new_input = _yaml.load(input_file)
- source_node = _yaml.node_get(new_input, dict, 'sources', indices=[0])
+ source_node = new_input.get_sequence('sources').mapping_at(0)
assert 'ref' not in source_node
new_input2 = _yaml.load(input2_file)
- source_node2 = _yaml.node_get(new_input2, dict, 'sources', indices=[0])
- new_ref2 = _yaml.node_get(source_node2, str, 'ref')
+ source_node2 = new_input2.get_sequence('sources').mapping_at(0)
+ new_ref2 = source_node2.get_str('ref')
assert new_ref2 == ref
diff --git a/tests/elements/filter/basic/element_plugins/dynamic.py b/tests/elements/filter/basic/element_plugins/dynamic.py
index c6d128b72..fe83d7295 100644
--- a/tests/elements/filter/basic/element_plugins/dynamic.py
+++ b/tests/elements/filter/basic/element_plugins/dynamic.py
@@ -4,8 +4,8 @@ from buildstream import Element, Scope
# Copies files from the dependent element but inserts split-rules using dynamic data
class DynamicElement(Element):
def configure(self, node):
- self.node_validate(node, ['split-rules'])
- self.split_rules = self.node_get_member(node, dict, 'split-rules')
+ node.validate_keys(['split-rules'])
+ self.split_rules = {key: value.as_str_list() for key, value in node.get_mapping('split-rules').items()}
def preflight(self):
pass
@@ -25,7 +25,7 @@ class DynamicElement(Element):
dep.stage_artifact(sandbox)
bstdata = self.get_public_data("bst")
- self.node_set_member(bstdata, "split-rules", self.split_rules)
+ bstdata["split-rules"] = self.split_rules
self.set_public_data("bst", bstdata)
return ""
diff --git a/tests/format/include.py b/tests/format/include.py
index bfadce7ed..8902aa3eb 100644
--- a/tests/format/include.py
+++ b/tests/format/include.py
@@ -28,14 +28,14 @@ def test_include_project_file(cli, datafiles):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, bool, 'included')
+ assert loaded.get_bool('included')
def test_include_missing_file(cli, tmpdir):
tmpdir.join('project.conf').write('{"name": "test"}')
element = tmpdir.join('include_missing_file.bst')
- # Normally we would use dicts and _yaml.dump to write such things, but here
+ # Normally we would use dicts and _yaml.roundtrip_dump to write such things, but here
# we want to be sure of a stable line and column number.
element.write(textwrap.dedent("""
kind: manual
@@ -55,7 +55,7 @@ def test_include_dir(cli, tmpdir):
tmpdir.mkdir('subdir')
element = tmpdir.join('include_dir.bst')
- # Normally we would use dicts and _yaml.dump to write such things, but here
+ # Normally we would use dicts and _yaml.roundtrip_dump to write such things, but here
# we want to be sure of a stable line and column number.
element.write(textwrap.dedent("""
kind: manual
@@ -87,7 +87,7 @@ def test_include_junction_file(cli, tmpdir, datafiles):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, bool, 'included')
+ assert loaded.get_bool('included')
@pytest.mark.datafiles(DATA_DIR)
@@ -102,7 +102,7 @@ def test_include_junction_options(cli, datafiles):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'build_arch') == 'x86_64'
+ assert loaded.get_str('build_arch') == 'x86_64'
@pytest.mark.datafiles(DATA_DIR)
@@ -126,7 +126,7 @@ def test_junction_element_partial_project_project(cli, tmpdir, datafiles):
repo.source_config(ref=ref)
]
}
- _yaml.dump(element, junction_path)
+ _yaml.roundtrip_dump(element, junction_path)
result = cli.run(project=project, args=[
'show',
@@ -135,7 +135,7 @@ def test_junction_element_partial_project_project(cli, tmpdir, datafiles):
'junction.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'included', default_value=None) is None
+ assert loaded.get_str('included', default=None) is None
@pytest.mark.datafiles(DATA_DIR)
@@ -159,7 +159,7 @@ def test_junction_element_not_partial_project_file(cli, tmpdir, datafiles):
repo.source_config(ref=ref)
]
}
- _yaml.dump(element, junction_path)
+ _yaml.roundtrip_dump(element, junction_path)
result = cli.run(project=project, args=[
'show',
@@ -168,7 +168,7 @@ def test_junction_element_not_partial_project_file(cli, tmpdir, datafiles):
'junction.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'included', default_value=None) is not None
+ assert loaded.get_str('included', default=None) is not None
@pytest.mark.datafiles(DATA_DIR)
@@ -182,8 +182,8 @@ def test_include_element_overrides(cli, datafiles):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'manual_main_override', default_value=None) is not None
- assert _yaml.node_get(loaded, str, 'manual_included_override', default_value=None) is not None
+ assert loaded.get_str('manual_main_override', default=None) is not None
+ assert loaded.get_str('manual_included_override', default=None) is not None
@pytest.mark.datafiles(DATA_DIR)
@@ -197,7 +197,7 @@ def test_include_element_overrides_composition(cli, datafiles):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, list, 'build-commands') == ['first', 'second']
+ assert loaded.get_sequence('build-commands').as_str_list() == ['first', 'second']
@pytest.mark.datafiles(DATA_DIR)
@@ -213,9 +213,9 @@ def test_list_overide_does_not_fail_upon_first_composition(cli, datafiles):
loaded = _yaml.load_data(result.output)
# Assert that the explicitly overwritten public data is present
- bst = _yaml.node_get(loaded, dict, 'bst')
+ bst = loaded.get_mapping('bst')
assert 'foo-commands' in bst
- assert _yaml.node_get(bst, list, 'foo-commands') == ['need', 'this']
+ assert bst.get_sequence('foo-commands').as_str_list() == ['need', 'this']
@pytest.mark.datafiles(DATA_DIR)
@@ -229,7 +229,7 @@ def test_include_element_overrides_sub_include(cli, datafiles):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'included', default_value=None) is not None
+ assert loaded.get_str('included', default=None) is not None
@pytest.mark.datafiles(DATA_DIR)
@@ -248,8 +248,8 @@ def test_junction_do_not_use_included_overrides(cli, tmpdir, datafiles):
'junction.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'main_override', default_value=None) is not None
- assert _yaml.node_get(loaded, str, 'included_override', default_value=None) is None
+ assert loaded.get_str('main_override', default=None) is not None
+ assert loaded.get_str('included_override', default=None) is None
@pytest.mark.datafiles(DATA_DIR)
@@ -264,7 +264,7 @@ def test_conditional_in_fragment(cli, datafiles):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'size') == '8'
+ assert loaded.get_str('size') == '8'
@pytest.mark.datafiles(DATA_DIR)
@@ -278,7 +278,7 @@ def test_inner(cli, datafiles):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'build_arch') == 'x86_64'
+ assert loaded.get_str('build_arch') == 'x86_64'
@pytest.mark.datafiles(DATA_DIR)
@@ -310,4 +310,4 @@ def test_local_to_junction(cli, tmpdir, datafiles):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, bool, 'included')
+ assert loaded.get_bool('included')
diff --git a/tests/format/include_composition.py b/tests/format/include_composition.py
index 4afde817d..f764b16a6 100644
--- a/tests/format/include_composition.py
+++ b/tests/format/include_composition.py
@@ -6,150 +6,141 @@ from buildstream import _yaml
def make_includes(basedir):
- _yaml.dump({'name': 'test'},
- os.path.join(basedir, 'project.conf'))
+ _yaml.roundtrip_dump({'name': 'test'}, os.path.join(basedir, 'project.conf'))
context = Context()
project = Project(basedir, context)
loader = project.loader
return Includes(loader)
-def test_main_has_prority(tmpdir):
+def test_main_has_priority(tmpdir):
includes = make_includes(str(tmpdir))
- _yaml.dump({'(@)': ['a.yml'],
- 'test': ['main']},
- str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump({'(@)': ['a.yml'], 'test': ['main']},
+ str(tmpdir.join('main.yml')))
main = _yaml.load(str(tmpdir.join('main.yml')))
- _yaml.dump({'test': ['a']},
- str(tmpdir.join('a.yml')))
+ _yaml.roundtrip_dump({'test': ['a']}, str(tmpdir.join('a.yml')))
includes.process(main)
- assert _yaml.node_get(main, list, 'test') == ['main']
+ assert main.get_sequence('test').as_str_list() == ['main']
def test_include_cannot_append(tmpdir):
includes = make_includes(str(tmpdir))
- _yaml.dump({'(@)': ['a.yml'],
- 'test': ['main']},
- str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump({'(@)': ['a.yml'], 'test': ['main']},
+ str(tmpdir.join('main.yml')))
main = _yaml.load(str(tmpdir.join('main.yml')))
- _yaml.dump({'test': {'(>)': ['a']}},
- str(tmpdir.join('a.yml')))
+ _yaml.roundtrip_dump({'test': {'(>)': ['a']}},
+ str(tmpdir.join('a.yml')))
includes.process(main)
- assert _yaml.node_get(main, list, 'test') == ['main']
+ assert main.get_sequence('test').as_str_list() == ['main']
def test_main_can_append(tmpdir):
includes = make_includes(str(tmpdir))
- _yaml.dump({'(@)': ['a.yml'],
- 'test': {'(>)': ['main']}},
- str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump({'(@)': ['a.yml'], 'test': {'(>)': ['main']}},
+ str(tmpdir.join('main.yml')))
main = _yaml.load(str(tmpdir.join('main.yml')))
- _yaml.dump({'test': ['a']},
- str(tmpdir.join('a.yml')))
+ _yaml.roundtrip_dump({'test': ['a']}, str(tmpdir.join('a.yml')))
includes.process(main)
- assert _yaml.node_get(main, list, 'test') == ['a', 'main']
+ assert main.get_sequence('test').as_str_list() == ['a', 'main']
def test_sibling_cannot_append_backward(tmpdir):
includes = make_includes(str(tmpdir))
- _yaml.dump({'(@)': ['a.yml', 'b.yml']},
- str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml']},
+ str(tmpdir.join('main.yml')))
main = _yaml.load(str(tmpdir.join('main.yml')))
- _yaml.dump({'test': {'(>)': ['a']}},
- str(tmpdir.join('a.yml')))
- _yaml.dump({'test': ['b']},
- str(tmpdir.join('b.yml')))
+ _yaml.roundtrip_dump({'test': {'(>)': ['a']}},
+ str(tmpdir.join('a.yml')))
+ _yaml.roundtrip_dump({'test': ['b']},
+ str(tmpdir.join('b.yml')))
includes.process(main)
- assert _yaml.node_get(main, list, 'test') == ['b']
+ assert main.get_sequence('test').as_str_list() == ['b']
def test_sibling_can_append_forward(tmpdir):
includes = make_includes(str(tmpdir))
- _yaml.dump({'(@)': ['a.yml', 'b.yml']},
- str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml']},
+ str(tmpdir.join('main.yml')))
main = _yaml.load(str(tmpdir.join('main.yml')))
- _yaml.dump({'test': ['a']},
- str(tmpdir.join('a.yml')))
- _yaml.dump({'test': {'(>)': ['b']}},
- str(tmpdir.join('b.yml')))
+ _yaml.roundtrip_dump({'test': ['a']},
+ str(tmpdir.join('a.yml')))
+ _yaml.roundtrip_dump({'test': {'(>)': ['b']}},
+ str(tmpdir.join('b.yml')))
includes.process(main)
- assert _yaml.node_get(main, list, 'test') == ['a', 'b']
+ assert main.get_sequence('test').as_str_list() == ['a', 'b']
def test_latest_sibling_has_priority(tmpdir):
includes = make_includes(str(tmpdir))
- _yaml.dump({'(@)': ['a.yml', 'b.yml']},
- str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml']},
+ str(tmpdir.join('main.yml')))
main = _yaml.load(str(tmpdir.join('main.yml')))
- _yaml.dump({'test': ['a']},
- str(tmpdir.join('a.yml')))
- _yaml.dump({'test': ['b']},
- str(tmpdir.join('b.yml')))
+ _yaml.roundtrip_dump({'test': ['a']},
+ str(tmpdir.join('a.yml')))
+ _yaml.roundtrip_dump({'test': ['b']},
+ str(tmpdir.join('b.yml')))
includes.process(main)
- assert _yaml.node_get(main, list, 'test') == ['b']
+ assert main.get_sequence('test').as_str_list() == ['b']
def test_main_keeps_keys(tmpdir):
includes = make_includes(str(tmpdir))
- _yaml.dump({'(@)': ['a.yml'],
- 'something': 'else'},
- str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump({'(@)': ['a.yml'], 'something': 'else'},
+ str(tmpdir.join('main.yml')))
main = _yaml.load(str(tmpdir.join('main.yml')))
- _yaml.dump({'test': ['a']},
- str(tmpdir.join('a.yml')))
+ _yaml.roundtrip_dump({'test': ['a']}, str(tmpdir.join('a.yml')))
includes.process(main)
- assert _yaml.node_get(main, list, 'test') == ['a']
- assert _yaml.node_get(main, str, 'something') == 'else'
+ assert main.get_sequence('test').as_str_list() == ['a']
+ assert main.get_str('something') == 'else'
def test_overwrite_directive_on_later_composite(tmpdir):
includes = make_includes(str(tmpdir))
- _yaml.dump({'(@)': ['a.yml', 'b.yml'],
- 'test': {'(=)': ['Overwritten']}},
- str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml'], 'test': {'(=)': ['Overwritten']}},
+ str(tmpdir.join('main.yml')))
main = _yaml.load(str(tmpdir.join('main.yml')))
# a.yml
- _yaml.dump({'test': ['some useless', 'list', 'to be overwritten'],
- 'foo': 'should not be present'},
- str(tmpdir.join('a.yml')))
+ _yaml.roundtrip_dump({'test': ['some useless', 'list', 'to be overwritten'],
+ 'foo': 'should not be present'},
+ str(tmpdir.join('a.yml')))
# b.yml isn't going to have a 'test' node to overwrite
- _yaml.dump({'foo': 'should be present'},
- str(tmpdir.join('b.yml')))
+ _yaml.roundtrip_dump({'foo': 'should be present'},
+ str(tmpdir.join('b.yml')))
includes.process(main)
- assert _yaml.node_get(main, list, 'test') == ['Overwritten']
- assert _yaml.node_get(main, str, 'foo') == 'should be present'
+ assert main.get_sequence('test').as_str_list() == ['Overwritten']
+ assert main.get_str('foo') == 'should be present'
diff --git a/tests/format/junctions.py b/tests/format/junctions.py
index 8842bc617..b810c55a3 100644
--- a/tests/format/junctions.py
+++ b/tests/format/junctions.py
@@ -331,7 +331,7 @@ def test_git_show(cli, tmpdir, datafiles):
repo.source_config(ref=ref)
]
}
- _yaml.dump(element, os.path.join(project, 'base.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'base.bst'))
# Check that bst show succeeds with implicit subproject fetching and the
# pipeline includes the subproject element
@@ -356,7 +356,7 @@ def test_git_build(cli, tmpdir, datafiles):
repo.source_config(ref=ref)
]
}
- _yaml.dump(element, os.path.join(project, 'base.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'base.bst'))
# Build (with implicit fetch of subproject), checkout
result = cli.run(project=project, args=['build', 'target.bst'])
@@ -389,7 +389,7 @@ def test_git_missing_project_conf(cli, tmpdir, datafiles):
repo.source_config(ref=ref)
]
}
- _yaml.dump(element, str(project / 'base.bst'))
+ _yaml.roundtrip_dump(element, str(project / 'base.bst'))
result = cli.run(project=project, args=['build', 'app.bst'])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_JUNCTION)
@@ -423,7 +423,7 @@ def test_build_git_cross_junction_names(cli, tmpdir, datafiles):
repo.source_config(ref=ref)
]
}
- _yaml.dump(element, os.path.join(project, 'base.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'base.bst'))
print(element)
print(cli.get_pipeline(project, ['base.bst']))
diff --git a/tests/format/optionarch.py b/tests/format/optionarch.py
index a35ac685d..f347e27ae 100644
--- a/tests/format/optionarch.py
+++ b/tests/format/optionarch.py
@@ -48,7 +48,7 @@ def test_conditional(cli, datafiles, machine, value, expected):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'result') == expected
+ assert loaded.get_str('result') == expected
@pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/format/optionbool.py b/tests/format/optionbool.py
index 0d1ee601e..d772b483c 100644
--- a/tests/format/optionbool.py
+++ b/tests/format/optionbool.py
@@ -42,7 +42,7 @@ def test_conditional_cli(cli, datafiles, target, option, expected):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'thepony') == expected
+ assert loaded.get_str('thepony') == expected
# Test configuration of boolean option in the config file
@@ -71,7 +71,7 @@ def test_conditional_config(cli, datafiles, target, option, expected):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'thepony') == expected
+ assert loaded.get_str('thepony') == expected
@pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/format/optioneltmask.py b/tests/format/optioneltmask.py
index d33b5771c..75265fdd7 100644
--- a/tests/format/optioneltmask.py
+++ b/tests/format/optioneltmask.py
@@ -28,7 +28,7 @@ def test_conditional_cli(cli, datafiles, target, value, expected):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'debug') == expected
+ assert loaded.get_str('debug') == expected
@pytest.mark.datafiles(DATA_DIR)
@@ -56,7 +56,7 @@ def test_conditional_config(cli, datafiles, target, value, expected):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'debug') == expected
+ assert loaded.get_str('debug') == expected
@pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/format/optionenum.py b/tests/format/optionenum.py
index b8a96b0c2..f9aff503f 100644
--- a/tests/format/optionenum.py
+++ b/tests/format/optionenum.py
@@ -33,7 +33,7 @@ def test_conditional_cli(cli, datafiles, target, option, value, expected):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'result') == expected
+ assert loaded.get_str('result') == expected
@pytest.mark.datafiles(DATA_DIR)
@@ -66,7 +66,7 @@ def test_conditional_config(cli, datafiles, target, option, value, expected):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'result') == expected
+ assert loaded.get_str('result') == expected
@pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/format/optionexports.py b/tests/format/optionexports.py
index 5df7522b5..104abcf83 100644
--- a/tests/format/optionexports.py
+++ b/tests/format/optionexports.py
@@ -36,4 +36,4 @@ def test_export(cli, datafiles, option_name, option_value, var_name, var_value):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, var_name) == var_value
+ assert loaded.get_str(var_name) == var_value
diff --git a/tests/format/optionflags.py b/tests/format/optionflags.py
index e28c54236..29bb7ec2c 100644
--- a/tests/format/optionflags.py
+++ b/tests/format/optionflags.py
@@ -39,7 +39,7 @@ def test_conditional_cli(cli, datafiles, target, option, value, expected):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'result') == expected
+ assert loaded.get_str('result') == expected
@pytest.mark.datafiles(DATA_DIR)
@@ -69,7 +69,7 @@ def test_conditional_config(cli, datafiles, target, option, value, expected):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'result') == expected
+ assert loaded.get_str('result') == expected
@pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/format/optionos.py b/tests/format/optionos.py
index 57277b106..f915d889e 100644
--- a/tests/format/optionos.py
+++ b/tests/format/optionos.py
@@ -47,7 +47,7 @@ def test_conditionals(cli, datafiles, system, value, expected):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'result') == expected
+ assert loaded.get_str('result') == expected
@pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/format/optionoverrides.py b/tests/format/optionoverrides.py
index 60d02b3a3..d4ed257dd 100644
--- a/tests/format/optionoverrides.py
+++ b/tests/format/optionoverrides.py
@@ -29,4 +29,4 @@ def test_override(cli, datafiles, arch):
expected_value = '--host={}-unknown-linux-gnu'.format(arch)
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'conf-global') == expected_value
+ assert loaded.get_str('conf-global') == expected_value
diff --git a/tests/format/options.py b/tests/format/options.py
index 3a8210dc3..9c0e043f9 100644
--- a/tests/format/options.py
+++ b/tests/format/options.py
@@ -136,7 +136,7 @@ def test_simple_conditional(cli, datafiles, opt_option, expected_prefix):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'prefix') == expected_prefix
+ assert loaded.get_str('prefix') == expected_prefix
@pytest.mark.datafiles(DATA_DIR)
@@ -159,7 +159,7 @@ def test_nested_conditional(cli, datafiles, debug, logging, expected):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'debug') == expected
+ assert loaded.get_str('debug') == expected
@pytest.mark.datafiles(DATA_DIR)
@@ -182,7 +182,7 @@ def test_compound_and_conditional(cli, datafiles, debug, logging, expected):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'debug') == expected
+ assert loaded.get_str('debug') == expected
@pytest.mark.datafiles(DATA_DIR)
@@ -205,7 +205,7 @@ def test_compound_or_conditional(cli, datafiles, debug, logging, expected):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, str, 'logging') == expected
+ assert loaded.get_str('logging') == expected
@pytest.mark.datafiles(DATA_DIR)
@@ -223,10 +223,10 @@ def test_deep_nesting_level1(cli, datafiles, option, expected):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- shallow_list = _yaml.node_get(loaded, list, 'shallow-nest')
- first_dict = shallow_list[0]
+ shallow_list = loaded.get_sequence('shallow-nest')
+ first_dict = shallow_list.mapping_at(0)
- assert _yaml.node_get(first_dict, str, 'animal') == expected
+ assert first_dict.get_str('animal') == expected
@pytest.mark.datafiles(DATA_DIR)
@@ -244,8 +244,8 @@ def test_deep_nesting_level2(cli, datafiles, option, expected):
'element-deeper.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- shallow_list = _yaml.node_get(loaded, list, 'deep-nest')
- deeper_list = shallow_list[0]
- first_dict = deeper_list[0]
+ shallow_list = loaded.get_sequence('deep-nest')
+ deeper_list = shallow_list.sequence_at(0)
+ first_dict = deeper_list.mapping_at(0)
- assert _yaml.node_get(first_dict, str, 'animal') == expected
+ assert first_dict.get_str('animal') == expected
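The two deep-nesting hunks above show the pattern for walking nested YAML with the new Node API: get_sequence() returns a sequence node, and mapping_at()/sequence_at() fetch a typed child by index. A minimal sketch of the same traversal, assuming a BuildStream tree at this commit (the inline YAML is illustrative):

    from buildstream import _yaml

    # load_data() parses YAML from a string, as the tests do with
    # the output of `bst show`
    loaded = _yaml.load_data('deep-nest:\n- - animal: pony\n')

    shallow_list = loaded.get_sequence('deep-nest')  # sequence node
    deeper_list = shallow_list.sequence_at(0)        # nested sequence node
    first_dict = deeper_list.mapping_at(0)           # mapping node

    assert first_dict.get_str('animal') == 'pony'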
diff --git a/tests/format/project.py b/tests/format/project.py
index fbb742d47..2e0a729dc 100644
--- a/tests/format/project.py
+++ b/tests/format/project.py
@@ -82,8 +82,8 @@ def test_load_default_project(cli, datafiles):
# Read back some of our project defaults from the env
env = _yaml.load_data(result.output)
- assert _yaml.node_get(env, str, 'USER') == "tomjon"
- assert _yaml.node_get(env, str, 'TERM') == "dumb"
+ assert env.get_str('USER') == "tomjon"
+ assert env.get_str('TERM') == "dumb"
@pytest.mark.datafiles(os.path.join(DATA_DIR))
@@ -97,8 +97,8 @@ def test_load_project_from_subdir(cli, datafiles):
# Read back some of our project defaults from the env
env = _yaml.load_data(result.output)
- assert _yaml.node_get(env, str, 'USER') == "tomjon"
- assert _yaml.node_get(env, str, 'TERM') == "dumb"
+ assert env.get_str('USER') == "tomjon"
+ assert env.get_str('TERM') == "dumb"
@pytest.mark.datafiles(os.path.join(DATA_DIR))
@@ -111,7 +111,7 @@ def test_override_project_path(cli, datafiles):
# Read back the overridden path
env = _yaml.load_data(result.output)
- assert _yaml.node_get(env, str, 'PATH') == "/bin:/sbin"
+ assert env.get_str('PATH') == "/bin:/sbin"
@pytest.mark.datafiles(os.path.join(DATA_DIR))
@@ -200,7 +200,7 @@ def test_plugin_no_load_ref(cli, datafiles, ref_storage):
}
]
}
- _yaml.dump(config, os.path.join(project, 'project.conf'))
+ _yaml.roundtrip_dump(config, os.path.join(project, 'project.conf'))
result = cli.run(project=project, silent=True, args=['show', 'noloadref.bst'])
diff --git a/tests/format/projectoverrides.py b/tests/format/projectoverrides.py
index 4b0c3f4d0..730e43b1e 100644
--- a/tests/format/projectoverrides.py
+++ b/tests/format/projectoverrides.py
@@ -24,6 +24,6 @@ def test_prepend_configure_commands(cli, datafiles):
result.assert_success()
loaded = _yaml.load_data(result.output)
- config_commands = _yaml.node_get(loaded, list, 'configure-commands')
+ config_commands = loaded.get_sequence('configure-commands').as_str_list()
assert len(config_commands) == 3
assert config_commands[0] == 'echo "Hello World!"'
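When a test only needs an ordinary list of strings, get_sequence(...).as_str_list() replaces the old node_get(loaded, list, ...): it converts the sequence node into a plain Python list, so len(), indexing and equality behave as before. A minimal sketch, with illustrative inline YAML:

    from buildstream import _yaml

    loaded = _yaml.load_data(
        'configure-commands:\n'
        '- echo "Hello World!"\n'
        '- ./configure\n'
    )

    # as_str_list() yields a plain list of str, not a sequence node
    config_commands = loaded.get_sequence('configure-commands').as_str_list()
    assert len(config_commands) == 2
    assert config_commands[0] == 'echo "Hello World!"'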
diff --git a/tests/format/variables.py b/tests/format/variables.py
index 87e3b2903..93814279a 100644
--- a/tests/format/variables.py
+++ b/tests/format/variables.py
@@ -49,7 +49,7 @@ def test_defaults(cli, datafiles, target, varname, expected):
])
result.assert_success()
result_vars = _yaml.load_data(result.output)
- assert _yaml.node_get(result_vars, str, varname) == expected
+ assert result_vars.get_str(varname) == expected
################################################################
@@ -75,7 +75,7 @@ def test_overrides(cli, datafiles, target, varname, expected):
])
result.assert_success()
result_vars = _yaml.load_data(result.output)
- assert _yaml.node_get(result_vars, str, varname) == expected
+ assert result_vars.get_str(varname) == expected
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'missing_variables'))
@@ -110,7 +110,7 @@ def test_use_of_protected_var_project_conf(cli, datafiles, protected_var):
protected_var: 'some-value'
}
}
- _yaml.dump(conf, os.path.join(project, 'project.conf'))
+ _yaml.roundtrip_dump(conf, os.path.join(project, 'project.conf'))
element = {
'kind': 'import',
@@ -121,7 +121,7 @@ def test_use_of_protected_var_project_conf(cli, datafiles, protected_var):
}
],
}
- _yaml.dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
result = cli.run(project=project, args=['build', 'target.bst'])
result.assert_main_error(ErrorDomain.LOAD,
@@ -142,7 +142,7 @@ def test_use_of_protected_var_element_overrides(cli, datafiles, protected_var):
}
}
}
- _yaml.dump(conf, os.path.join(project, 'project.conf'))
+ _yaml.roundtrip_dump(conf, os.path.join(project, 'project.conf'))
element = {
'kind': 'manual',
@@ -153,7 +153,7 @@ def test_use_of_protected_var_element_overrides(cli, datafiles, protected_var):
}
],
}
- _yaml.dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
result = cli.run(project=project, args=['build', 'target.bst'])
result.assert_main_error(ErrorDomain.LOAD,
@@ -176,7 +176,7 @@ def test_use_of_protected_var_in_element(cli, datafiles, protected_var):
protected_var: 'some-value'
}
}
- _yaml.dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
result = cli.run(project=project, args=['build', 'target.bst'])
result.assert_main_error(ErrorDomain.LOAD,
diff --git a/tests/frontend/__init__.py b/tests/frontend/__init__.py
index 8cf7625a9..f1c8c41b8 100644
--- a/tests/frontend/__init__.py
+++ b/tests/frontend/__init__.py
@@ -7,4 +7,4 @@ from buildstream import _yaml
def configure_project(path, config):
config['name'] = 'test'
config['element-path'] = 'elements'
- _yaml.dump(config, os.path.join(path, 'project.conf'))
+ _yaml.roundtrip_dump(config, os.path.join(path, 'project.conf'))
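Every site that serialized a plain dict with _yaml.dump() now calls _yaml.roundtrip_dump() with the same (object, path) shape; one call site further down also passes the destination via a file= keyword. A minimal write/read round trip, assuming a BuildStream tree at this commit (the element contents are illustrative):

    import os
    import tempfile

    from buildstream import _yaml

    element = {'kind': 'stack', 'depends': ['base.bst']}

    with tempfile.TemporaryDirectory() as tmpdir:
        path = os.path.join(tmpdir, 'element.bst')
        # roundtrip_dump() takes a plain dict and a destination path,
        # just like the old _yaml.dump()
        _yaml.roundtrip_dump(element, path)

        # Reading it back yields a node with the typed accessors
        loaded = _yaml.load(path)
        assert loaded.get_str('kind') == 'stack'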
diff --git a/tests/frontend/buildcheckout.py b/tests/frontend/buildcheckout.py
index 97bce91a7..d3eec0d21 100644
--- a/tests/frontend/buildcheckout.py
+++ b/tests/frontend/buildcheckout.py
@@ -15,7 +15,7 @@ from buildstream import _yaml
from buildstream._exceptions import ErrorDomain, LoadErrorReason
from buildstream import utils
-from tests.testutils import generate_junction, yaml_file_get_provenance, create_artifact_share
+from tests.testutils import generate_junction, create_artifact_share
from . import configure_project
@@ -133,7 +133,7 @@ def test_build_invalid_filename_chars(datafiles, cli):
element = {
'kind': 'stack',
}
- _yaml.dump(element, os.path.join(project, 'elements', element_name))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'elements', element_name))
result = cli.run(project=project, args=strict_args(['build', element_name], 'non-strict'))
result.assert_main_error(ErrorDomain.LOAD, "bad-characters-in-name")
@@ -151,7 +151,7 @@ def test_build_invalid_filename_chars_dep(datafiles, cli):
element = {
'kind': 'stack',
}
- _yaml.dump(element, os.path.join(project, 'elements', element_name))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'elements', element_name))
result = cli.run(project=project, args=strict_args(['build', 'invalid-chars-in-dep.bst'], 'non-strict'))
result.assert_main_error(ErrorDomain.LOAD, "bad-characters-in-name")
@@ -452,7 +452,7 @@ def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage):
}
]
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
# Now try to track it, this will bail with the appropriate error
# informing the user to track the junction first
@@ -460,8 +460,9 @@ def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage):
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
# Assert that we have the expected provenance encoded into the error
- provenance = yaml_file_get_provenance(
- element_path, 'junction-dep.bst', key='depends', indices=[0])
+ element_node = _yaml.load(element_path, shortname='junction-dep.bst')
+ ref_node = element_node.get_sequence('depends').mapping_at(0)
+ provenance = ref_node.get_provenance()
assert str(provenance) in result.stderr
@@ -491,7 +492,7 @@ def test_unfetched_junction(cli, tmpdir, datafiles, ref_storage):
}
]
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
# Dump a project.refs if we're using project.refs storage
#
@@ -507,7 +508,7 @@ def test_unfetched_junction(cli, tmpdir, datafiles, ref_storage):
}
}
}
- _yaml.dump(project_refs, os.path.join(project, 'junction.refs'))
+ _yaml.roundtrip_dump(project_refs, os.path.join(project, 'junction.refs'))
# Now try to build it, this should automatically result in fetching
# the junction itself at load time.
@@ -540,7 +541,7 @@ def test_build_checkout_junction(cli, tmpdir, datafiles):
}
]
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
# Now try to build it, this should automatically result in fetching
# the junction itself at load time.
@@ -587,7 +588,7 @@ def test_build_checkout_junction_default_targets(cli, tmpdir, datafiles):
}
]
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
# Now try to build it, this should automatically result in fetching
# the junction itself at load time.
@@ -634,7 +635,7 @@ def test_build_checkout_workspaced_junction(cli, tmpdir, datafiles):
}
]
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
# Now open a workspace on the junction
#
@@ -711,7 +712,7 @@ def test_build_junction_short_notation(cli, tmpdir, datafiles):
'kind': 'stack',
'depends': ['junction.bst:import-etc.bst']
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
# Now try to build it, this should automatically result in fetching
# the junction itself at load time.
@@ -752,7 +753,7 @@ def test_build_junction_short_notation_filename(cli, tmpdir, datafiles):
'kind': 'stack',
'depends': [{'filename': 'junction.bst:import-etc.bst'}]
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
# Now try to build it, this should automatically result in fetching
# the junction itself at load time.
@@ -795,7 +796,7 @@ def test_build_junction_short_notation_with_junction(cli, tmpdir, datafiles):
'junction': 'junction.bst',
}]
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
# Now try to build it, this should fail as filenames should not contain
# `:` when junction is explicitly specified
@@ -819,7 +820,7 @@ def test_build_junction_transitive_short_notation_with_junction(cli, tmpdir, datafiles):
'kind': 'stack',
'depends': ['junction.bst:import-etc.bst:foo.bst']
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
# Now try to build it, this should fail as recursive lookups for
# cross-junction elements are not allowed.
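The removed yaml_file_get_provenance() helper is replaced inline throughout: load the element, index down to the node in question, and call get_provenance() on it; str(provenance) renders the declaration location that the error output is expected to contain. A minimal sketch of the pattern, with illustrative inline YAML:

    from buildstream import _yaml

    element_node = _yaml.load_data(
        'kind: stack\n'
        'depends:\n'
        '- junction: junction.bst\n'
        '  filename: import-etc.bst\n'
    )

    # The first entry of 'depends' is a mapping node that knows
    # where in the YAML it was declared
    ref_node = element_node.get_sequence('depends').mapping_at(0)
    provenance = ref_node.get_provenance()
    assert str(provenance)  # renders the location as text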
diff --git a/tests/frontend/buildtrack.py b/tests/frontend/buildtrack.py
index 13e5ab96e..ff3c53281 100644
--- a/tests/frontend/buildtrack.py
+++ b/tests/frontend/buildtrack.py
@@ -31,7 +31,7 @@ def create_element(repo, name, path, dependencies, ref=None):
],
'depends': dependencies
}
- _yaml.dump(element, os.path.join(path, name))
+ _yaml.roundtrip_dump(element, os.path.join(path, name))
@pytest.mark.datafiles(os.path.join(DATA_DIR))
@@ -120,7 +120,7 @@ def test_build_track(cli, datafiles, tmpdir, ref_storage, strict,
'test': initial_project_refs
}
}
- _yaml.dump(project_refs, os.path.join(project, 'project.refs'))
+ _yaml.roundtrip_dump(project_refs, os.path.join(project, 'project.refs'))
args = ['build']
args += itertools.chain.from_iterable(zip(itertools.repeat('--track'), track_targets))
@@ -219,7 +219,7 @@ def test_build_track_all(cli, tmpdir, datafiles, strict, ref_storage):
}
]
}
- _yaml.dump(element, os.path.join(element_path, 'composed.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, 'composed.bst'))
# Track the junction itself first.
result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
diff --git a/tests/frontend/configurable_warnings.py b/tests/frontend/configurable_warnings.py
index c63a0b673..7936b2f89 100644
--- a/tests/frontend/configurable_warnings.py
+++ b/tests/frontend/configurable_warnings.py
@@ -40,7 +40,7 @@ def build_project(datafiles, fatal_warnings):
project = get_project(fatal_warnings)
- _yaml.dump(project, os.path.join(project_path, "project.conf"))
+ _yaml.roundtrip_dump(project, os.path.join(project_path, "project.conf"))
return project_path
diff --git a/tests/frontend/cross_junction_workspace.py b/tests/frontend/cross_junction_workspace.py
index 81fd43487..ca21e7548 100644
--- a/tests/frontend/cross_junction_workspace.py
+++ b/tests/frontend/cross_junction_workspace.py
@@ -13,8 +13,8 @@ def prepare_junction_project(cli, tmpdir):
os.makedirs(str(main_project))
os.makedirs(str(sub_project))
- _yaml.dump({'name': 'main'}, str(main_project.join("project.conf")))
- _yaml.dump({'name': 'sub'}, str(sub_project.join("project.conf")))
+ _yaml.roundtrip_dump({'name': 'main'}, str(main_project.join("project.conf")))
+ _yaml.roundtrip_dump({'name': 'sub'}, str(sub_project.join("project.conf")))
import_dir = tmpdir.join("import")
os.makedirs(str(import_dir))
@@ -26,18 +26,18 @@ def prepare_junction_project(cli, tmpdir):
import_repo = create_repo("git", str(import_repo_dir))
import_ref = import_repo.create(str(import_dir))
- _yaml.dump({'kind': 'import',
- 'sources': [import_repo.source_config(ref=import_ref)]},
- str(sub_project.join("data.bst")))
+ _yaml.roundtrip_dump({'kind': 'import',
+ 'sources': [import_repo.source_config(ref=import_ref)]},
+ str(sub_project.join("data.bst")))
sub_repo_dir = tmpdir.join("sub_repo")
os.makedirs(str(sub_repo_dir))
sub_repo = create_repo("git", str(sub_repo_dir))
sub_ref = sub_repo.create(str(sub_project))
- _yaml.dump({'kind': 'junction',
- 'sources': [sub_repo.source_config(ref=sub_ref)]},
- str(main_project.join("sub.bst")))
+ _yaml.roundtrip_dump({'kind': 'junction',
+ 'sources': [sub_repo.source_config(ref=sub_ref)]},
+ str(main_project.join("sub.bst")))
args = ['source', 'fetch', 'sub.bst']
result = cli.run(project=str(main_project), args=args)
@@ -75,11 +75,12 @@ def test_list_cross_junction(cli, tmpdir):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert isinstance(_yaml.node_get(loaded, None, 'workspaces'), list)
- workspaces = _yaml.node_get(loaded, list, 'workspaces')
+ workspaces = loaded.get_sequence('workspaces')
assert len(workspaces) == 1
- assert 'element' in workspaces[0]
- assert _yaml.node_get(workspaces[0], str, 'element') == element
+ first_workspace = workspaces.mapping_at(0)
+
+ assert 'element' in first_workspace
+ assert first_workspace.get_str('element') == element
def test_close_cross_junction(cli, tmpdir):
@@ -97,8 +98,7 @@ def test_close_cross_junction(cli, tmpdir):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert isinstance(_yaml.node_get(loaded, None, 'workspaces'), list)
- workspaces = _yaml.node_get(loaded, list, 'workspaces')
+ workspaces = loaded.get_sequence('workspaces')
assert not workspaces
@@ -116,8 +116,7 @@ def test_close_all_cross_junction(cli, tmpdir):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert isinstance(_yaml.node_get(loaded, None, 'workspaces'), list)
- workspaces = _yaml.node_get(loaded, list, 'workspaces')
+ workspaces = loaded.get_sequence('workspaces')
assert not workspaces
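The workspace-list hunks above drop the old isinstance() guard because sequence nodes support len() and truthiness directly, and mapping nodes support `in` for key containment. A minimal sketch, with illustrative inline YAML:

    from buildstream import _yaml

    loaded = _yaml.load_data(
        'workspaces:\n'
        '- element: sub.bst:data.bst\n'
        '  directory: /tmp/workspace\n'
    )

    workspaces = loaded.get_sequence('workspaces')
    assert len(workspaces) == 1              # len() works on sequence nodes

    first_workspace = workspaces.mapping_at(0)
    assert 'element' in first_workspace      # containment works on mapping nodes
    assert first_workspace.get_str('element') == 'sub.bst:data.bst'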
diff --git a/tests/frontend/fetch.py b/tests/frontend/fetch.py
index f470ce2b7..7ea357ac2 100644
--- a/tests/frontend/fetch.py
+++ b/tests/frontend/fetch.py
@@ -9,7 +9,7 @@ from buildstream.testing import cli # pylint: disable=unused-import
from buildstream import _yaml
from buildstream._exceptions import ErrorDomain, LoadErrorReason
-from tests.testutils import generate_junction, yaml_file_get_provenance
+from tests.testutils import generate_junction
from . import configure_project
@@ -37,9 +37,8 @@ def test_fetch_default_targets(cli, tmpdir, datafiles):
repo.source_config(ref=ref)
]
}
- _yaml.dump(element,
- os.path.join(element_path,
- element_name))
+ _yaml.roundtrip_dump(element,
+ os.path.join(element_path, element_name))
# Assert that a fetch is needed
assert cli.get_element_state(project, element_name) == 'fetch needed'
@@ -113,7 +112,7 @@ def test_unfetched_junction(cli, tmpdir, datafiles, strict, ref_storage):
}
]
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
# Dump a project.refs if we're using project.refs storage
#
@@ -129,7 +128,7 @@ def test_unfetched_junction(cli, tmpdir, datafiles, strict, ref_storage):
}
}
}
- _yaml.dump(project_refs, os.path.join(project, 'junction.refs'))
+ _yaml.roundtrip_dump(project_refs, os.path.join(project, 'junction.refs'))
# Now try to fetch it, this should automatically result in fetching
# the junction itself.
@@ -163,7 +162,7 @@ def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage):
}
]
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
# Now try to fetch it, this will bail with the appropriate error
# informing the user to track the junction first
@@ -171,6 +170,7 @@ def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage):
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
# Assert that we have the expected provenance encoded into the error
- provenance = yaml_file_get_provenance(
- element_path, 'junction-dep.bst', key='depends', indices=[0])
+ element_node = _yaml.load(element_path, shortname='junction-dep.bst')
+ ref_node = element_node.get_sequence('depends').mapping_at(0)
+ provenance = ref_node.get_provenance()
assert str(provenance) in result.stderr
diff --git a/tests/frontend/init.py b/tests/frontend/init.py
index d135abf1e..0fdc0eda5 100644
--- a/tests/frontend/init.py
+++ b/tests/frontend/init.py
@@ -19,9 +19,9 @@ def test_defaults(cli, tmpdir):
result.assert_success()
project_conf = _yaml.load(project_path)
- assert _yaml.node_get(project_conf, str, 'name') == 'foo'
- assert _yaml.node_get(project_conf, str, 'format-version') == str(BST_FORMAT_VERSION)
- assert _yaml.node_get(project_conf, str, 'element-path') == 'elements'
+ assert project_conf.get_str('name') == 'foo'
+ assert project_conf.get_str('format-version') == str(BST_FORMAT_VERSION)
+ assert project_conf.get_str('element-path') == 'elements'
def test_all_options(cli, tmpdir):
@@ -38,9 +38,9 @@ def test_all_options(cli, tmpdir):
result.assert_success()
project_conf = _yaml.load(project_path)
- assert _yaml.node_get(project_conf, str, 'name') == 'foo'
- assert _yaml.node_get(project_conf, str, 'format-version') == str(2)
- assert _yaml.node_get(project_conf, str, 'element-path') == 'ponies'
+ assert project_conf.get_str('name') == 'foo'
+ assert project_conf.get_str('format-version') == str(2)
+ assert project_conf.get_str('element-path') == 'ponies'
elements_dir = os.path.join(project, 'ponies')
assert os.path.isdir(elements_dir)
@@ -71,8 +71,8 @@ def test_force_overwrite_project(cli, tmpdir):
result.assert_success()
project_conf = _yaml.load(project_path)
- assert _yaml.node_get(project_conf, str, 'name') == 'foo'
- assert _yaml.node_get(project_conf, str, 'format-version') == str(BST_FORMAT_VERSION)
+ assert project_conf.get_str('name') == 'foo'
+ assert project_conf.get_str('format-version') == str(BST_FORMAT_VERSION)
def test_relative_path_directory_as_argument(cli, tmpdir):
@@ -85,9 +85,9 @@ def test_relative_path_directory_as_argument(cli, tmpdir):
result.assert_success()
project_conf = _yaml.load(project_path)
- assert _yaml.node_get(project_conf, str, 'name') == 'foo'
- assert _yaml.node_get(project_conf, str, 'format-version') == str(BST_FORMAT_VERSION)
- assert _yaml.node_get(project_conf, str, 'element-path') == 'elements'
+ assert project_conf.get_str('name') == 'foo'
+ assert project_conf.get_int('format-version') == BST_FORMAT_VERSION
+ assert project_conf.get_str('element-path') == 'elements'
def test_set_directory_and_directory_as_argument(cli, tmpdir):
@@ -143,6 +143,6 @@ def test_element_path_interactive(cli, tmp_path, monkeypatch, element_path):
assert full_element_path.exists()
project_conf = _yaml.load(str(project_conf_path))
- assert _yaml.node_get(project_conf, str, 'name') == 'project_name'
- assert _yaml.node_get(project_conf, str, 'format-version') == '0'
- assert _yaml.node_get(project_conf, str, 'element-path') == element_path
+ assert project_conf.get_str('name') == 'project_name'
+ assert project_conf.get_str('format-version') == '0'
+ assert project_conf.get_str('element-path') == element_path
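Note the two styles these init.py hunks use for the same key: most keep the old string-typed comparison via get_str('format-version') == str(BST_FORMAT_VERSION), while one hunk switches to get_int(), which parses the scalar to a real integer. A minimal sketch of both accessors on one scalar, with an illustrative value:

    from buildstream import _yaml

    loaded = _yaml.load_data('format-version: 17\n')

    # The same scalar can be read back as either type
    assert loaded.get_str('format-version') == '17'
    assert loaded.get_int('format-version') == 17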
diff --git a/tests/frontend/logging.py b/tests/frontend/logging.py
index a8f894976..6a17bf771 100644
--- a/tests/frontend/logging.py
+++ b/tests/frontend/logging.py
@@ -39,9 +39,8 @@ def test_default_logging(cli, tmpdir, datafiles):
repo.source_config(ref=ref)
]
}
- _yaml.dump(element,
- os.path.join(element_path,
- element_name))
+ _yaml.roundtrip_dump(element,
+ os.path.join(element_path, element_name))
# Now try to fetch it
result = cli.run(project=project, args=['source', 'fetch', element_name])
@@ -76,9 +75,8 @@ def test_custom_logging(cli, tmpdir, datafiles):
repo.source_config(ref=ref)
]
}
- _yaml.dump(element,
- os.path.join(element_path,
- element_name))
+ _yaml.roundtrip_dump(element,
+ os.path.join(element_path, element_name))
# Now try to fetch it
result = cli.run(project=project, args=['source', 'fetch', element_name])
@@ -104,7 +102,7 @@ def test_failed_build_listing(cli, datafiles):
]
}
}
- _yaml.dump(element, os.path.join(project, element_path))
+ _yaml.roundtrip_dump(element, os.path.join(project, element_path))
element_names.append(element_name)
result = cli.run(project=project, args=['--on-error=continue', 'build', *element_names])
result.assert_main_error(ErrorDomain.STREAM, None)
diff --git a/tests/frontend/mirror.py b/tests/frontend/mirror.py
index 09beb38b8..855155785 100644
--- a/tests/frontend/mirror.py
+++ b/tests/frontend/mirror.py
@@ -116,7 +116,7 @@ def test_mirror_fetch_ref_storage(cli, tmpdir, datafiles, ref_storage, mirror):
full_mirror = mirror_repo.source_config()['url']
mirror_map, _ = os.path.split(full_mirror)
os.makedirs(element_dir)
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
if ref_storage == 'project.refs':
# Manually set project.refs to avoid caching the repo prematurely
@@ -128,7 +128,7 @@ def test_mirror_fetch_ref_storage(cli, tmpdir, datafiles, ref_storage, mirror):
}
}}
project_refs_path = os.path.join(project_dir, 'project.refs')
- _yaml.dump(project_refs, project_refs_path)
+ _yaml.roundtrip_dump(project_refs, project_refs_path)
project = {
'name': 'test',
@@ -151,7 +151,7 @@ def test_mirror_fetch_ref_storage(cli, tmpdir, datafiles, ref_storage, mirror):
project['mirrors'] = mirror_data
project_file = os.path.join(project_dir, 'project.conf')
- _yaml.dump(project, project_file)
+ _yaml.roundtrip_dump(project, project_file)
result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
result.assert_success()
@@ -167,11 +167,11 @@ def test_mirror_fetch_multi(cli, tmpdir):
element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
element = generate_element(output_file)
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
project_file = os.path.join(project_dir, 'project.conf')
project = generate_project()
- _yaml.dump(project, project_file)
+ _yaml.roundtrip_dump(project, project_file)
result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
result.assert_success()
@@ -191,11 +191,11 @@ def test_mirror_fetch_default_cmdline(cli, tmpdir):
element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
element = generate_element(output_file)
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
project_file = os.path.join(project_dir, 'project.conf')
project = generate_project()
- _yaml.dump(project, project_file)
+ _yaml.roundtrip_dump(project, project_file)
result = cli.run(project=project_dir, args=['--default-mirror', 'arrakis', 'source', 'fetch', element_name])
result.assert_success()
@@ -222,11 +222,11 @@ def test_mirror_fetch_default_userconfig(cli, tmpdir):
element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
element = generate_element(output_file)
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
project_file = os.path.join(project_dir, 'project.conf')
project = generate_project()
- _yaml.dump(project, project_file)
+ _yaml.roundtrip_dump(project, project_file)
userconfig = {
'projects': {
@@ -262,11 +262,11 @@ def test_mirror_fetch_default_cmdline_overrides_config(cli, tmpdir):
element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
element = generate_element(output_file)
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
project_file = os.path.join(project_dir, 'project.conf')
project = generate_project()
- _yaml.dump(project, project_file)
+ _yaml.roundtrip_dump(project, project_file)
userconfig = {
'projects': {
@@ -346,7 +346,7 @@ def test_mirror_git_submodule_fetch(cli, tmpdir, datafiles):
aliased_repo = alias + ':' + repo_name
element['sources'][0]['submodules']['defined']['url'] = aliased_repo
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
full_mirror = main_mirror.source_config()['url']
mirror_map, _ = os.path.split(full_mirror)
@@ -366,7 +366,7 @@ def test_mirror_git_submodule_fetch(cli, tmpdir, datafiles):
]
}
project_file = os.path.join(project_dir, 'project.conf')
- _yaml.dump(project, project_file)
+ _yaml.roundtrip_dump(project, project_file)
result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
result.assert_success()
@@ -433,7 +433,7 @@ def test_mirror_fallback_git_only_submodules(cli, tmpdir, datafiles):
element_name = 'test.bst'
element_path = os.path.join(element_dir, element_name)
os.makedirs(element_dir)
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
project = {
'name': 'test',
@@ -451,7 +451,7 @@ def test_mirror_fallback_git_only_submodules(cli, tmpdir, datafiles):
]
}
project_file = os.path.join(project_dir, 'project.conf')
- _yaml.dump(project, project_file)
+ _yaml.roundtrip_dump(project, project_file)
# Now make the upstream unavailable.
os.rename(upstream_bin_repo.repo, '{}.bak'.format(upstream_bin_repo.repo))
@@ -529,7 +529,7 @@ def test_mirror_fallback_git_with_submodules(cli, tmpdir, datafiles):
element_name = 'test.bst'
element_path = os.path.join(element_dir, element_name)
os.makedirs(element_dir)
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
project = {
'name': 'test',
@@ -547,7 +547,7 @@ def test_mirror_fallback_git_with_submodules(cli, tmpdir, datafiles):
]
}
project_file = os.path.join(project_dir, 'project.conf')
- _yaml.dump(project, project_file)
+ _yaml.roundtrip_dump(project, project_file)
# Now make the upstream unavailable.
os.rename(upstream_main_repo.repo, '{}.bak'.format(upstream_main_repo.repo))
diff --git a/tests/frontend/order.py b/tests/frontend/order.py
index 7f354c88e..c62377419 100644
--- a/tests/frontend/order.py
+++ b/tests/frontend/order.py
@@ -37,7 +37,7 @@ def create_element(project, name, dependencies):
],
'depends': dependencies
}
- _yaml.dump(element, os.path.join(element_path, name))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, name))
return repo
diff --git a/tests/frontend/overlaps.py b/tests/frontend/overlaps.py
index 6f6ab26a3..eb2cd4a86 100644
--- a/tests/frontend/overlaps.py
+++ b/tests/frontend/overlaps.py
@@ -25,7 +25,7 @@ def gen_project(project_dir, fail_on_overlap, use_fatal_warnings=True, project_n
else:
template["fail-on-overlap"] = fail_on_overlap
projectfile = os.path.join(project_dir, "project.conf")
- _yaml.dump(template, projectfile)
+ _yaml.roundtrip_dump(template, projectfile)
@pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/frontend/project/sources/fetch_source.py b/tests/frontend/project/sources/fetch_source.py
index f17c14029..ead5bc3f5 100644
--- a/tests/frontend/project/sources/fetch_source.py
+++ b/tests/frontend/project/sources/fetch_source.py
@@ -38,13 +38,12 @@ class FetchFetcher(SourceFetcher):
class FetchSource(Source):
# Read config to know which URLs to fetch
def configure(self, node):
- self.original_urls = self.node_get_member(node, list, 'urls')
- self.output_file = self.node_get_member(node, str, 'output-text')
- self.fetch_succeeds = {}
- if 'fetch-succeeds' in node:
- fetch_succeeds_node = self.node_get_member(node, dict, 'fetch-succeeds')
- for key, value in self.node_items(fetch_succeeds_node):
- self.fetch_succeeds[key] = value in ('True', 'true')
+ self.original_urls = node.get_sequence('urls').as_str_list()
+ self.output_file = node.get_str('output-text')
+ self.fetch_succeeds = {
+ key: value.as_bool()
+ for key, value in node.get_mapping('fetch-succeeds', {}).items()
+ }
# First URL is the primary one for this test
#
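The FetchSource.configure() rewrite above condenses the optional fetch-succeeds block: get_mapping('fetch-succeeds', {}) returns an empty mapping when the key is absent, items() yields (key, node) pairs, and each value node parses itself with as_bool(). A standalone sketch of the same logic, with illustrative inline YAML:

    from buildstream import _yaml

    node = _yaml.load_data(
        'urls:\n'
        '- https://example.com/a\n'
        'output-text: out.txt\n'
        'fetch-succeeds:\n'
        '  https://example.com/a: true\n'
    )

    original_urls = node.get_sequence('urls').as_str_list()
    output_file = node.get_str('output-text')

    # The {} default stands in for a missing 'fetch-succeeds' key,
    # replacing the old `if 'fetch-succeeds' in node` guard
    fetch_succeeds = {
        key: value.as_bool()
        for key, value in node.get_mapping('fetch-succeeds', {}).items()
    }
    assert fetch_succeeds == {'https://example.com/a': True}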
diff --git a/tests/frontend/remote-caches.py b/tests/frontend/remote-caches.py
index 8a5ef9c7b..6ee57df23 100644
--- a/tests/frontend/remote-caches.py
+++ b/tests/frontend/remote-caches.py
@@ -57,7 +57,7 @@ def test_source_artifact_caches(cli, tmpdir, datafiles):
},
'cachedir': cachedir
}
- _yaml.dump(user_config, filename=user_config_file)
+ _yaml.roundtrip_dump(user_config, file=user_config_file)
cli.configure(user_config)
create_element_size('repo.bst', project_dir, element_path, [], 10000)
diff --git a/tests/frontend/show.py b/tests/frontend/show.py
index d138b3693..756fe1786 100644
--- a/tests/frontend/show.py
+++ b/tests/frontend/show.py
@@ -10,7 +10,7 @@ from buildstream.testing import cli # pylint: disable=unused-import
from buildstream import _yaml
from buildstream._exceptions import ErrorDomain, LoadErrorReason
-from tests.testutils import generate_junction, yaml_file_get_provenance
+from tests.testutils import generate_junction
from . import configure_project
@@ -252,7 +252,7 @@ def test_unfetched_junction(cli, tmpdir, datafiles, ref_storage, element_name, w
}
]
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
# Dump a project.refs if we're using project.refs storage
#
@@ -268,7 +268,7 @@ def test_unfetched_junction(cli, tmpdir, datafiles, ref_storage, element_name, w
}
}
}
- _yaml.dump(project_refs, os.path.join(project, 'junction.refs'))
+ _yaml.roundtrip_dump(project_refs, os.path.join(project, 'junction.refs'))
# Open a workspace if we're testing workspaced behavior
if workspaced:
@@ -310,7 +310,7 @@ def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage, workspaced):
}
]
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
# Open a workspace if we're testing workspaced behavior
if workspaced:
@@ -333,8 +333,9 @@ def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage, workspaced):
etc_result.assert_success()
else:
# Assert that we have the expected provenance encoded into the error
- provenance = yaml_file_get_provenance(
- element_path, 'junction-dep.bst', key='depends', indices=[0])
+ element_node = _yaml.load(element_path, shortname='junction-dep.bst')
+ ref_node = element_node.get_sequence('depends').mapping_at(0)
+ provenance = ref_node.get_provenance()
assert str(provenance) in dep_result.stderr
dep_result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
@@ -365,7 +366,7 @@ def test_fetched_junction(cli, tmpdir, datafiles, element_name, workspaced):
}
]
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
result = cli.run(project=project, silent=True, args=[
'source', 'fetch', 'junction.bst'])
@@ -420,7 +421,7 @@ def test_exceed_max_recursion_depth(cli, tmpdir, dependency_depth):
}
if i == 0:
del element['depends']
- _yaml.dump(element, os.path.join(element_path, "element{}.bst".format(str(i))))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, "element{}.bst".format(str(i))))
source = os.path.join(sourcefiles_path, "source{}".format(str(i)))
open(source, 'x').close()
diff --git a/tests/frontend/source_checkout.py b/tests/frontend/source_checkout.py
index 38041c45d..1831ee863 100644
--- a/tests/frontend/source_checkout.py
+++ b/tests/frontend/source_checkout.py
@@ -154,7 +154,7 @@ def test_source_checkout_fetch(datafiles, cli):
element = generate_remote_import_element(
os.path.join(project, 'files', 'dev-files', 'usr', 'include', 'pony.h'),
'pony.h')
- _yaml.dump(element, target_path)
+ _yaml.roundtrip_dump(element, target_path)
# Testing implicit fetching requires that we do not have the sources
# cached already
diff --git a/tests/frontend/track.py b/tests/frontend/track.py
index 808bf0593..a628043d8 100644
--- a/tests/frontend/track.py
+++ b/tests/frontend/track.py
@@ -9,7 +9,7 @@ from buildstream.testing import create_repo
from buildstream.testing import cli # pylint: disable=unused-import
from buildstream._exceptions import ErrorDomain, LoadErrorReason
from buildstream import _yaml
-from tests.testutils import generate_junction, yaml_file_get_provenance
+from tests.testutils import generate_junction
from . import configure_project
# Project directory
@@ -27,7 +27,7 @@ def generate_element(repo, element_path, dep_name=None):
if dep_name:
element['depends'] = [dep_name]
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
@pytest.mark.datafiles(DATA_DIR)
@@ -156,7 +156,7 @@ def test_track_cross_junction(cli, tmpdir, datafiles, cross_junction, ref_storag
'name': 'test',
'ref-storage': ref_storage
}
- _yaml.dump(project_conf, os.path.join(project, 'project.conf'))
+ _yaml.roundtrip_dump(project_conf, os.path.join(project, 'project.conf'))
#
# FIXME: This can be simplified when we have support
@@ -267,7 +267,7 @@ def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage):
}
]
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
# Now try to track it, this will bail with the appropriate error
# informing the user to track the junction first
@@ -275,8 +275,9 @@ def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage):
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
# Assert that we have the expected provenance encoded into the error
- provenance = yaml_file_get_provenance(
- element_path, 'junction-dep.bst', key='depends', indices=[0])
+ element_node = _yaml.load(element_path, shortname='junction-dep.bst')
+ ref_node = element_node.get_sequence('depends').mapping_at(0)
+ provenance = ref_node.get_provenance()
assert str(provenance) in result.stderr
@@ -306,15 +307,16 @@ def test_junction_element(cli, tmpdir, datafiles, ref_storage):
}
]
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
# First demonstrate that showing the pipeline yields an error
result = cli.run(project=project, args=['show', 'junction-dep.bst'])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
# Assert that we have the expected provenance encoded into the error
- provenance = yaml_file_get_provenance(
- element_path, 'junction-dep.bst', key='depends', indices=[0])
+ element_node = _yaml.load(element_path, shortname='junction-dep.bst')
+ ref_node = element_node.get_sequence('depends').mapping_at(0)
+ provenance = ref_node.get_provenance()
assert str(provenance) in result.stderr
# Now track the junction itself
diff --git a/tests/frontend/workspace.py b/tests/frontend/workspace.py
index 5267f8e56..6e23ec488 100644
--- a/tests/frontend/workspace.py
+++ b/tests/frontend/workspace.py
@@ -92,9 +92,8 @@ class WorkspaceCreator():
}
if element_attrs:
element = {**element, **element_attrs}
- _yaml.dump(element,
- os.path.join(element_path,
- element_name))
+ _yaml.roundtrip_dump(element,
+ os.path.join(element_path, element_name))
return element_name, element_path, workspace_dir
def create_workspace_elements(self, kinds, track, suffixs=None, workspace_dir_usr=None,
@@ -184,11 +183,11 @@ def test_open_bzr_customize(cli, tmpdir, datafiles):
# Check that the correct origin branch is set
element_config = _yaml.load(os.path.join(project, "elements", element_name))
- source_config = _yaml.node_get(element_config, dict, 'sources', [0])
+ source_config = element_config.get_sequence('sources').mapping_at(0)
output = subprocess.check_output(["bzr", "info"], cwd=workspace)
- stripped_url = _yaml.node_get(source_config, str, 'url').lstrip("file:///")
+ stripped_url = source_config.get_str('url').lstrip("file:///")
expected_output_str = ("checkout of branch: /{}/{}"
- .format(stripped_url, _yaml.node_get(source_config, str, 'track')))
+ .format(stripped_url, source_config.get_str('track')))
assert expected_output_str in str(output)
@@ -608,13 +607,12 @@ def test_list(cli, tmpdir, datafiles):
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert isinstance(_yaml.node_get(loaded, None, 'workspaces'), list)
- workspaces = _yaml.node_get(loaded, list, 'workspaces')
+ workspaces = loaded.get_sequence('workspaces')
assert len(workspaces) == 1
- space = workspaces[0]
- assert _yaml.node_get(space, str, 'element') == element_name
- assert _yaml.node_get(space, str, 'directory') == workspace
+ space = workspaces.mapping_at(0)
+ assert space.get_str('element') == element_name
+ assert space.get_str('directory') == workspace
@pytest.mark.datafiles(DATA_DIR)
@@ -684,9 +682,8 @@ def test_buildable_no_ref(cli, tmpdir, datafiles):
repo.source_config()
]
}
- _yaml.dump(element,
- os.path.join(element_path,
- element_name))
+ _yaml.roundtrip_dump(element,
+ os.path.join(element_path, element_name))
# Assert that this target is not buildable when no workspace is associated.
assert cli.get_element_state(project, element_name) == 'no reference'
@@ -805,7 +802,7 @@ def test_list_unsupported_workspace(cli, datafiles, workspace_cfg):
os.makedirs(os.path.join(project, '.bst'))
workspace_config_path = os.path.join(project, '.bst', 'workspaces.yml')
- _yaml.dump(workspace_cfg, workspace_config_path)
+ _yaml.roundtrip_dump(workspace_cfg, workspace_config_path)
result = cli.run(project=project, args=['workspace', 'list'])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@@ -910,20 +907,20 @@ def test_list_unsupported_workspace(cli, datafiles, workspace_cfg):
def test_list_supported_workspace(cli, tmpdir, datafiles, workspace_cfg, expected):
def parse_dict_as_yaml(node):
tempfile = os.path.join(str(tmpdir), 'yaml_dump')
- _yaml.dump(node, tempfile)
- return _yaml.node_sanitize(_yaml.load(tempfile))
+ _yaml.roundtrip_dump(node, tempfile)
+ return _yaml.load(tempfile)._strip_node_info()
project = str(datafiles)
os.makedirs(os.path.join(project, '.bst'))
workspace_config_path = os.path.join(project, '.bst', 'workspaces.yml')
- _yaml.dump(workspace_cfg, workspace_config_path)
+ _yaml.roundtrip_dump(workspace_cfg, workspace_config_path)
# Check that we can still read workspace config that is in old format
result = cli.run(project=project, args=['workspace', 'list'])
result.assert_success()
- loaded_config = _yaml.node_sanitize(_yaml.load(workspace_config_path))
+ loaded_config = _yaml.load(workspace_config_path)._strip_node_info()
# Check that workspace config remains the same if no modifications
# to workspaces were made
@@ -948,9 +945,8 @@ def test_list_supported_workspace(cli, tmpdir, datafiles, workspace_cfg, expecte
repo.source_config(ref=ref)
]
}
- _yaml.dump(element,
- os.path.join(element_path,
- element_name))
+ _yaml.roundtrip_dump(element,
+ os.path.join(element_path, element_name))
# Make a change to the workspaces file
result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
@@ -959,7 +955,7 @@ def test_list_supported_workspace(cli, tmpdir, datafiles, workspace_cfg, expecte
result.assert_success()
# Check that workspace config is converted correctly if necessary
- loaded_config = _yaml.node_sanitize(_yaml.load(workspace_config_path))
+ loaded_config = _yaml.load(workspace_config_path)._strip_node_info()
assert loaded_config == parse_dict_as_yaml(expected)
@@ -995,9 +991,8 @@ def test_cache_key_workspace_in_dependencies(cli, tmpdir, datafiles, strict):
}
]
}
- _yaml.dump(element,
- os.path.join(element_path,
- back_dep_element_name))
+ _yaml.roundtrip_dump(element,
+ os.path.join(element_path, back_dep_element_name))
# Modify workspace
shutil.rmtree(os.path.join(workspace, 'usr', 'bin'))
@@ -1133,15 +1128,15 @@ def test_external_track(cli, datafiles, tmpdir_factory, guess_element):
# Delete the ref from the source so that we can detect if the
# element has been tracked
element_contents = _yaml.load(element_file)
- _yaml.node_del(_yaml.node_get(element_contents, dict, 'sources', [0]), 'ref')
- _yaml.dump(element_contents, element_file)
+ del element_contents.get_sequence('sources').mapping_at(0)['ref']
+ _yaml.roundtrip_dump(element_contents, element_file)
result = cli.run(project=project, args=['-C', workspace, 'source', 'track', *arg_elm])
result.assert_success()
# Element is tracked now
element_contents = _yaml.load(element_file)
- assert 'ref' in _yaml.node_get(element_contents, dict, 'sources', [0])
+ assert 'ref' in element_contents.get_sequence('sources').mapping_at(0)
@pytest.mark.datafiles(DATA_DIR)
@@ -1248,7 +1243,7 @@ def test_multisource_workspace(cli, datafiles, tmpdir):
}]
}
element_path = os.path.join(project, 'elements', element_name)
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
workspace_dir = os.path.join(str(tmpdir), 'multisource')
res = cli.run(project=project,
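Two more patterns appear in the workspace hunks above: mapping nodes are mutable, so `del node[key]` drops an entry in place before re-dumping, and the private _strip_node_info() reduces a loaded tree back to plain Python values for equality checks. A minimal sketch of both, with illustrative inline YAML (the final equality assumes scalars strip to plain strings, as the workspace-config comparisons rely on):

    from buildstream import _yaml

    element_contents = _yaml.load_data(
        'kind: import\n'
        'sources:\n'
        '- kind: git\n'
        '  ref: abcdef\n'
    )

    # Drop the ref in place, as test_external_track does before
    # re-dumping the element with roundtrip_dump()
    del element_contents.get_sequence('sources').mapping_at(0)['ref']
    assert 'ref' not in element_contents.get_sequence('sources').mapping_at(0)

    # _strip_node_info() (private API) returns plain dicts/lists/strings
    stripped = element_contents._strip_node_info()
    assert stripped == {'kind': 'import', 'sources': [{'kind': 'git'}]}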
diff --git a/tests/integration/cachedfail.py b/tests/integration/cachedfail.py
index be7db3357..467d3968f 100644
--- a/tests/integration/cachedfail.py
+++ b/tests/integration/cachedfail.py
@@ -61,7 +61,7 @@ def test_build_checkout_cached_fail(cli, datafiles):
],
},
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
# Try to build it, this should result in a failure that contains the content
result = cli.run(project=project, args=['build', 'element.bst'])
@@ -103,7 +103,7 @@ def test_build_depend_on_cached_fail(cli, datafiles):
],
},
}
- _yaml.dump(dep, dep_path)
+ _yaml.roundtrip_dump(dep, dep_path)
target = {
'kind': 'script',
'depends': [
@@ -122,7 +122,7 @@ def test_build_depend_on_cached_fail(cli, datafiles):
],
},
}
- _yaml.dump(target, target_path)
+ _yaml.roundtrip_dump(target, target_path)
# Try to build it, this should result in caching a failure to build dep
result = cli.run(project=project, args=['build', 'dep.bst'])
@@ -166,7 +166,7 @@ def test_push_cached_fail(cli, tmpdir, datafiles, on_error):
],
},
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
with create_artifact_share(os.path.join(str(tmpdir), 'remote')) as share:
cli.configure({
@@ -204,7 +204,7 @@ def test_host_tools_errors_are_not_cached(cli, datafiles):
],
},
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
clean_platform_cache()
diff --git a/tests/integration/compose.py b/tests/integration/compose.py
index 2b37942fa..3562ed94b 100644
--- a/tests/integration/compose.py
+++ b/tests/integration/compose.py
@@ -36,7 +36,7 @@ def create_compose_element(name, path, config=None):
'config': config
}
os.makedirs(os.path.dirname(os.path.join(path, name)), exist_ok=True)
- _yaml.dump(element, os.path.join(path, name))
+ _yaml.roundtrip_dump(element, os.path.join(path, name))
@pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/integration/import.py b/tests/integration/import.py
index 5371ec61a..bac92cadf 100644
--- a/tests/integration/import.py
+++ b/tests/integration/import.py
@@ -32,7 +32,7 @@ def create_import_element(name, path, source, target, source_path):
}
}
os.makedirs(os.path.dirname(os.path.join(path, name)), exist_ok=True)
- _yaml.dump(element, os.path.join(path, name))
+ _yaml.roundtrip_dump(element, os.path.join(path, name))
@pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/integration/manual.py b/tests/integration/manual.py
index b3124a852..2ac7f74d0 100644
--- a/tests/integration/manual.py
+++ b/tests/integration/manual.py
@@ -31,7 +31,7 @@ def create_manual_element(name, path, config, variables, environment):
'environment': environment
}
os.makedirs(os.path.dirname(os.path.join(path, name)), exist_ok=True)
- _yaml.dump(element, os.path.join(path, name))
+ _yaml.roundtrip_dump(element, os.path.join(path, name))
@pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/integration/messages.py b/tests/integration/messages.py
index edfb435ae..42725fc5b 100644
--- a/tests/integration/messages.py
+++ b/tests/integration/messages.py
@@ -59,7 +59,7 @@ def test_disable_message_lines(cli, datafiles):
}
os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
- _yaml.dump(element, os.path.join(element_path, element_name))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# First we check that we get the "Silly message"
result = cli.run(project=project, args=["build", element_name])
@@ -94,7 +94,7 @@ def test_disable_error_lines(cli, datafiles):
}
os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
- _yaml.dump(element, os.path.join(element_path, element_name))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# First we check that we get the syntax error
result = cli.run(project=project, args=["--error-lines", "0",
diff --git a/tests/integration/pip_element.py b/tests/integration/pip_element.py
index 91dcaa39e..da0badcb3 100644
--- a/tests/integration/pip_element.py
+++ b/tests/integration/pip_element.py
@@ -47,7 +47,7 @@ def test_pip_build(cli, datafiles):
}]
}
os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
- _yaml.dump(element, os.path.join(element_path, element_name))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
result = cli.run(project=project, args=['build', element_name])
assert result.exit_code == 0
@@ -119,7 +119,7 @@ def test_pip_element_should_install_pip_deps(cli, datafiles, setup_pypi_repo):
}
]
}
- _yaml.dump(element, os.path.join(elements_path, element_name))
+ _yaml.roundtrip_dump(element, os.path.join(elements_path, element_name))
result = cli.run(project=project, args=['source', 'track', element_name])
assert result.exit_code == 0
diff --git a/tests/integration/pip_source.py b/tests/integration/pip_source.py
index 632b5ae24..c221910a6 100644
--- a/tests/integration/pip_source.py
+++ b/tests/integration/pip_source.py
@@ -58,7 +58,7 @@ def test_pip_source_import_packages(cli, datafiles, setup_pypi_repo):
]
}
os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
- _yaml.dump(element, os.path.join(element_path, element_name))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
result = cli.run(project=project, args=['source', 'track', element_name])
assert result.exit_code == 0
@@ -116,7 +116,7 @@ def test_pip_source_import_requirements_files(cli, datafiles, setup_pypi_repo):
]
}
os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
- _yaml.dump(element, os.path.join(element_path, element_name))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
result = cli.run(project=project, args=['source', 'track', element_name])
assert result.exit_code == 0
@@ -182,7 +182,7 @@ def test_pip_source_build(cli, datafiles, setup_pypi_repo):
}
}
os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
- _yaml.dump(element, os.path.join(element_path, element_name))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
result = cli.run(project=project, args=['source', 'track', element_name])
assert result.exit_code == 0
diff --git a/tests/integration/script.py b/tests/integration/script.py
index 1025709f4..fc57e8744 100644
--- a/tests/integration/script.py
+++ b/tests/integration/script.py
@@ -35,7 +35,7 @@ def create_script_element(name, path, config=None, variables=None):
'variables': variables
}
os.makedirs(os.path.dirname(os.path.join(path, name)), exist_ok=True)
- _yaml.dump(element, os.path.join(path, name))
+ _yaml.roundtrip_dump(element, os.path.join(path, name))
@pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/integration/shell.py b/tests/integration/shell.py
index f7de3e462..a1f38d879 100644
--- a/tests/integration/shell.py
+++ b/tests/integration/shell.py
@@ -163,7 +163,7 @@ def test_no_shell(cli, datafiles):
}
}
os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
- _yaml.dump(element, os.path.join(element_path, element_name))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
result = execute_shell(cli, project, ['/bin/echo', 'Pegasissies!'], element=element_name)
assert result.exit_code == 0
diff --git a/tests/integration/source-determinism.py b/tests/integration/source-determinism.py
index 4590d4102..70c4b79de 100644
--- a/tests/integration/source-determinism.py
+++ b/tests/integration/source-determinism.py
@@ -60,7 +60,7 @@ def test_deterministic_source_local(cli, tmpdir, datafiles):
]
}
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
def get_value_for_mask(mask):
checkoutdir = os.path.join(str(tmpdir), 'checkout-{}'.format(mask))
diff --git a/tests/integration/workspace.py b/tests/integration/workspace.py
index fff9518a3..78379912c 100644
--- a/tests/integration/workspace.py
+++ b/tests/integration/workspace.py
@@ -74,7 +74,7 @@ def test_workspace_updated_dependency(cli, datafiles):
}
}
os.makedirs(os.path.dirname(os.path.join(element_path, dep_name)), exist_ok=True)
- _yaml.dump(dependency, os.path.join(element_path, dep_name))
+ _yaml.roundtrip_dump(dependency, os.path.join(element_path, dep_name))
# First open the workspace
res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
@@ -90,7 +90,7 @@ def test_workspace_updated_dependency(cli, datafiles):
'mkdir -p %{install-root}/etc/test/',
'echo "Hello china!" > %{install-root}/etc/test/hello.txt'
]
- _yaml.dump(dependency, os.path.join(element_path, dep_name))
+ _yaml.roundtrip_dump(dependency, os.path.join(element_path, dep_name))
# `Make` would look at timestamps and normally not realize that
# our dependency's header files changed. BuildStream must
@@ -129,7 +129,7 @@ def test_workspace_update_dependency_failed(cli, datafiles):
}
}
os.makedirs(os.path.dirname(os.path.join(element_path, dep_name)), exist_ok=True)
- _yaml.dump(dependency, os.path.join(element_path, dep_name))
+ _yaml.roundtrip_dump(dependency, os.path.join(element_path, dep_name))
# First open the workspace
res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
@@ -146,7 +146,7 @@ def test_workspace_update_dependency_failed(cli, datafiles):
'echo "Hello china!" > %{install-root}/etc/test/hello.txt',
'echo "Hello brazil!" > %{install-root}/etc/test/brazil.txt'
]
- _yaml.dump(dependency, os.path.join(element_path, dep_name))
+ _yaml.roundtrip_dump(dependency, os.path.join(element_path, dep_name))
# And our build fails!
with open(os.path.join(workspace, 'Makefile'), 'a') as f:
@@ -161,7 +161,7 @@ def test_workspace_update_dependency_failed(cli, datafiles):
'echo "Hello world!" > %{install-root}/etc/test/hello.txt',
'echo "Hello spain!" > %{install-root}/etc/test/brazil.txt'
]
- _yaml.dump(dependency, os.path.join(element_path, dep_name))
+ _yaml.roundtrip_dump(dependency, os.path.join(element_path, dep_name))
# And fix the source
with open(os.path.join(workspace, 'Makefile'), 'r') as f:
@@ -204,7 +204,7 @@ def test_updated_dependency_nested(cli, datafiles):
}
}
os.makedirs(os.path.dirname(os.path.join(element_path, dep_name)), exist_ok=True)
- _yaml.dump(dependency, os.path.join(element_path, dep_name))
+ _yaml.roundtrip_dump(dependency, os.path.join(element_path, dep_name))
# First open the workspace
res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
@@ -221,7 +221,7 @@ def test_updated_dependency_nested(cli, datafiles):
'echo "Hello world!" > %{install-root}/etc/test/hello.txt',
'echo "Hello test!" > %{install-root}/etc/test/tests/tests.txt'
]
- _yaml.dump(dependency, os.path.join(element_path, dep_name))
+ _yaml.roundtrip_dump(dependency, os.path.join(element_path, dep_name))
res = cli.run(project=project, args=['build', element_name])
assert res.exit_code == 0
@@ -257,7 +257,7 @@ def test_incremental_configure_commands_run_only_once(cli, datafiles):
]
}
}
- _yaml.dump(element, os.path.join(element_path, element_name))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# We open a workspace on the above element
res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
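The pluginfactory changes below swap the private _yaml.new_node_from_dict() for the public Node.from_dict() classmethod when building synthetic nodes in memory. A minimal sketch of constructing one plugin-origin node that way (the path value is hypothetical):

    from buildstream import Node

    # Build a synthetic mapping node directly from a plain dict,
    # with no backing YAML file
    plugins = Node.from_dict({
        'origin': 'local',
        'path': '/tmp/customsource',   # hypothetical path
        'plugins': ['foo'],
    })

    assert plugins.get_str('origin') == 'local'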
diff --git a/tests/internals/pluginfactory.py b/tests/internals/pluginfactory.py
index e9e63672f..b3f77c8b1 100644
--- a/tests/internals/pluginfactory.py
+++ b/tests/internals/pluginfactory.py
@@ -5,10 +5,10 @@ import os
import pytest
from pluginbase import PluginBase
+from buildstream import Node
from buildstream._elementfactory import ElementFactory
from buildstream._sourcefactory import SourceFactory
from buildstream._exceptions import PluginError
-from buildstream import _yaml
DATA_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
@@ -47,7 +47,7 @@ def test_element_factory(plugin_fixture):
##############################################################
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'customsource'))
def test_custom_source(plugin_fixture, datafiles):
- plugins = [_yaml.new_node_from_dict({
+ plugins = [Node.from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
@@ -62,7 +62,7 @@ def test_custom_source(plugin_fixture, datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'customelement'))
def test_custom_element(plugin_fixture, datafiles):
- plugins = [_yaml.new_node_from_dict({
+ plugins = [Node.from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
@@ -99,7 +99,7 @@ def test_missing_element(plugin_fixture):
# Load a factory with a plugin that returns a value instead of Source subclass
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'notatype'))
def test_source_notatype(plugin_fixture, datafiles):
- plugins = [_yaml.new_node_from_dict({
+ plugins = [Node.from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
@@ -113,7 +113,7 @@ def test_source_notatype(plugin_fixture, datafiles):
# Load a factory with a plugin that returns a value instead of Element subclass
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'notatype'))
def test_element_notatype(plugin_fixture, datafiles):
- plugins = [_yaml.new_node_from_dict({
+ plugins = [Node.from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
@@ -128,7 +128,7 @@ def test_element_notatype(plugin_fixture, datafiles):
# which is not a Source subclass
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'wrongtype'))
def test_source_wrongtype(plugin_fixture, datafiles):
- plugins = [_yaml.new_node_from_dict({
+ plugins = [Node.from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
@@ -143,7 +143,7 @@ def test_source_wrongtype(plugin_fixture, datafiles):
# which is not an Element subclass
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'wrongtype'))
def test_element_wrongtype(plugin_fixture, datafiles):
- plugins = [_yaml.new_node_from_dict({
+ plugins = [Node.from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
@@ -157,7 +157,7 @@ def test_element_wrongtype(plugin_fixture, datafiles):
# Load a factory with a plugin which fails to provide a setup() function
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'nosetup'))
def test_source_missing_setup(plugin_fixture, datafiles):
- plugins = [_yaml.new_node_from_dict({
+ plugins = [Node.from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
@@ -171,7 +171,7 @@ def test_source_missing_setup(plugin_fixture, datafiles):
# Load a factory with a plugin which fails to provide a setup() function
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'nosetup'))
def test_element_missing_setup(plugin_fixture, datafiles):
- plugins = [_yaml.new_node_from_dict({
+ plugins = [Node.from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
@@ -186,7 +186,7 @@ def test_element_missing_setup(plugin_fixture, datafiles):
# that is not a function
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'badsetup'))
def test_source_bad_setup(plugin_fixture, datafiles):
- plugins = [_yaml.new_node_from_dict({
+ plugins = [Node.from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
@@ -201,7 +201,7 @@ def test_source_bad_setup(plugin_fixture, datafiles):
# that is not a function
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'badsetup'))
def test_element_bad_setup(plugin_fixture, datafiles):
- plugins = [_yaml.new_node_from_dict({
+ plugins = [Node.from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
@@ -216,7 +216,7 @@ def test_element_bad_setup(plugin_fixture, datafiles):
# high version of buildstream
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'badversionsource'))
def test_source_badversion(plugin_fixture, datafiles):
- plugins = [_yaml.new_node_from_dict({
+ plugins = [Node.from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
@@ -231,7 +231,7 @@ def test_source_badversion(plugin_fixture, datafiles):
# high version of buildstream
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'badversionelement'))
def test_element_badversion(plugin_fixture, datafiles):
- plugins = [_yaml.new_node_from_dict({
+ plugins = [Node.from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename),
@@ -249,14 +249,14 @@ def test_element_badversion(plugin_fixture, datafiles):
# Load two factories, both of which define a different 'foo' plugin
@pytest.mark.datafiles(DATA_DIR)
def test_source_multicontext(plugin_fixture, datafiles):
- plugins1 = _yaml.new_node_from_dict({
+ plugins1 = Node.from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename,
'customsource'),
'plugins': ['foo']
})
- plugins2 = _yaml.new_node_from_dict({
+ plugins2 = Node.from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename,
@@ -278,14 +278,14 @@ def test_source_multicontext(plugin_fixture, datafiles):
# Load two factories, both of which define a different 'foo' plugin
@pytest.mark.datafiles(DATA_DIR)
def test_element_multicontext(plugin_fixture, datafiles):
- plugins1 = _yaml.new_node_from_dict({
+ plugins1 = Node.from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename,
'customelement'),
'plugins': ['foo']
})
- plugins2 = _yaml.new_node_from_dict({
+ plugins2 = Node.from_dict({
'origin': 'local',
'path': os.path.join(datafiles.dirname,
datafiles.basename,
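
These pluginfactory changes move node construction from the private _yaml.new_node_from_dict() to the public Node.from_dict(). A short sketch, with a hypothetical plugin path standing in for the datafiles fixture:

    from buildstream import Node

    # Node.from_dict() wraps a plain dict in a MappingNode, so the
    # factories receive the same node type the loader would produce.
    plugins = Node.from_dict({
        'origin': 'local',
        'path': '/path/to/plugins',  # hypothetical; fixture-provided in the tests
        'plugins': ['foo'],
    })
    assert plugins.get_str('origin') == 'local'
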
diff --git a/tests/internals/yaml.py b/tests/internals/yaml.py
index cd3bdc535..d94d006e3 100644
--- a/tests/internals/yaml.py
+++ b/tests/internals/yaml.py
@@ -3,7 +3,7 @@ from io import StringIO
import pytest
-from buildstream import _yaml
+from buildstream import _yaml, Node, ProvenanceInformation, SequenceNode
from buildstream._exceptions import LoadError, LoadErrorReason
@@ -21,17 +21,17 @@ def test_load_yaml(datafiles):
'basics.yaml')
loaded = _yaml.load(filename)
- assert loaded.value.get('kind').value == 'pony'
+ assert loaded.get_str('kind') == 'pony'
-def assert_provenance(filename, line, col, node, key=None, indices=None):
- provenance = _yaml.node_get_provenance(node, key=key, indices=indices)
+def assert_provenance(filename, line, col, node):
+ provenance = node.get_provenance()
- assert isinstance(provenance, _yaml.ProvenanceInformation)
+ assert isinstance(provenance, ProvenanceInformation)
- assert provenance.shortname == filename
- assert provenance.line == line
- assert provenance.col == col
+ assert provenance._shortname == filename
+ assert provenance._line == line
+ assert provenance._col == col
@pytest.mark.datafiles(os.path.join(DATA_DIR))
@@ -42,7 +42,7 @@ def test_basic_provenance(datafiles):
'basics.yaml')
loaded = _yaml.load(filename)
- assert loaded.value.get('kind').value == 'pony'
+ assert loaded.get_str('kind') == 'pony'
assert_provenance(filename, 1, 0, loaded)
@@ -55,8 +55,8 @@ def test_member_provenance(datafiles):
'basics.yaml')
loaded = _yaml.load(filename)
- assert loaded.value.get('kind').value == 'pony'
- assert_provenance(filename, 2, 13, loaded, 'description')
+ assert loaded.get_str('kind') == 'pony'
+ assert_provenance(filename, 2, 13, loaded.get_scalar('description'))
@pytest.mark.datafiles(os.path.join(DATA_DIR))
@@ -67,12 +67,12 @@ def test_element_provenance(datafiles):
'basics.yaml')
loaded = _yaml.load(filename)
- assert loaded.value.get('kind').value == 'pony'
- assert_provenance(filename, 5, 2, loaded, 'moods', [1])
+ assert loaded.get_str('kind') == 'pony'
+ assert_provenance(filename, 5, 2, loaded.get_sequence('moods').scalar_at(1))
@pytest.mark.datafiles(os.path.join(DATA_DIR))
-def test_node_validate(datafiles):
+def test_mapping_validate_keys(datafiles):
valid = os.path.join(datafiles.dirname,
datafiles.basename,
@@ -83,12 +83,12 @@ def test_node_validate(datafiles):
base = _yaml.load(valid)
- _yaml.node_validate(base, ['kind', 'description', 'moods', 'children', 'extra'])
+ base.validate_keys(['kind', 'description', 'moods', 'children', 'extra'])
base = _yaml.load(invalid)
with pytest.raises(LoadError) as exc:
- _yaml.node_validate(base, ['kind', 'description', 'moods', 'children', 'extra'])
+ base.validate_keys(['kind', 'description', 'moods', 'children', 'extra'])
assert exc.value.reason == LoadErrorReason.INVALID_DATA
@@ -101,18 +101,18 @@ def test_node_get(datafiles):
'basics.yaml')
base = _yaml.load(filename)
- assert base.value.get('kind').value == 'pony'
+ assert base.get_str('kind') == 'pony'
- children = _yaml.node_get(base, list, 'children')
- assert isinstance(children, list)
+ children = base.get_sequence('children')
+ assert isinstance(children, SequenceNode)
assert len(children) == 7
- child = _yaml.node_get(base, dict, 'children', indices=[6])
- assert_provenance(filename, 20, 8, child, 'mood')
+ child = base.get_sequence('children').mapping_at(6)
+ assert_provenance(filename, 20, 8, child.get_scalar('mood'))
- extra = _yaml.node_get(base, dict, 'extra')
+ extra = base.get_mapping('extra')
with pytest.raises(LoadError) as exc:
- _yaml.node_get(extra, dict, 'old')
+ extra.get_mapping('old')
assert exc.value.reason == LoadErrorReason.INVALID_DATA
@@ -127,8 +127,8 @@ def test_node_set(datafiles):
base = _yaml.load(filename)
assert 'mother' not in base
- _yaml.node_set(base, 'mother', 'snow white')
- assert _yaml.node_get(base, str, 'mother') == 'snow white'
+ base['mother'] = 'snow white'
+ assert base.get_str('mother') == 'snow white'
@pytest.mark.datafiles(os.path.join(DATA_DIR))
@@ -141,14 +141,14 @@ def test_node_set_overwrite(datafiles):
base = _yaml.load(filename)
# Overwrite a string
- assert _yaml.node_get(base, str, 'kind') == 'pony'
- _yaml.node_set(base, 'kind', 'cow')
- assert _yaml.node_get(base, str, 'kind') == 'cow'
+ assert base.get_str('kind') == 'pony'
+ base['kind'] = 'cow'
+ assert base.get_str('kind') == 'cow'
# Overwrite a list as a string
- assert _yaml.node_get(base, list, 'moods') == ['happy', 'sad']
- _yaml.node_set(base, 'moods', 'unemotional')
- assert _yaml.node_get(base, str, 'moods') == 'unemotional'
+ assert base.get_sequence('moods').as_str_list() == ['happy', 'sad']
+ base['moods'] = 'unemotional'
+ assert base.get_str('moods') == 'unemotional'
@pytest.mark.datafiles(os.path.join(DATA_DIR))
@@ -160,13 +160,10 @@ def test_node_set_list_element(datafiles):
base = _yaml.load(filename)
- assert _yaml.node_get(base, list, 'moods') == ['happy', 'sad']
- assert _yaml.node_get(base, str, 'moods', indices=[0]) == 'happy'
+ assert base.get_sequence('moods').as_str_list() == ['happy', 'sad']
+ base.get_sequence('moods')[0] = 'confused'
- _yaml.node_set(base, 'moods', 'confused', indices=[0])
-
- assert _yaml.node_get(base, list, 'moods') == ['confused', 'sad']
- assert _yaml.node_get(base, str, 'moods', indices=[0]) == 'confused'
+ assert base.get_sequence('moods').as_str_list() == ['confused', 'sad']
# Really this is testing _yaml.node_copy(), we want to
@@ -185,17 +182,17 @@ def test_composite_preserve_originals(datafiles):
base = _yaml.load(filename)
overlay = _yaml.load(overlayfile)
- base_copy = _yaml.node_copy(base)
- _yaml.composite_dict(base_copy, overlay)
+ base_copy = base.clone()
+ overlay._composite(base_copy)
- copy_extra = _yaml.node_get(base_copy, dict, 'extra')
- orig_extra = _yaml.node_get(base, dict, 'extra')
+ copy_extra = base_copy.get_mapping('extra')
+ orig_extra = base.get_mapping('extra')
# Test that the node copy has the overridden value...
- assert _yaml.node_get(copy_extra, str, 'old') == 'override'
+ assert copy_extra.get_str('old') == 'override'
# But the original node is not affected by the override.
- assert _yaml.node_get(orig_extra, str, 'old') == 'new'
+ assert orig_extra.get_str('old') == 'new'
# Tests for list composition
@@ -252,14 +249,14 @@ def test_list_composition(datafiles, filename, tmpdir,
base = _yaml.load(base_file, 'basics.yaml')
overlay = _yaml.load(overlay_file, shortname=filename)
- _yaml.composite_dict(base, overlay)
+ overlay._composite(base)
- children = _yaml.node_get(base, list, 'children')
+ children = base.get_sequence('children')
assert len(children) == length
- child = children[index]
+ child = children.mapping_at(index)
- assert _yaml.node_get(child, str, 'mood') == mood
- assert_provenance(prov_file, prov_line, prov_col, child, 'mood')
+ assert child.get_str('mood') == mood
+ assert_provenance(prov_file, prov_line, prov_col, child.get_node('mood'))
# Test that overwriting a list with an empty list works as expected.
@@ -270,26 +267,12 @@ def test_list_deletion(datafiles):
base = _yaml.load(base, shortname='basics.yaml')
overlay = _yaml.load(overlay, shortname='listoverwriteempty.yaml')
- _yaml.composite_dict(base, overlay)
+ overlay._composite(base)
- children = _yaml.node_get(base, list, 'children')
+ children = base.get_sequence('children')
assert not children
-# Test that extending a non-existent list works as expected
-@pytest.mark.datafiles(os.path.join(DATA_DIR))
-def test_nonexistent_list_extension(datafiles):
- base = os.path.join(datafiles.dirname, datafiles.basename, 'basics.yaml')
-
- base = _yaml.load(base, shortname='basics.yaml')
- assert 'todo' not in base
-
- _yaml.node_extend_list(base, 'todo', 3, 'empty')
-
- assert len(_yaml.node_get(base, list, 'todo')) == 3
- assert _yaml.node_get(base, list, 'todo') == ['empty', 'empty', 'empty']
-
-
# Tests for deep list composition
#
# Same as test_list_composition(), but adds an additional file
@@ -387,15 +370,15 @@ def test_list_composition_twice(datafiles, tmpdir, filename1, filename2,
overlay1 = _yaml.load(file1, shortname=filename1)
overlay2 = _yaml.load(file2, shortname=filename2)
- _yaml.composite_dict(base, overlay1)
- _yaml.composite_dict(base, overlay2)
+ overlay1._composite(base)
+ overlay2._composite(base)
- children = _yaml.node_get(base, list, 'children')
+ children = base.get_sequence('children')
assert len(children) == length
- child = children[index]
+ child = children.mapping_at(index)
- assert _yaml.node_get(child, str, 'mood') == mood
- assert_provenance(prov_file, prov_line, prov_col, child, 'mood')
+ assert child.get_str('mood') == mood
+ assert_provenance(prov_file, prov_line, prov_col, child.get_node('mood'))
#####################
# Round 2 - Fight !
@@ -404,15 +387,15 @@ def test_list_composition_twice(datafiles, tmpdir, filename1, filename2,
overlay1 = _yaml.load(file1, shortname=filename1)
overlay2 = _yaml.load(file2, shortname=filename2)
- _yaml.composite_dict(overlay1, overlay2)
- _yaml.composite_dict(base, overlay1)
+ overlay2._composite(overlay1)
+ overlay1._composite(base)
- children = _yaml.node_get(base, list, 'children')
+ children = base.get_sequence('children')
assert len(children) == length
- child = children[index]
+ child = children.mapping_at(index)
- assert _yaml.node_get(child, str, 'mood') == mood
- assert_provenance(prov_file, prov_line, prov_col, child, 'mood')
+ assert child.get_str('mood') == mood
+ assert_provenance(prov_file, prov_line, prov_col, child.get_node('mood'))
@pytest.mark.datafiles(os.path.join(DATA_DIR))
@@ -424,19 +407,19 @@ def test_convert_value_to_string(datafiles):
# Run file through yaml to convert it
test_dict = _yaml.load(conf_file)
- user_config = _yaml.node_get(test_dict, str, "Test1")
+ user_config = test_dict.get_str("Test1")
assert isinstance(user_config, str)
assert user_config == "1_23_4"
- user_config = _yaml.node_get(test_dict, str, "Test2")
+ user_config = test_dict.get_str("Test2")
assert isinstance(user_config, str)
assert user_config == "1.23.4"
- user_config = _yaml.node_get(test_dict, str, "Test3")
+ user_config = test_dict.get_str("Test3")
assert isinstance(user_config, str)
assert user_config == "1.20"
- user_config = _yaml.node_get(test_dict, str, "Test4")
+ user_config = test_dict.get_str("Test4")
assert isinstance(user_config, str)
assert user_config == "OneTwoThree"
@@ -451,7 +434,7 @@ def test_value_doesnt_match_expected(datafiles):
test_dict = _yaml.load(conf_file)
with pytest.raises(LoadError) as exc:
- _yaml.node_get(test_dict, int, "Test4")
+ test_dict.get_int("Test4")
assert exc.value.reason == LoadErrorReason.INVALID_DATA
@@ -511,9 +494,9 @@ def test_node_find_target(datafiles, case):
# are not the same nodes as in `prov.toplevel`
loaded = _yaml.load(filename, copy_tree=True)
- prov = _yaml.node_get_provenance(loaded)
+ prov = loaded.get_provenance()
- toplevel = prov.toplevel
+ toplevel = prov._toplevel
assert toplevel is not loaded
@@ -521,12 +504,19 @@ def test_node_find_target(datafiles, case):
# laid out. Client code should never do this.
def _walk(node, entry, rest):
if rest:
- return _walk(node.value[entry], rest[0], rest[1:])
+ if isinstance(entry, int):
+ new_node = node.node_at(entry)
+ else:
+ new_node = node.get_node(entry)
+
+ return _walk(new_node, rest[0], rest[1:])
else:
- return node.value[entry]
+ if isinstance(entry, int):
+ return node.node_at(entry)
+ return node.get_node(entry)
want = _walk(loaded, case[0], case[1:])
- found_path = _yaml.node_find_target(toplevel, want)
+ found_path = toplevel._find(want)
assert case == found_path
@@ -538,6 +528,6 @@ def test_node_find_target_fails(datafiles):
"traversal.yaml")
loaded = _yaml.load(filename, copy_tree=True)
- brand_new = _yaml.new_empty_node()
+ brand_new = Node.from_dict({})
- assert _yaml.node_find_target(loaded, brand_new) is None
+ assert loaded._find(brand_new) is None
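
The yaml.py rewrite above exercises most of the new Node API in one place: typed getters replace _yaml.node_get(), item assignment replaces _yaml.node_set(), validate_keys() replaces _yaml.node_validate(), and provenance is asked of the node itself. A condensed sketch, assuming a file shaped like the tests' basics.yaml:

    from buildstream import _yaml

    # basics.yaml is assumed to look roughly like:
    #   kind: pony
    #   moods: [happy, sad]
    #   children: [{mood: silly}, ...]
    loaded = _yaml.load('basics.yaml')

    # Typed getters replace _yaml.node_get(node, type, key):
    kind = loaded.get_str('kind')
    moods = loaded.get_sequence('moods').as_str_list()

    # mapping_at()/scalar_at() replace the old indices= argument:
    child = loaded.get_sequence('children').mapping_at(0)

    # Provenance is now a method on any node:
    provenance = child.get_scalar('mood').get_provenance()

    # Plain item assignment replaces _yaml.node_set():
    loaded['kind'] = 'cow'

    # Key validation moved onto the mapping itself:
    loaded.validate_keys(['kind', 'moods', 'children'])
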
diff --git a/tests/remoteexecution/buildfail.py b/tests/remoteexecution/buildfail.py
index 031e5829e..0fb4cdb95 100644
--- a/tests/remoteexecution/buildfail.py
+++ b/tests/remoteexecution/buildfail.py
@@ -56,7 +56,7 @@ def test_build_remote_failure(cli, datafiles):
],
},
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
services = cli.ensure_services()
assert set(services) == set(['action-cache', 'execution', 'storage'])
diff --git a/tests/remoteexecution/junction.py b/tests/remoteexecution/junction.py
index 8130e4c7c..87d00a40d 100644
--- a/tests/remoteexecution/junction.py
+++ b/tests/remoteexecution/junction.py
@@ -37,7 +37,7 @@ DATA_DIR = os.path.join(
def configure_project(path, config):
config['name'] = 'test'
config['element-path'] = 'elements'
- _yaml.dump(config, os.path.join(path, 'project.conf'))
+ _yaml.roundtrip_dump(config, os.path.join(path, 'project.conf'))
def create_element(repo, name, path, dependencies, ref=None):
@@ -48,7 +48,7 @@ def create_element(repo, name, path, dependencies, ref=None):
],
'depends': dependencies
}
- _yaml.dump(element, os.path.join(path, name))
+ _yaml.roundtrip_dump(element, os.path.join(path, name))
@pytest.mark.datafiles(DATA_DIR)
@@ -97,7 +97,7 @@ def test_junction_build_remote(cli, tmpdir, datafiles):
}
]
}
- _yaml.dump(element, os.path.join(element_path, 'composed.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, 'composed.bst'))
# We're doing remote execution so ensure services are available
services = cli.ensure_services()
diff --git a/tests/sandboxes/fallback.py b/tests/sandboxes/fallback.py
index eebe7ddb2..d8170c61d 100644
--- a/tests/sandboxes/fallback.py
+++ b/tests/sandboxes/fallback.py
@@ -54,7 +54,7 @@ def test_fallback_platform_fails(cli, datafiles):
],
},
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
clean_platform_cache()
diff --git a/tests/sandboxes/missing_dependencies.py b/tests/sandboxes/missing_dependencies.py
index 79153f769..33a169ca2 100644
--- a/tests/sandboxes/missing_dependencies.py
+++ b/tests/sandboxes/missing_dependencies.py
@@ -39,7 +39,7 @@ def test_missing_brwap_has_nice_error_message(cli, datafiles):
],
},
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
# Build without access to host tools, this should fail with a nice error
result = cli.run(
@@ -82,7 +82,7 @@ def test_old_brwap_has_nice_error_message(cli, datafiles, tmp_path):
],
},
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
# Build without access to host tools, this should fail with a nice error
result = cli.run(
diff --git a/tests/sandboxes/remote-exec-config.py b/tests/sandboxes/remote-exec-config.py
index 90418d6fc..a6aeeb7ab 100644
--- a/tests/sandboxes/remote-exec-config.py
+++ b/tests/sandboxes/remote-exec-config.py
@@ -38,7 +38,7 @@ def test_old_and_new_configs(cli, datafiles):
}
}
project_conf_file = os.path.join(project, 'project.conf')
- _yaml.dump(project_conf, project_conf_file)
+ _yaml.roundtrip_dump(project_conf, project_conf_file)
# Use `pull` here to ensure we try to initialize the remotes, triggering the error
#
@@ -72,7 +72,7 @@ def test_missing_certs(cli, datafiles, config_key, config_value):
}
}
project_conf_file = os.path.join(project, 'project.conf')
- _yaml.dump(project_conf, project_conf_file)
+ _yaml.roundtrip_dump(project_conf, project_conf_file)
# Use `pull` here to ensure we try to initialize the remotes, triggering the error
#
@@ -93,7 +93,7 @@ def test_empty_config(cli, datafiles):
}
}
project_conf_file = os.path.join(project, 'project.conf')
- _yaml.dump(project_conf, project_conf_file)
+ _yaml.roundtrip_dump(project_conf, project_conf_file)
# Use `pull` here to ensure we try to initialize the remotes, triggering the error
#
diff --git a/tests/sandboxes/selection.py b/tests/sandboxes/selection.py
index c20ce3d3a..50406b4cb 100644
--- a/tests/sandboxes/selection.py
+++ b/tests/sandboxes/selection.py
@@ -54,7 +54,7 @@ def test_force_sandbox(cli, datafiles):
],
},
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
clean_platform_cache()
@@ -87,7 +87,7 @@ def test_dummy_sandbox_fallback(cli, datafiles):
],
},
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
clean_platform_cache()
diff --git a/tests/sourcecache/cache.py b/tests/sourcecache/cache.py
index 793344ef0..9aa2c67ac 100644
--- a/tests/sourcecache/cache.py
+++ b/tests/sourcecache/cache.py
@@ -96,7 +96,7 @@ def test_source_cache_key(cli, datafiles):
}
]
}
- _yaml.dump(element, os.path.join(element_path, element_name))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
res = cli.run(project=project_dir, args=["source", "track", element_name])
res.assert_success()
diff --git a/tests/sourcecache/config.py b/tests/sourcecache/config.py
index 7c33adbe0..2ab11e9f9 100644
--- a/tests/sourcecache/config.py
+++ b/tests/sourcecache/config.py
@@ -53,7 +53,7 @@ def test_missing_certs(cli, datafiles, config_key, config_value):
}
}
project_conf_file = os.path.join(project, 'project.conf')
- _yaml.dump(project_conf, project_conf_file)
+ _yaml.roundtrip_dump(project_conf, project_conf_file)
# Use `pull` here to ensure we try to initialize the remotes, triggering the error
#
diff --git a/tests/sourcecache/fetch.py b/tests/sourcecache/fetch.py
index de8587862..300f0c84c 100644
--- a/tests/sourcecache/fetch.py
+++ b/tests/sourcecache/fetch.py
@@ -56,7 +56,7 @@ def test_source_fetch(cli, tmpdir, datafiles):
},
'cachedir': cache_dir,
}
- _yaml.dump(user_config, filename=user_config_file)
+ _yaml.roundtrip_dump(user_config, file=user_config_file)
cli.configure(user_config)
repo = create_repo('git', str(tmpdir))
@@ -67,7 +67,7 @@ def test_source_fetch(cli, tmpdir, datafiles):
'kind': 'import',
'sources': [repo.source_config(ref=ref)]
}
- _yaml.dump(element, os.path.join(element_path, element_name))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
context = Context()
context.load(config=user_config_file)
@@ -138,7 +138,7 @@ def test_fetch_fallback(cli, tmpdir, datafiles):
},
'cachedir': cache_dir,
}
- _yaml.dump(user_config, filename=user_config_file)
+ _yaml.roundtrip_dump(user_config, file=user_config_file)
cli.configure(user_config)
repo = create_repo('git', str(tmpdir))
@@ -149,7 +149,7 @@ def test_fetch_fallback(cli, tmpdir, datafiles):
'kind': 'import',
'sources': [repo.source_config(ref=ref)]
}
- _yaml.dump(element, os.path.join(element_path, element_name))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
context = Context()
context.load(config=user_config_file)
@@ -195,7 +195,7 @@ def test_pull_fail(cli, tmpdir, datafiles):
},
'cachedir': cache_dir,
}
- _yaml.dump(user_config, filename=user_config_file)
+ _yaml.roundtrip_dump(user_config, file=user_config_file)
cli.configure(user_config)
repo = create_repo('git', str(tmpdir))
@@ -206,7 +206,7 @@ def test_pull_fail(cli, tmpdir, datafiles):
'kind': 'import',
'sources': [repo.source_config(ref=ref)]
}
- _yaml.dump(element, os.path.join(element_path, element_name))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# get the source object
context = Context()
diff --git a/tests/sourcecache/push.py b/tests/sourcecache/push.py
index 23f5f1ca1..e9c72d47c 100644
--- a/tests/sourcecache/push.py
+++ b/tests/sourcecache/push.py
@@ -56,7 +56,7 @@ def test_source_push(cli, tmpdir, datafiles):
},
'cachedir': cache_dir,
}
- _yaml.dump(user_config, filename=user_config_file)
+ _yaml.roundtrip_dump(user_config, file=user_config_file)
cli.configure(user_config)
repo = create_repo('git', str(tmpdir))
@@ -67,7 +67,7 @@ def test_source_push(cli, tmpdir, datafiles):
'kind': 'import',
'sources': [repo.source_config(ref=ref)]
}
- _yaml.dump(element, os.path.join(element_path, element_name))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# get the source object
context = Context()
@@ -116,7 +116,7 @@ def test_push_pull(cli, datafiles, tmpdir):
},
'cachedir': cache_dir,
}
- _yaml.dump(user_config, filename=user_config_file)
+ _yaml.roundtrip_dump(user_config, file=user_config_file)
cli.configure(user_config)
# create repo to pull from
@@ -128,7 +128,7 @@ def test_push_pull(cli, datafiles, tmpdir):
'kind': 'import',
'sources': [repo.source_config(ref=ref)]
}
- _yaml.dump(element, os.path.join(element_path, element_name))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
res = cli.run(project=project_dir, args=['build', 'push.bst'])
res.assert_success()
@@ -162,7 +162,7 @@ def test_push_fail(cli, tmpdir, datafiles):
},
'cachedir': cache_dir,
}
- _yaml.dump(user_config, filename=user_config_file)
+ _yaml.roundtrip_dump(user_config, file=user_config_file)
cli.configure(user_config)
# create repo to pull from
@@ -174,7 +174,7 @@ def test_push_fail(cli, tmpdir, datafiles):
'kind': 'import',
'sources': [repo.source_config(ref=ref)]
}
- _yaml.dump(element, os.path.join(element_path, element_name))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# build and check that it fails to set up the remote
res = cli.run(project=project_dir, args=['build', 'push.bst'])
@@ -212,7 +212,7 @@ def test_source_push_build_fail(cli, tmpdir, datafiles):
'kind': 'always_fail',
'sources': [repo.source_config(ref=ref)]
}
- _yaml.dump(element, os.path.join(element_path, element_name))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
res = cli.run(project=project_dir, args=['build', 'always-fail.bst'])
res.assert_main_error(ErrorDomain.STREAM, None)
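
Note the keyword rename in the sourcecache hunks above: where user configuration is written out, the old filename= argument becomes file=, which also accepts an open file object. A sketch with a hypothetical config:

    from buildstream import _yaml

    user_config = {'cachedir': '/tmp/bst-cache'}  # hypothetical value
    _yaml.roundtrip_dump(user_config, file='buildstream.conf')
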
diff --git a/tests/sources/bzr.py b/tests/sources/bzr.py
index 4a66d89b3..c6e78f8c1 100644
--- a/tests/sources/bzr.py
+++ b/tests/sources/bzr.py
@@ -32,7 +32,7 @@ def test_fetch_checkout(cli, tmpdir, datafiles):
repo.source_config(ref=ref)
]
}
- _yaml.dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
# Fetch, build, checkout
result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
diff --git a/tests/sources/deb.py b/tests/sources/deb.py
index bdde20aaa..e536e522a 100644
--- a/tests/sources/deb.py
+++ b/tests/sources/deb.py
@@ -22,7 +22,7 @@ deb_name = "a_deb.deb"
def generate_project(project_dir, tmpdir):
project_file = os.path.join(project_dir, "project.conf")
- _yaml.dump({
+ _yaml.roundtrip_dump({
'name': 'foo',
'aliases': {
'tmpdir': "file:///" + str(tmpdir)
diff --git a/tests/sources/git.py b/tests/sources/git.py
index 45a2b827a..245f90131 100644
--- a/tests/sources/git.py
+++ b/tests/sources/git.py
@@ -30,7 +30,7 @@ import shutil
import pytest
from buildstream._exceptions import ErrorDomain
-from buildstream import _yaml
+from buildstream import _yaml, Node
from buildstream.plugin import CoreWarnings
from buildstream.testing import cli # pylint: disable=unused-import
from buildstream.testing import create_repo
@@ -58,7 +58,7 @@ def test_fetch_bad_ref(cli, tmpdir, datafiles):
repo.source_config(ref='5')
]
}
- _yaml.dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
# Assert that fetch raises an error here
result = cli.run(project=project, args=[
@@ -92,7 +92,7 @@ def test_submodule_fetch_checkout(cli, tmpdir, datafiles):
repo.source_config(ref=ref)
]
}
- _yaml.dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
# Fetch, build, checkout
result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
@@ -131,7 +131,7 @@ def test_submodule_fetch_source_enable_explicit(cli, tmpdir, datafiles):
repo.source_config_extra(ref=ref, checkout_submodules=True)
]
}
- _yaml.dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
# Fetch, build, checkout
result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
@@ -170,7 +170,7 @@ def test_submodule_fetch_source_disable(cli, tmpdir, datafiles):
repo.source_config_extra(ref=ref, checkout_submodules=False)
]
}
- _yaml.dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
# Fetch, build, checkout
result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
@@ -209,7 +209,7 @@ def test_submodule_fetch_submodule_does_override(cli, tmpdir, datafiles):
repo.source_config_extra(ref=ref, checkout_submodules=False)
]
}
- _yaml.dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
# Fetch, build, checkout
result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
@@ -253,7 +253,7 @@ def test_submodule_fetch_submodule_individual_checkout(cli, tmpdir, datafiles):
repo.source_config_extra(ref=ref, checkout_submodules=True)
]
}
- _yaml.dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
# Fetch, build, checkout
result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
@@ -298,7 +298,7 @@ def test_submodule_fetch_submodule_individual_checkout_explicit(cli, tmpdir, dat
repo.source_config_extra(ref=ref, checkout_submodules=True)
]
}
- _yaml.dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
# Fetch, build, checkout
result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
@@ -338,7 +338,7 @@ def test_submodule_fetch_project_override(cli, tmpdir, datafiles):
repo.source_config(ref=ref)
]
}
- _yaml.dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
# Fetch, build, checkout
result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
@@ -369,7 +369,7 @@ def test_submodule_track_ignore_inconsistent(cli, tmpdir, datafiles):
repo.source_config(ref=ref)
]
}
- _yaml.dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
# Now add a .gitmodules file with an inconsistent submodule,
# we are calling this inconsistent because the file was created
@@ -409,7 +409,7 @@ def test_submodule_track_no_ref_or_track(cli, tmpdir, datafiles):
]
}
- _yaml.dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
# Track will encounter an inconsistent submodule without any ref
result = cli.run(project=project, args=['show', 'target.bst'])
@@ -429,7 +429,7 @@ def test_ref_not_in_track(cli, tmpdir, datafiles, fail):
"name": "foo",
"fatal-warnings": [CoreWarnings.REF_NOT_IN_TRACK]
}
- _yaml.dump(project_template, os.path.join(project, 'project.conf'))
+ _yaml.roundtrip_dump(project_template, os.path.join(project, 'project.conf'))
# Create the repo from 'repofiles', create a branch without latest commit
repo = create_repo('git', str(tmpdir))
@@ -447,7 +447,7 @@ def test_ref_not_in_track(cli, tmpdir, datafiles, fail):
gitsource
]
}
- _yaml.dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
result = cli.run(project=project, args=['build', 'target.bst'])
@@ -472,7 +472,7 @@ def test_unlisted_submodule(cli, tmpdir, datafiles, fail):
"name": "foo",
"fatal-warnings": ['git:unlisted-submodule']
}
- _yaml.dump(project_template, os.path.join(project, 'project.conf'))
+ _yaml.roundtrip_dump(project_template, os.path.join(project, 'project.conf'))
# Create the submodule first from the 'subrepofiles' subdir
subrepo = create_repo('git', str(tmpdir), 'subrepo')
@@ -501,7 +501,7 @@ def test_unlisted_submodule(cli, tmpdir, datafiles, fail):
gitsource
]
}
- _yaml.dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
# We will not see the warning or error before the first fetch, because
# we don't have the repository yet and so we have no knowledge of
@@ -545,7 +545,7 @@ def test_track_unlisted_submodule(cli, tmpdir, datafiles, fail):
"name": "foo",
"fatal-warnings": ['git:unlisted-submodule']
}
- _yaml.dump(project_template, os.path.join(project, 'project.conf'))
+ _yaml.roundtrip_dump(project_template, os.path.join(project, 'project.conf'))
# Create the submodule first from the 'subrepofiles' subdir
subrepo = create_repo('git', str(tmpdir), 'subrepo')
@@ -571,7 +571,7 @@ def test_track_unlisted_submodule(cli, tmpdir, datafiles, fail):
gitsource
]
}
- _yaml.dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
# Fetch the repo, we will not see the warning because we
# are still pointing to a ref which predates the submodules
@@ -608,7 +608,7 @@ def test_invalid_submodule(cli, tmpdir, datafiles, fail):
"name": "foo",
"fatal-warnings": ['git:invalid-submodule']
}
- _yaml.dump(project_template, os.path.join(project, 'project.conf'))
+ _yaml.roundtrip_dump(project_template, os.path.join(project, 'project.conf'))
# Create the repo from 'repofiles' subdir
repo = create_repo('git', str(tmpdir))
@@ -635,7 +635,7 @@ def test_invalid_submodule(cli, tmpdir, datafiles, fail):
gitsource
]
}
- _yaml.dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
# We will not see the warning or error before the first fetch, because
# we don't have the repository yet and so we have no knowledge of
@@ -680,7 +680,7 @@ def test_track_invalid_submodule(cli, tmpdir, datafiles, fail):
"name": "foo",
"fatal-warnings": ['git:invalid-submodule']
}
- _yaml.dump(project_template, os.path.join(project, 'project.conf'))
+ _yaml.roundtrip_dump(project_template, os.path.join(project, 'project.conf'))
# Create the submodule first from the 'subrepofiles' subdir
subrepo = create_repo('git', str(tmpdir), 'subrepo')
@@ -707,7 +707,7 @@ def test_track_invalid_submodule(cli, tmpdir, datafiles, fail):
gitsource
]
}
- _yaml.dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
# Fetch the repo, we will not see the warning because we
# are still pointing to a ref which predates the submodules
@@ -753,14 +753,14 @@ def test_track_fetch(cli, tmpdir, datafiles, ref_format, tag, extra_commit):
}
element['sources'][0]['ref-format'] = ref_format
element_path = os.path.join(project, 'target.bst')
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
# Track it
result = cli.run(project=project, args=['source', 'track', 'target.bst'])
result.assert_success()
element = _yaml.load(element_path)
- new_ref = _yaml.node_get(_yaml.node_get(element, dict, 'sources', [0]), str, 'ref')
+ new_ref = element.get_sequence('sources').mapping_at(0).get_str('ref')
if ref_format == 'git-describe' and tag:
# Check and strip prefix
@@ -785,8 +785,8 @@ def test_git_describe(cli, tmpdir, datafiles, ref_storage, tag_type):
project = str(datafiles)
project_config = _yaml.load(os.path.join(project, 'project.conf'))
- _yaml.node_set(project_config, 'ref-storage', ref_storage)
- _yaml.dump(project_config, os.path.join(project, 'project.conf'))
+ project_config['ref-storage'] = ref_storage
+ _yaml.roundtrip_dump(project_config, os.path.join(project, 'project.conf'))
repofiles = os.path.join(str(tmpdir), 'repofiles')
os.makedirs(repofiles, exist_ok=True)
@@ -838,7 +838,7 @@ def test_git_describe(cli, tmpdir, datafiles, ref_storage, tag_type):
],
}
element_path = os.path.join(project, 'target.bst')
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
if ref_storage == 'inline':
result = cli.run(project=project, args=['source', 'track', 'target.bst'])
@@ -849,16 +849,16 @@ def test_git_describe(cli, tmpdir, datafiles, ref_storage, tag_type):
if ref_storage == 'inline':
element = _yaml.load(element_path)
- tags = _yaml.node_get(_yaml.node_get(element, dict, 'sources', [0]), list, 'tags')
+ tags = element.get_sequence('sources').mapping_at(0).get_sequence('tags')
assert len(tags) == 2
for tag in tags:
assert 'tag' in tag
assert 'commit' in tag
assert 'annotated' in tag
- assert _yaml.node_get(tag, bool, 'annotated') == (tag_type == 'annotated')
+ assert tag.get_bool('annotated') == (tag_type == 'annotated')
- assert {(_yaml.node_get(tag, str, 'tag'),
- _yaml.node_get(tag, str, 'commit'))
+ assert {(tag.get_str('tag'),
+ tag.get_str('commit'))
for tag in tags} == {('tag1', repo.rev_parse('tag1^{commit}')),
('tag2', repo.rev_parse('tag2^{commit}'))}
@@ -899,8 +899,8 @@ def test_git_describe_head_is_tagged(cli, tmpdir, datafiles, ref_storage, tag_ty
project = str(datafiles)
project_config = _yaml.load(os.path.join(project, 'project.conf'))
- _yaml.node_set(project_config, 'ref-storage', ref_storage)
- _yaml.dump(project_config, os.path.join(project, 'project.conf'))
+ project_config['ref-storage'] = ref_storage
+ _yaml.roundtrip_dump(project_config, os.path.join(project, 'project.conf'))
repofiles = os.path.join(str(tmpdir), 'repofiles')
os.makedirs(repofiles, exist_ok=True)
@@ -951,7 +951,7 @@ def test_git_describe_head_is_tagged(cli, tmpdir, datafiles, ref_storage, tag_ty
],
}
element_path = os.path.join(project, 'target.bst')
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
if ref_storage == 'inline':
result = cli.run(project=project, args=['source', 'track', 'target.bst'])
@@ -962,18 +962,18 @@ def test_git_describe_head_is_tagged(cli, tmpdir, datafiles, ref_storage, tag_ty
if ref_storage == 'inline':
element = _yaml.load(element_path)
- source = _yaml.node_get(element, dict, 'sources', indices=[0])
- tags = _yaml.node_get(source, list, 'tags')
+ source = element.get_sequence('sources').mapping_at(0)
+ tags = source.get_sequence('tags')
assert len(tags) == 1
- tag = _yaml.node_get(source, dict, 'tags', indices=[0])
+ tag = source.get_sequence('tags').mapping_at(0)
assert 'tag' in tag
assert 'commit' in tag
assert 'annotated' in tag
- assert _yaml.node_get(tag, bool, 'annotated') == (tag_type == 'annotated')
+ assert tag.get_bool('annotated') == (tag_type == 'annotated')
- tag_name = _yaml.node_get(tag, str, 'tag')
- commit = _yaml.node_get(tag, str, 'commit')
+ tag_name = tag.get_str('tag')
+ commit = tag.get_str('commit')
assert (tag_name, commit) == ('tag', repo.rev_parse('tag^{commit}'))
checkout = os.path.join(str(tmpdir), 'checkout')
@@ -1014,8 +1014,8 @@ def test_git_describe_relevant_history(cli, tmpdir, datafiles):
project = str(datafiles)
project_config = _yaml.load(os.path.join(project, 'project.conf'))
- _yaml.node_set(project_config, 'ref-storage', 'project.refs')
- _yaml.dump(project_config, os.path.join(project, 'project.conf'))
+ project_config['ref-storage'] = 'project.refs'
+ _yaml.roundtrip_dump(project_config, os.path.join(project, 'project.conf'))
repofiles = os.path.join(str(tmpdir), 'repofiles')
os.makedirs(repofiles, exist_ok=True)
@@ -1064,7 +1064,7 @@ def test_git_describe_relevant_history(cli, tmpdir, datafiles):
],
}
element_path = os.path.join(project, 'target.bst')
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
result = cli.run(project=project, args=['source', 'track', 'target.bst', '--deps', 'all'])
result.assert_success()
@@ -1094,8 +1094,8 @@ def test_default_do_not_track_tags(cli, tmpdir, datafiles):
project = str(datafiles)
project_config = _yaml.load(os.path.join(project, 'project.conf'))
- _yaml.node_set(project_config, 'ref-storage', 'inline')
- _yaml.dump(project_config, os.path.join(project, 'project.conf'))
+ project_config['ref-storage'] = 'inline'
+ _yaml.roundtrip_dump(project_config, os.path.join(project, 'project.conf'))
repofiles = os.path.join(str(tmpdir), 'repofiles')
os.makedirs(repofiles, exist_ok=True)
@@ -1119,13 +1119,13 @@ def test_default_do_not_track_tags(cli, tmpdir, datafiles):
],
}
element_path = os.path.join(project, 'target.bst')
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
result = cli.run(project=project, args=['source', 'track', 'target.bst'])
result.assert_success()
element = _yaml.load(element_path)
- source = _yaml.node_get(element, dict, 'sources', indices=[0])
+ source = element.get_sequence('sources').mapping_at(0)
assert 'tags' not in source
@@ -1151,18 +1151,18 @@ def test_overwrite_rogue_tag_multiple_remotes(cli, tmpdir, datafiles):
repodir, reponame = os.path.split(repo.repo)
project_config = _yaml.load(os.path.join(project, 'project.conf'))
- _yaml.node_set(project_config, 'aliases', _yaml.new_node_from_dict({
+ project_config['aliases'] = Node.from_dict({
'repo': 'http://example.com/'
- }))
- _yaml.node_set(project_config, 'mirrors', [
+ })
+ project_config['mirrors'] = [
{
'name': 'middle-earth',
'aliases': {
'repo': ['file://{}/'.format(repodir)]
}
}
- ])
- _yaml.dump(project_config, os.path.join(project, 'project.conf'))
+ ]
+ _yaml.roundtrip_dump(project_config, os.path.join(project, 'project.conf'))
repo.add_annotated_tag('tag', 'tag')
@@ -1184,7 +1184,7 @@ def test_overwrite_rogue_tag_multiple_remotes(cli, tmpdir, datafiles):
],
}
element_path = os.path.join(project, 'target.bst')
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
result = cli.run(project=project, args=['build', 'target.bst'])
result.assert_success()
@@ -1208,7 +1208,7 @@ def test_overwrite_rogue_tag_multiple_remotes(cli, tmpdir, datafiles):
repodir, reponame = os.path.split(repo.repo)
- _yaml.dump(project_config, os.path.join(project, 'project.conf'))
+ _yaml.roundtrip_dump(project_config, os.path.join(project, 'project.conf'))
config = repo.source_config(ref=new_ref)
del config['track']
@@ -1220,7 +1220,7 @@ def test_overwrite_rogue_tag_multiple_remotes(cli, tmpdir, datafiles):
config
],
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
result = cli.run(project=project, args=['build', 'target.bst'])
result.assert_success()
diff --git a/tests/sources/local.py b/tests/sources/local.py
index fb5d36081..4b72a4343 100644
--- a/tests/sources/local.py
+++ b/tests/sources/local.py
@@ -217,4 +217,4 @@ def test_deterministic_source_umask(cli, tmpdir, datafiles):
]
}
}
- _yaml.dump(element, element_path)
+ _yaml.roundtrip_dump(element, element_path)
diff --git a/tests/sources/no_fetch_cached.py b/tests/sources/no_fetch_cached.py
index fcbb42398..81032881c 100644
--- a/tests/sources/no_fetch_cached.py
+++ b/tests/sources/no_fetch_cached.py
@@ -39,7 +39,7 @@ def test_no_fetch_cached(cli, tmpdir, datafiles):
}
]
}
- _yaml.dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
# Test fetch of target with a cached and a non-cached source
result = cli.run(project=project, args=[
diff --git a/tests/sources/pip.py b/tests/sources/pip.py
index cf6ea5498..7f91ba701 100644
--- a/tests/sources/pip.py
+++ b/tests/sources/pip.py
@@ -17,7 +17,7 @@ DATA_DIR = os.path.join(
def generate_project(project_dir):
project_file = os.path.join(project_dir, "project.conf")
- _yaml.dump({'name': 'foo'}, project_file)
+ _yaml.roundtrip_dump({'name': 'foo'}, project_file)
# Test that without ref, consistency is set appropriately.
diff --git a/tests/sources/previous_source_access.py b/tests/sources/previous_source_access.py
index 800e0ced5..750b94381 100644
--- a/tests/sources/previous_source_access.py
+++ b/tests/sources/previous_source_access.py
@@ -24,9 +24,9 @@ def test_custom_transform_source(cli, datafiles):
# Set the project_dir alias in project.conf to the path to the tested project
project_config_path = os.path.join(project, "project.conf")
project_config = _yaml.load(project_config_path)
- aliases = _yaml.node_get(project_config, dict, "aliases")
- _yaml.node_set(aliases, "project_dir", "file://{}".format(project))
- _yaml.dump(project_config, project_config_path)
+ aliases = project_config.get_mapping("aliases")
+ aliases["project_dir"] = "file://{}".format(project)
+ _yaml.roundtrip_dump(project_config, project_config_path)
# Ensure we can track
result = cli.run(project=project, args=[
diff --git a/tests/sources/previous_source_access/plugins/sources/foo_transform.py b/tests/sources/previous_source_access/plugins/sources/foo_transform.py
index bec4f9913..4b423a1b3 100644
--- a/tests/sources/previous_source_access/plugins/sources/foo_transform.py
+++ b/tests/sources/previous_source_access/plugins/sources/foo_transform.py
@@ -31,8 +31,8 @@ class FooTransformSource(Source):
return path
def configure(self, node):
- self.node_validate(node, ['ref', *Source.COMMON_CONFIG_KEYS])
- self.ref = self.node_get_member(node, str, 'ref', None)
+ node.validate_keys(['ref', *Source.COMMON_CONFIG_KEYS])
+ self.ref = node.get_str('ref', None)
def preflight(self):
pass
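
The foo_transform change above shows the plugin-facing side of the migration: configure() now receives a MappingNode and calls validate_keys() and get_str() on it directly. A standalone sketch of that lookup, applied to a hand-built node:

    from buildstream import Node, Source

    # Hand-built stand-in for the node a plugin's configure() receives.
    node = Node.from_dict({'ref': 'abc123'})
    node.validate_keys(['ref', *Source.COMMON_CONFIG_KEYS])

    # get_str() takes the default as its second argument, replacing
    # self.node_get_member(node, str, 'ref', None).
    ref = node.get_str('ref', None)
    assert ref == 'abc123'
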
diff --git a/tests/sources/remote.py b/tests/sources/remote.py
index 8b57151b2..5b818b960 100644
--- a/tests/sources/remote.py
+++ b/tests/sources/remote.py
@@ -18,7 +18,7 @@ DATA_DIR = os.path.join(
def generate_project(project_dir, tmpdir):
project_file = os.path.join(project_dir, "project.conf")
- _yaml.dump({
+ _yaml.roundtrip_dump({
'name': 'foo',
'aliases': {
'tmpdir': "file:///" + str(tmpdir)
@@ -28,7 +28,7 @@ def generate_project(project_dir, tmpdir):
def generate_project_file_server(server, project_dir):
project_file = os.path.join(project_dir, "project.conf")
- _yaml.dump({
+ _yaml.roundtrip_dump({
'name': 'foo',
'aliases': {
'tmpdir': server.base_url()
diff --git a/tests/sources/tar.py b/tests/sources/tar.py
index 8d409b512..9a5559d47 100644
--- a/tests/sources/tar.py
+++ b/tests/sources/tar.py
@@ -47,7 +47,7 @@ def _assemble_tar_lz(workingdir, srcdir, dstfile):
def generate_project(project_dir, tmpdir):
project_file = os.path.join(project_dir, "project.conf")
- _yaml.dump({
+ _yaml.roundtrip_dump({
'name': 'foo',
'aliases': {
'tmpdir': "file:///" + str(tmpdir)
@@ -57,7 +57,7 @@ def generate_project(project_dir, tmpdir):
def generate_project_file_server(base_url, project_dir):
project_file = os.path.join(project_dir, "project.conf")
- _yaml.dump({
+ _yaml.roundtrip_dump({
'name': 'foo',
'aliases': {
'tmpdir': base_url
@@ -289,7 +289,7 @@ def test_read_only_dir(cli, tmpdir, datafiles, tar_name):
bst_path = os.path.join(project, "target.bst")
tar_file = "{}.tar.gz".format(tar_name)
- _yaml.dump({
+ _yaml.roundtrip_dump({
'kind': 'import',
'sources': [
{
diff --git a/tests/sources/zip.py b/tests/sources/zip.py
index cb7f440f1..3fd43b4bb 100644
--- a/tests/sources/zip.py
+++ b/tests/sources/zip.py
@@ -32,7 +32,7 @@ def _assemble_zip(workingdir, dstfile):
def generate_project(project_dir, tmpdir):
project_file = os.path.join(project_dir, "project.conf")
- _yaml.dump({
+ _yaml.roundtrip_dump({
'name': 'foo',
'aliases': {
'tmpdir': "file:///" + str(tmpdir)
@@ -42,7 +42,7 @@ def generate_project(project_dir, tmpdir):
def generate_project_file_server(server, project_dir):
project_file = os.path.join(project_dir, "project.conf")
- _yaml.dump({
+ _yaml.roundtrip_dump({
'name': 'foo',
'aliases': {
'tmpdir': server.base_url()
diff --git a/tests/testutils/__init__.py b/tests/testutils/__init__.py
index 9a904f007..9913e880d 100644
--- a/tests/testutils/__init__.py
+++ b/tests/testutils/__init__.py
@@ -28,5 +28,4 @@ from .element_generators import create_element_size, update_element_size
from .junction import generate_junction
from .runner_integration import wait_for_cache_granularity
from .python_repo import setup_pypi_repo
-from .yaml import yaml_file_get_provenance
from .platform import override_platform_uname
diff --git a/tests/testutils/element_generators.py b/tests/testutils/element_generators.py
index 38bafc6b6..0fbca7f3e 100644
--- a/tests/testutils/element_generators.py
+++ b/tests/testutils/element_generators.py
@@ -59,7 +59,7 @@ def create_element_size(name, project_dir, elements_path, dependencies, size):
},
'depends': dependencies
}
- _yaml.dump(element, os.path.join(project_dir, elements_path, name))
+ _yaml.roundtrip_dump(element, os.path.join(project_dir, elements_path, name))
# Return the repo, so that it can later be used to add commits
return repo
diff --git a/tests/testutils/junction.py b/tests/testutils/junction.py
index 8132e291c..e867695c4 100644
--- a/tests/testutils/junction.py
+++ b/tests/testutils/junction.py
@@ -30,6 +30,6 @@ def generate_junction(tmpdir, subproject_path, junction_path, *, store_ref=True)
repo.source_config(ref=source_ref)
]
}
- _yaml.dump(element, junction_path)
+ _yaml.roundtrip_dump(element, junction_path)
return ref
diff --git a/tests/testutils/yaml.py b/tests/testutils/yaml.py
deleted file mode 100644
index a1dcb8646..000000000
--- a/tests/testutils/yaml.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#
-# Copyright (C) 2019 Bloomberg Finance LP
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library. If not, see <http://www.gnu.org/licenses/>.
-#
-# Authors:
-# Angelos Evripiotis <jevripiotis@bloomberg.net>
-
-from buildstream import _yaml
-
-
-# yaml_file_get_provenance()
-#
-# Load a yaml file and return its _yaml.ProvenanceInformation object.
-#
-# This is useful for checking the provenance in BuildStream output is as
-# expected.
-#
-# Args:
-# path (str): The path to the file to be loaded
-# shortname (str): How the path should appear in the error
-# key (str): Optional key to look up in the loaded file
-# indices (list of indexes): Optional index path, in the case of list values
-#
-# Returns:
-# The ProvenanceInformation of the dict, member or list element
-#
-def yaml_file_get_provenance(path, shortname, key=None, indices=None):
- file_node = _yaml.load(path, shortname)
- if key:
- required_node = _yaml.node_get(file_node, dict, key, indices=indices)
- else:
- required_node = file_node
- provenance = _yaml.node_get_provenance(required_node)
- assert provenance is not None
- return provenance
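
With provenance now reachable from any loaded node, the yaml_file_get_provenance() helper deleted above has no remaining purpose. An inline equivalent, assuming a file and key like the helper's old arguments:

    from buildstream import _yaml

    # Load the file, walk to the node of interest, and ask it directly.
    file_node = _yaml.load('element.bst', shortname='element.bst')
    source = file_node.get_sequence('sources').mapping_at(0)  # hypothetical key/index
    provenance = source.get_provenance()
    assert provenance is not None
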