author     Benjamin Schubert <ben.c.schubert@gmail.com>    2019-07-10 13:59:12 +0100
committer  bst-marge-bot <marge-bot@buildstream.build>     2019-07-15 14:14:03 +0000
commit     301d40d1a42c056f7c9e8e734b6ce6251378cafb (patch)
tree       d3b3f57bf620c54e0796d353ad55f4408c42e2d9 /src/buildstream
parent     53019a61c926787b622b6a5f94f81096b043cf99 (diff)
download   buildstream-301d40d1a42c056f7c9e8e734b6ce6251378cafb.tar.gz
_yaml: Split Node-related parts into 'node.pyx'
This makes the 'Node' API public and available for direct use by plugins.
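
As a rough illustration of what the public API enables (a sketch only: the configuration keys and values below are invented, but the accessors are the ones moved into node.pyx by this commit), plugin code can now work with the node classes directly:

    from buildstream import MappingNode, Node

    # Stand-in for the 'node' a plugin's configure() receives from BuildStream;
    # the 'path' and 'flags' keys are purely illustrative.
    def configure(node: MappingNode):
        node.validate_keys(['path', 'flags'])       # provenance-aware key validation
        path = node.get_str('path')
        flags = node.get_sequence('flags', default=[]).as_str_list()
        return path, flags

    print(configure(Node.from_dict({'path': 'files/app', 'flags': ['-O2']})))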
Diffstat (limited to 'src/buildstream')
-rw-r--r--  src/buildstream/__init__.py | 1
-rw-r--r--  src/buildstream/_context.py | 3
-rw-r--r--  src/buildstream/_frontend/app.py | 6
-rw-r--r--  src/buildstream/_includes.py | 9
-rw-r--r--  src/buildstream/_loader/loader.py | 3
-rw-r--r--  src/buildstream/_loader/metaelement.py | 12
-rw-r--r--  src/buildstream/_loader/types.pyx | 28
-rw-r--r--  src/buildstream/_options/option.py | 4
-rw-r--r--  src/buildstream/_options/optionpool.py | 14
-rw-r--r--  src/buildstream/_project.py | 9
-rw-r--r--  src/buildstream/_projectrefs.py | 3
-rw-r--r--  src/buildstream/_variables.pyx | 8
-rw-r--r--  src/buildstream/_workspaces.py | 5
-rw-r--r--  src/buildstream/_yaml.pyx | 970
-rw-r--r--  src/buildstream/element.py | 9
-rw-r--r--  src/buildstream/node.pxd (renamed from src/buildstream/_yaml.pxd) | 5
-rw-r--r--  src/buildstream/node.pyx | 1013
-rw-r--r--  src/buildstream/sandbox/_sandboxremote.py | 3
18 files changed, 1090 insertions(+), 1015 deletions(-)
diff --git a/src/buildstream/__init__.py b/src/buildstream/__init__.py
index 62890a62f..cd8d0f1cf 100644
--- a/src/buildstream/__init__.py
+++ b/src/buildstream/__init__.py
@@ -29,6 +29,7 @@ if "_BST_COMPLETION" not in os.environ:
from .utils import UtilError, ProgramNotFoundError
from .sandbox import Sandbox, SandboxFlags, SandboxCommandError
from .types import Scope, Consistency, CoreWarnings
+ from .node import MappingNode, Node, ProvenanceInformation, ScalarNode, SequenceNode
from .plugin import Plugin
from .source import Source, SourceError, SourceFetcher
from .element import Element, ElementError
diff --git a/src/buildstream/_context.py b/src/buildstream/_context.py
index 37af45ca9..c29910418 100644
--- a/src/buildstream/_context.py
+++ b/src/buildstream/_context.py
@@ -30,6 +30,7 @@ from ._artifactcache import ArtifactCache
from ._sourcecache import SourceCache
from ._cas import CASCache, CASQuota, CASCacheUsage
from ._workspaces import Workspaces, WorkspaceProjectCache
+from .node import Node
from .sandbox import SandboxRemote
@@ -154,7 +155,7 @@ class Context():
self._artifactcache = None
self._sourcecache = None
self._projects = []
- self._project_overrides = _yaml.Node.from_dict({})
+ self._project_overrides = Node.from_dict({})
self._workspaces = None
self._workspace_project_cache = WorkspaceProjectCache()
self._cascache = None
diff --git a/src/buildstream/_frontend/app.py b/src/buildstream/_frontend/app.py
index 53275d251..2c0dcb1e7 100644
--- a/src/buildstream/_frontend/app.py
+++ b/src/buildstream/_frontend/app.py
@@ -38,7 +38,7 @@ from .._exceptions import BstError, StreamError, LoadError, LoadErrorReason, App
from .._message import Message, MessageType, unconditional_messages
from .._stream import Stream
from .._versions import BST_FORMAT_VERSION
-from .. import _yaml
+from .. import node
# Import frontend assets
from .profile import Profile
@@ -349,7 +349,7 @@ class App():
if project_name:
# If project name was specified, user interaction is not desired, just
# perform some validation and write the project.conf
- _yaml.assert_symbol_name(project_name, 'project name')
+ node.assert_symbol_name(project_name, 'project name')
self._assert_format_version(format_version)
self._assert_element_path(element_path)
@@ -801,7 +801,7 @@ class App():
def project_name_proc(user_input):
try:
- _yaml.assert_symbol_name(None, user_input, 'project name')
+ node.assert_symbol_name(None, user_input, 'project name')
except LoadError as e:
message = "{}\n\n{}\n".format(e, e.detail)
raise UsageError(message) from e
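
For context, assert_symbol_name() keeps its existing signature (symbol_name, purpose, plus ref_node/allow_dashes keywords) while moving into the new module; a hedged sketch of calling it, with invented candidate names:

    from buildstream.node import assert_symbol_name
    from buildstream._exceptions import LoadError

    for candidate in ('my-project', 'my_project', '9lives', 'bad name'):
        try:
            # Alphanumerics, underscores and (by default) dashes are allowed;
            # the name must not be empty or start with a digit.
            assert_symbol_name(candidate, 'project name')
            print(candidate, '-> valid')
        except LoadError as e:
            print(candidate, '-> rejected:', e)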
diff --git a/src/buildstream/_includes.py b/src/buildstream/_includes.py
index 90dd8d929..c3bef1ce6 100644
--- a/src/buildstream/_includes.py
+++ b/src/buildstream/_includes.py
@@ -1,5 +1,6 @@
import os
from . import _yaml
+from .node import MappingNode, ScalarNode, SequenceNode
from ._exceptions import LoadError, LoadErrorReason
@@ -35,10 +36,10 @@ class Includes:
if current_loader is None:
current_loader = self._loader
- includes_node = node.get_node('(@)', allowed_types=[_yaml.ScalarNode, _yaml.SequenceNode], allow_none=True)
+ includes_node = node.get_node('(@)', allowed_types=[ScalarNode, SequenceNode], allow_none=True)
if includes_node:
- if type(includes_node) is _yaml.ScalarNode: # pylint: disable=unidiomatic-typecheck
+ if type(includes_node) is ScalarNode: # pylint: disable=unidiomatic-typecheck
includes = [includes_node.as_str()]
else:
includes = includes_node.as_str_list()
@@ -132,12 +133,12 @@ class Includes:
only_local=False):
value_type = type(value)
- if value_type is _yaml.MappingNode:
+ if value_type is MappingNode:
self.process(value,
included=included,
current_loader=current_loader,
only_local=only_local)
- elif value_type is _yaml.SequenceNode:
+ elif value_type is SequenceNode:
for v in value:
self._process_value(v,
included=included,
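
The pattern here (fetch a node constrained to certain subclasses, then branch on its concrete type) replaces the old _yaml.* type checks; a small self-contained sketch, reusing the '(@)' include key handled above with invented file names:

    from buildstream import Node, ScalarNode, SequenceNode

    conf = Node.from_dict({'(@)': ['includes/base.yml', 'includes/ci.yml']})

    includes_node = conf.get_node('(@)', allowed_types=[ScalarNode, SequenceNode], allow_none=True)
    if includes_node is None:
        includes = []
    elif type(includes_node) is ScalarNode:      # a single include given as a string
        includes = [includes_node.as_str()]
    else:                                        # a list of includes
        includes = includes_node.as_str_list()
    print(includes)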
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index 27975dc34..5a2624c6a 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -24,6 +24,7 @@ from .._exceptions import LoadError, LoadErrorReason
from .. import Consistency
from .. import _yaml
from ..element import Element
+from ..node import Node
from .._profile import Topics, PROFILER
from .._includes import Includes
@@ -120,7 +121,7 @@ class Loader():
# Set up a dummy element that depends on all top-level targets
# to resolve potential circular dependencies between them
- dummy_target = LoadElement(_yaml.Node.from_dict({}), "", self)
+ dummy_target = LoadElement(Node.from_dict({}), "", self)
dummy_target.dependencies.extend(
LoadElement.Dependency(element, Symbol.RUNTIME)
for element in target_elements
diff --git a/src/buildstream/_loader/metaelement.py b/src/buildstream/_loader/metaelement.py
index 8214e303d..67d2ec771 100644
--- a/src/buildstream/_loader/metaelement.py
+++ b/src/buildstream/_loader/metaelement.py
@@ -17,7 +17,7 @@
# Authors:
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
-from .. import _yaml
+from ..node import Node
class MetaElement():
@@ -48,12 +48,12 @@ class MetaElement():
self.kind = kind
self.provenance = provenance
self.sources = sources
- self.config = config or _yaml.Node.from_dict({})
- self.variables = variables or _yaml.Node.from_dict({})
- self.environment = environment or _yaml.Node.from_dict({})
+ self.config = config or Node.from_dict({})
+ self.variables = variables or Node.from_dict({})
+ self.environment = environment or Node.from_dict({})
self.env_nocache = env_nocache or []
- self.public = public or _yaml.Node.from_dict({})
- self.sandbox = sandbox or _yaml.Node.from_dict({})
+ self.public = public or Node.from_dict({})
+ self.sandbox = sandbox or Node.from_dict({})
self.build_dependencies = []
self.dependencies = []
self.first_pass = first_pass
diff --git a/src/buildstream/_loader/types.pyx b/src/buildstream/_loader/types.pyx
index fe1cea789..e8c16b36e 100644
--- a/src/buildstream/_loader/types.pyx
+++ b/src/buildstream/_loader/types.pyx
@@ -18,7 +18,7 @@
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
from .._exceptions import LoadError, LoadErrorReason
-from .. cimport _yaml
+from ..node cimport MappingNode, Node, ProvenanceInformation, ScalarNode, SequenceNode
# Symbol():
@@ -59,32 +59,32 @@ class Symbol():
# dependency was declared
#
cdef class Dependency:
- cdef public _yaml.ProvenanceInformation provenance
+ cdef public ProvenanceInformation provenance
cdef public str name
cdef public str dep_type
cdef public str junction
def __init__(self,
- _yaml.Node dep,
+ Node dep,
str default_dep_type=None):
cdef str dep_type
self.provenance = dep.get_provenance()
- if type(dep) is _yaml.ScalarNode:
+ if type(dep) is ScalarNode:
self.name = dep.as_str()
self.dep_type = default_dep_type
self.junction = None
- elif type(dep) is _yaml.MappingNode:
+ elif type(dep) is MappingNode:
if default_dep_type:
- (<_yaml.MappingNode> dep).validate_keys(['filename', 'junction'])
+ (<MappingNode> dep).validate_keys(['filename', 'junction'])
dep_type = default_dep_type
else:
- (<_yaml.MappingNode> dep).validate_keys(['filename', 'type', 'junction'])
+ (<MappingNode> dep).validate_keys(['filename', 'type', 'junction'])
# Make type optional, for this we set it to None
- dep_type = (<_yaml.MappingNode> dep).get_str(<str> Symbol.TYPE, None)
+ dep_type = (<MappingNode> dep).get_str(<str> Symbol.TYPE, None)
if dep_type is None or dep_type == <str> Symbol.ALL:
dep_type = None
elif dep_type not in [Symbol.BUILD, Symbol.RUNTIME]:
@@ -93,9 +93,9 @@ cdef class Dependency:
"{}: Dependency type '{}' is not 'build', 'runtime' or 'all'"
.format(provenance, dep_type))
- self.name = (<_yaml.MappingNode> dep).get_str(<str> Symbol.FILENAME)
+ self.name = (<MappingNode> dep).get_str(<str> Symbol.FILENAME)
self.dep_type = dep_type
- self.junction = (<_yaml.MappingNode> dep).get_str(<str> Symbol.JUNCTION, None)
+ self.junction = (<MappingNode> dep).get_str(<str> Symbol.JUNCTION, None)
else:
raise LoadError(LoadErrorReason.INVALID_DATA,
@@ -136,9 +136,9 @@ cdef class Dependency:
# default_dep_type (str): type to give to the dependency
# acc (list): a list in which to add the loaded dependencies
#
-cdef void _extract_depends_from_node(_yaml.Node node, str key, str default_dep_type, list acc) except *:
- cdef _yaml.SequenceNode depends = node.get_sequence(key, [])
- cdef _yaml.Node dep_node
+cdef void _extract_depends_from_node(Node node, str key, str default_dep_type, list acc) except *:
+ cdef SequenceNode depends = node.get_sequence(key, [])
+ cdef Node dep_node
for dep_node in depends:
dependency = Dependency(dep_node, default_dep_type=default_dep_type)
@@ -162,7 +162,7 @@ cdef void _extract_depends_from_node(_yaml.Node node, str key, str default_dep_t
# Returns:
# (list): a list of Dependency objects
#
-def extract_depends_from_node(_yaml.Node node):
+def extract_depends_from_node(Node node):
cdef list acc = []
_extract_depends_from_node(node, <str> Symbol.BUILD_DEPENDS, <str> Symbol.BUILD, acc)
_extract_depends_from_node(node, <str> Symbol.RUNTIME_DEPENDS, <str> Symbol.RUNTIME, acc)
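
The two dependency shapes accepted above (a bare filename, or a mapping with filename/type/junction) can be exercised against synthetic nodes; the element names below are invented:

    from buildstream import MappingNode, Node, ScalarNode

    deps = Node.from_dict({
        'depends': [
            'elements/base.bst',                                # scalar form
            {'filename': 'elements/gcc.bst', 'type': 'build'},  # mapping form
        ],
    })

    for dep in deps.get_sequence('depends'):
        if type(dep) is ScalarNode:
            print(dep.as_str(), '(default dependency type)')
        elif type(dep) is MappingNode:
            dep.validate_keys(['filename', 'type', 'junction'])
            print(dep.get_str('filename'), dep.get_str('type', None))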
diff --git a/src/buildstream/_options/option.py b/src/buildstream/_options/option.py
index ae5d56beb..98090e1b5 100644
--- a/src/buildstream/_options/option.py
+++ b/src/buildstream/_options/option.py
@@ -17,7 +17,7 @@
# Authors:
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
-from .. import _yaml
+from ..node import assert_symbol_name
# Shared symbols for validation purposes
@@ -66,7 +66,7 @@ class Option():
# Assert valid symbol name for variable name
if self.variable is not None:
- _yaml.assert_symbol_name(self.variable, 'variable name', ref_node=node.get_node('variable'))
+ assert_symbol_name(self.variable, 'variable name', ref_node=node.get_node('variable'))
# load_value()
#
diff --git a/src/buildstream/_options/optionpool.py b/src/buildstream/_options/optionpool.py
index 38e9e6769..56adf68f2 100644
--- a/src/buildstream/_options/optionpool.py
+++ b/src/buildstream/_options/optionpool.py
@@ -20,8 +20,8 @@
import jinja2
-from .. import _yaml
from .._exceptions import LoadError, LoadErrorReason
+from ..node import MappingNode, SequenceNode, assert_symbol_name
from .optionbool import OptionBool
from .optionenum import OptionEnum
from .optionflags import OptionFlags
@@ -68,7 +68,7 @@ class OptionPool():
for option_name, option_definition in options.items():
# Assert that the option name is a valid symbol
- _yaml.assert_symbol_name(option_name, "option name", ref_node=option_definition, allow_dashes=False)
+ assert_symbol_name(option_name, "option name", ref_node=option_definition, allow_dashes=False)
opt_type_name = option_definition.get_str('type')
try:
@@ -186,9 +186,9 @@ class OptionPool():
#
for value in node.values():
value_type = type(value)
- if value_type is _yaml.MappingNode:
+ if value_type is MappingNode:
self.process_node(value)
- elif value_type is _yaml.SequenceNode:
+ elif value_type is SequenceNode:
self._process_list(value)
#######################################################
@@ -238,9 +238,9 @@ class OptionPool():
def _process_list(self, values):
for value in values:
value_type = type(value)
- if value_type is _yaml.MappingNode:
+ if value_type is MappingNode:
self.process_node(value)
- elif value_type is _yaml.SequenceNode:
+ elif value_type is SequenceNode:
self._process_list(value)
# Process a single conditional, resulting in composition
@@ -278,7 +278,7 @@ class OptionPool():
provenance = condition.get_provenance()
raise LoadError(e.reason, "{}: {}".format(provenance, e)) from e
- if type(value) is not _yaml.MappingNode: # pylint: disable=unidiomatic-typecheck
+ if type(value) is not MappingNode: # pylint: disable=unidiomatic-typecheck
provenance = condition.get_provenance()
raise LoadError(LoadErrorReason.ILLEGAL_COMPOSITE,
"{}: Only values of type 'dict' can be composed.".format(provenance))
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index 85ce11e62..a9956421a 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -33,6 +33,7 @@ from ._exceptions import LoadError, LoadErrorReason
from ._options import OptionPool
from ._artifactcache import ArtifactCache
from ._sourcecache import SourceCache
+from .node import ScalarNode, SequenceNode, assert_symbol_name
from .sandbox import SandboxRemote
from ._elementfactory import ElementFactory
from ._sourcefactory import SourceFactory
@@ -594,8 +595,8 @@ class Project():
self.name = self._project_conf.get_str('name')
# Validate that project name is a valid symbol name
- _yaml.assert_symbol_name(self.name, "project name",
- ref_node=pre_config_node.get_node('name'))
+ assert_symbol_name(self.name, "project name",
+ ref_node=pre_config_node.get_node('name'))
self.element_path = os.path.join(
self.directory,
@@ -728,7 +729,7 @@ class Project():
# Host files is parsed as a list for convenience
host_files = shell_options.get_sequence('host-files', default=[])
for host_file in host_files:
- if isinstance(host_file, _yaml.ScalarNode):
+ if isinstance(host_file, ScalarNode):
mount = HostMount(host_file)
else:
# Some validation
@@ -823,7 +824,7 @@ class Project():
mirror_name = mirror.get_str('name')
alias_mappings = {}
for alias_mapping, uris in mirror.get_mapping('aliases').items():
- assert type(uris) is _yaml.SequenceNode # pylint: disable=unidiomatic-typecheck
+ assert type(uris) is SequenceNode # pylint: disable=unidiomatic-typecheck
alias_mappings[alias_mapping] = uris.as_str_list()
output.mirrors[mirror_name] = alias_mappings
if not output.default_mirror:
diff --git a/src/buildstream/_projectrefs.py b/src/buildstream/_projectrefs.py
index d9faca212..0555488c8 100644
--- a/src/buildstream/_projectrefs.py
+++ b/src/buildstream/_projectrefs.py
@@ -19,6 +19,7 @@
import os
from . import _yaml
+from .node import _new_synthetic_file
from ._exceptions import LoadError, LoadErrorReason
@@ -79,7 +80,7 @@ class ProjectRefs():
# Ignore failure if the file doesnt exist, it'll be created and
# for now just assumed to be empty
- self._toplevel_node = _yaml._new_synthetic_file(self._fullpath)
+ self._toplevel_node = _new_synthetic_file(self._fullpath)
self._toplevel_save = self._toplevel_node
self._toplevel_node.validate_keys(['projects'])
diff --git a/src/buildstream/_variables.pyx b/src/buildstream/_variables.pyx
index eb2deb553..470feddc9 100644
--- a/src/buildstream/_variables.pyx
+++ b/src/buildstream/_variables.pyx
@@ -24,7 +24,7 @@ import re
import sys
from ._exceptions import LoadError, LoadErrorReason
-from . cimport _yaml
+from .node cimport MappingNode
# Variables are allowed to have dashes here
#
@@ -65,11 +65,11 @@ PARSE_EXPANSION = re.compile(r"\%\{([a-zA-Z][a-zA-Z0-9_-]*)\}")
#
cdef class Variables:
- cdef _yaml.Node original
+ cdef MappingNode original
cdef dict _expstr_map
cdef public dict flat
- def __init__(self, _yaml.Node node):
+ def __init__(self, MappingNode node):
self.original = node
self._expstr_map = self._resolve(node)
self.flat = self._flatten()
@@ -115,7 +115,7 @@ cdef class Variables:
#
# Here we resolve all of our inputs into a dictionary, ready for use
# in subst()
- cdef dict _resolve(self, _yaml.Node node):
+ cdef dict _resolve(self, MappingNode node):
# Special case, if notparallel is specified in the variables for this
# element, then override max-jobs to be 1.
# Initialize it as a string as all variables are processed as strings.
diff --git a/src/buildstream/_workspaces.py b/src/buildstream/_workspaces.py
index 2d693c566..2cda5a215 100644
--- a/src/buildstream/_workspaces.py
+++ b/src/buildstream/_workspaces.py
@@ -21,6 +21,7 @@ import os
from . import utils
from . import _yaml
+from .node import MappingNode, ScalarNode
from ._exceptions import LoadError, LoadErrorReason
@@ -581,10 +582,10 @@ class Workspaces():
for element, config in workspaces.items():
config_type = type(config)
- if config_type is _yaml.ScalarNode:
+ if config_type is ScalarNode:
pass
- elif config_type is _yaml.MappingNode:
+ elif config_type is MappingNode:
sources = list(config.values())
if len(sources) > 1:
detail = "There are multiple workspaces open for '{}'.\n" + \
diff --git a/src/buildstream/_yaml.pyx b/src/buildstream/_yaml.pyx
index bf4de7a0e..66f71d97a 100644
--- a/src/buildstream/_yaml.pyx
+++ b/src/buildstream/_yaml.pyx
@@ -23,7 +23,6 @@
import datetime
import sys
-import string
from contextlib import ExitStack
from collections import OrderedDict
from collections.abc import Mapping
@@ -31,834 +30,13 @@ from collections.abc import Mapping
from ruamel import yaml
from ._exceptions import LoadError, LoadErrorReason
-
-
-# Without this, pylint complains about all the `type(foo) is blah` checks
-# because it feels isinstance() is more idiomatic. Sadly, it is much slower to
-# do `isinstance(foo, blah)` for reasons I am unable to fathom. As such, we
-# blanket disable the check for this module.
-#
-# pylint: disable=unidiomatic-typecheck
-
-
-# A sentinel to be used as a default argument for functions that need
-# to distinguish between a kwarg set to None and an unset kwarg.
-_sentinel = object()
-
-
-# Node()
-#
-# Container for YAML loaded data and its provenance
-#
-# All nodes returned (and all internal lists/strings) have this type (rather
-# than a plain tuple, to distinguish them in things like node_sanitize)
-#
-# Members:
-# file_index (int): Index within _FILE_LIST (a list of loaded file paths).
-# Negative indices indicate synthetic nodes so that
-# they can be referenced.
-# line (int): The line number within the file where the value appears.
-# col (int): The column number within the file where the value appears.
-#
-cdef class Node:
-
- def __init__(self):
- raise NotImplementedError("Please do not construct nodes like this. Use Node.__new__(Node, *args) instead.")
-
- def __cinit__(self, int file_index, int line, int column, *args):
- self.file_index = file_index
- self.line = line
- self.column = column
-
- def __json__(self):
- raise ValueError("Nodes should not be allowed when jsonify-ing data", self)
-
- #############################################################
- # Public Methods #
- #############################################################
-
- cpdef Node copy(self):
- raise NotImplementedError()
-
- @classmethod
- def from_dict(cls, dict value):
- if value:
- return _new_node_from_dict(value, MappingNode.__new__(
- MappingNode, _SYNTHETIC_FILE_INDEX, 0, next_synthetic_counter(), {}))
- else:
- # We got an empty dict, we can shortcut
- return MappingNode.__new__(MappingNode, _SYNTHETIC_FILE_INDEX, 0, next_synthetic_counter(), {})
-
- cpdef ProvenanceInformation get_provenance(self):
- return ProvenanceInformation(self)
-
- #############################################################
- # Private Methods used in BuildStream #
- #############################################################
-
- # _assert_fully_composited()
- #
- # This must be called on a fully loaded and composited node,
- # after all composition has completed.
- #
- # This checks that no more composition directives are present
- # in the data.
- #
- # Raises:
- # (LoadError): If any assertions fail
- #
- cpdef void _assert_fully_composited(self) except *:
- raise NotImplementedError()
-
- cpdef object _strip_node_info(self):
- raise NotImplementedError()
-
- #############################################################
- # Protected Methods #
- #############################################################
-
- cdef void _compose_on(self, str key, MappingNode target, list path) except *:
- raise NotImplementedError()
-
- # _is_composite_list
- #
- # Checks if the node is a Mapping with array composition
- # directives.
- #
- # Returns:
- # (bool): True if node was a Mapping containing only
- # list composition directives
- #
- # Raises:
- # (LoadError): If node was a mapping and contained a mix of
- # list composition directives and other keys
- #
- cdef bint _is_composite_list(self) except *:
- raise NotImplementedError()
-
- cdef bint _shares_position_with(self, Node target):
- return self.file_index == target.file_index and self.line == target.line and self.column == target.column
-
- cdef bint _walk_find(self, Node target, list path) except *:
- raise NotImplementedError()
-
-
-cdef class ScalarNode(Node):
-
- def __cinit__(self, int file_index, int line, int column, object value):
- cdef value_type = type(value)
-
- if value_type is str:
- value = value.strip()
- elif value_type is bool:
- if value:
- value = "True"
- else:
- value = "False"
- elif value_type is int:
- value = str(value)
- elif value is None:
- pass
- else:
- raise ValueError("ScalarNode can only hold str, int, bool or None objects")
-
- self.value = value
-
- #############################################################
- # Public Methods #
- #############################################################
-
- cpdef bint as_bool(self) except *:
- if type(self.value) is bool:
- return self.value
-
- # Don't coerce booleans to string, this makes "False" strings evaluate to True
- if self.value in ('True', 'true'):
- return True
- elif self.value in ('False', 'false'):
- return False
- else:
- provenance = self.get_provenance()
- path = provenance._toplevel._find(self)[-1]
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Value of '{}' is not of the expected type '{}'"
- .format(provenance, path, bool.__name__, self.value))
-
- cpdef int as_int(self) except *:
- try:
- return int(self.value)
- except ValueError:
- provenance = self.get_provenance()
- path = provenance._toplevel._find(self)[-1]
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Value of '{}' is not of the expected type '{}'"
- .format(provenance, path, int.__name__))
-
- cpdef str as_str(self):
- # We keep 'None' as 'None' to simplify the API's usage and allow chaining for users
- if self.value is None:
- return None
- return str(self.value)
-
- cpdef ScalarNode copy(self):
- return self
-
- cpdef bint is_none(self):
- return self.value is None
-
- #############################################################
- # Private Methods used in BuildStream #
- #############################################################
-
- cpdef void _assert_fully_composited(self) except *:
- pass
-
- cpdef object _strip_node_info(self):
- return self.value
-
- #############################################################
- # Protected Methods #
- #############################################################
-
- cdef void _compose_on(self, str key, MappingNode target, list path) except *:
- cdef Node target_value = target.value.get(key)
-
- if target_value is not None and type(target_value) is not ScalarNode:
- raise CompositeError(path,
- "{}: Cannot compose scalar on non-scalar at {}".format(
- self.get_provenance(),
- target_value.get_provenance()))
-
- target.value[key] = self
-
- cdef bint _is_composite_list(self) except *:
- return False
-
- cdef bint _walk_find(self, Node target, list path) except *:
- return self._shares_position_with(target)
-
-
-cdef class MappingNode(Node):
-
- def __cinit__(self, int file_index, int line, int column, dict value):
- self.value = value
-
- def __contains__(self, what):
- return what in self.value
-
- def __delitem__(self, str key):
- del self.value[key]
-
- def __setitem__(self, str key, object value):
- cdef Node old_value
-
- if type(value) in [MappingNode, ScalarNode, SequenceNode]:
- self.value[key] = value
- else:
- node = _create_node_recursive(value, self)
-
- # FIXME: Do we really want to override provenance?
- #
- # Related to https://gitlab.com/BuildStream/buildstream/issues/1058
- #
- # There are only two cases were nodes are set in the code (hence without provenance):
- # - When automatic variables are set by the core (e-g: max-jobs)
- # - when plugins call Element.set_public_data
- #
- # The first case should never throw errors, so it is of limited interests.
- #
- # The second is more important. What should probably be done here is to have 'set_public_data'
- # able of creating a fake provenance with the name of the plugin, the project and probably the
- # element name.
- #
- # We would therefore have much better error messages, and would be able to get rid of most synthetic
- # nodes.
- old_value = self.value.get(key)
- if old_value:
- node.file_index = old_value.file_index
- node.line = old_value.line
- node.column = old_value.column
-
- self.value[key] = node
-
- #############################################################
- # Public Methods #
- #############################################################
-
- cpdef MappingNode copy(self):
- cdef dict copy = {}
- cdef str key
- cdef Node value
-
- for key, value in self.value.items():
- copy[key] = value.copy()
-
- return MappingNode.__new__(MappingNode, self.file_index, self.line, self.column, copy)
-
- cpdef bint get_bool(self, str key, object default=_sentinel) except *:
- cdef ScalarNode scalar = self.get_scalar(key, default)
- return scalar.as_bool()
-
- cpdef int get_int(self, str key, object default=_sentinel) except *:
- cdef ScalarNode scalar = self.get_scalar(key, default)
- return scalar.as_int()
-
- cpdef MappingNode get_mapping(self, str key, object default=_sentinel):
- value = self._get(key, default, MappingNode)
-
- if type(value) is not MappingNode and value is not None:
- provenance = value.get_provenance()
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Value of '{}' is not of the expected type 'Mapping'"
- .format(provenance, key))
-
- return value
-
- cpdef Node get_node(self, str key, list allowed_types = None, bint allow_none = False):
- cdef value = self.value.get(key, _sentinel)
-
- if value is _sentinel:
- if allow_none:
- return None
-
- provenance = self.get_provenance()
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Dictionary did not contain expected key '{}'".format(provenance, key))
-
- if allowed_types and type(value) not in allowed_types:
- provenance = self.get_provenance()
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Value of '{}' is not one of the following: {}.".format(
- provenance, key, ", ".join(allowed_types)))
-
- return value
-
- cpdef ScalarNode get_scalar(self, str key, object default=_sentinel):
- value = self._get(key, default, ScalarNode)
-
- if type(value) is not ScalarNode:
- if value is None:
- value = ScalarNode.__new__(ScalarNode, self.file_index, 0, next_synthetic_counter(), None)
- else:
- provenance = value.get_provenance()
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Value of '{}' is not of the expected type 'Scalar'"
- .format(provenance, key))
-
- return value
-
- cpdef SequenceNode get_sequence(self, str key, object default=_sentinel):
- value = self._get(key, default, SequenceNode)
-
- if type(value) is not SequenceNode and value is not None:
- provenance = value.get_provenance()
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Value of '{}' is not of the expected type 'Sequence'"
- .format(provenance, key))
-
- return value
-
- cpdef str get_str(self, str key, object default=_sentinel):
- cdef ScalarNode scalar = self.get_scalar(key, default)
- return scalar.as_str()
-
- cpdef object items(self):
- return self.value.items()
-
- cpdef list keys(self):
- return list(self.value.keys())
-
- cpdef void safe_del(self, str key):
- try:
- del self.value[key]
- except KeyError:
- pass
-
- # validate_keys()
- #
- # Validate the node so as to ensure the user has not specified
- # any keys which are unrecognized by buildstream (usually this
- # means a typo which would otherwise not trigger an error).
- #
- # Args:
- # valid_keys (list): A list of valid keys for the specified node
- #
- # Raises:
- # LoadError: In the case that the specified node contained
- # one or more invalid keys
- #
- cpdef void validate_keys(self, list valid_keys) except *:
- # Probably the fastest way to do this: https://stackoverflow.com/a/23062482
- cdef set valid_keys_set = set(valid_keys)
- cdef str key
-
- for key in self.value:
- if key not in valid_keys_set:
- provenance = self.get_node(key).get_provenance()
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Unexpected key: {}".format(provenance, key))
-
- cpdef object values(self):
- return self.value.values()
-
- #############################################################
- # Private Methods used in BuildStream #
- #############################################################
-
- cpdef void _assert_fully_composited(self) except *:
- cdef str key
- cdef Node value
-
- for key, value in self.value.items():
- # Assert that list composition directives dont remain, this
- # indicates that the user intended to override a list which
- # never existed in the underlying data
- #
- if key in ('(>)', '(<)', '(=)'):
- provenance = value.get_provenance()
- raise LoadError(LoadErrorReason.TRAILING_LIST_DIRECTIVE,
- "{}: Attempt to override non-existing list".format(provenance))
-
- value._assert_fully_composited()
-
- # _composite()
- #
- # Compose one mapping node onto another
- #
- # Args:
- # target (Node): The target to compose into
- #
- # Raises: LoadError
- #
- cpdef void _composite(self, MappingNode target) except *:
- try:
- self.__composite(target, [])
- except CompositeError as e:
- source_provenance = self.get_provenance()
- error_prefix = ""
- if source_provenance:
- error_prefix = "{}: ".format(source_provenance)
- raise LoadError(LoadErrorReason.ILLEGAL_COMPOSITE,
- "{}Failure composing {}: {}"
- .format(error_prefix,
- e.path,
- e.message)) from e
-
- # Like _composite(target, source), but where target overrides source instead.
- #
- cpdef void _composite_under(self, MappingNode target) except *:
- target._composite(self)
-
- cdef str key
- cdef Node value
- cdef list to_delete = [key for key in target.value.keys() if key not in self.value]
-
- for key, value in self.value.items():
- target.value[key] = value
- for key in to_delete:
- del target.value[key]
-
- # _find()
- #
- # Searches the given node tree for the given target node.
- #
- # This is typically used when trying to walk a path to a given node
- # for the purpose of then modifying a similar tree of objects elsewhere
- #
- # Args:
- # target (Node): The node you are looking for in that tree
- #
- # Returns:
- # (list): A path from `node` to `target` or None if `target` is not in the subtree
- cpdef list _find(self, Node target):
- cdef list path = []
- if self._walk_find(target, path):
- return path
- return None
-
- cpdef object _strip_node_info(self):
- cdef str key
- cdef Node value
-
- return {key: value._strip_node_info() for key, value in self.value.items()}
-
- #############################################################
- # Protected Methods #
- #############################################################
-
- cdef void _compose_on(self, str key, MappingNode target, list path) except *:
- cdef Node target_value
-
- if self._is_composite_list():
- if key not in target.value:
- # Composite list clobbers empty space
- target.value[key] = self
- else:
- target_value = target.value[key]
-
- if type(target_value) is SequenceNode:
- # Composite list composes into a list
- self._compose_on_list(target_value)
- elif target_value._is_composite_list():
- # Composite list merges into composite list
- self._compose_on_composite_dict(target_value)
- else:
- # Else composing on top of normal dict or a scalar, so raise...
- raise CompositeError(path,
- "{}: Cannot compose lists onto {}".format(
- self.get_provenance(),
- target_value.get_provenance()))
- else:
- # We're composing a dict into target now
- if key not in target.value:
- # Target lacks a dict at that point, make a fresh one with
- # the same provenance as the incoming dict
- target.value[key] = MappingNode.__new__(MappingNode, self.file_index, self.line, self.column, {})
-
- self.__composite(target.value[key], path)
-
- cdef void _compose_on_list(self, SequenceNode target):
- cdef SequenceNode clobber = self.value.get("(=)")
- cdef SequenceNode prefix = self.value.get("(<)")
- cdef SequenceNode suffix = self.value.get("(>)")
-
- if clobber is not None:
- target.value.clear()
- target.value.extend(clobber.value)
- if prefix is not None:
- for v in reversed(prefix.value):
- target.value.insert(0, v)
- if suffix is not None:
- target.value.extend(suffix.value)
-
- cdef void _compose_on_composite_dict(self, MappingNode target):
- cdef SequenceNode clobber = self.value.get("(=)")
- cdef SequenceNode prefix = self.value.get("(<)")
- cdef SequenceNode suffix = self.value.get("(>)")
-
- if clobber is not None:
- # We want to clobber the target list
- # which basically means replacing the target list
- # with ourselves
- target.value["(=)"] = clobber
- if prefix is not None:
- target.value["(<)"] = prefix
- elif "(<)" in target.value:
- (<SequenceNode> target.value["(<)"]).value.clear()
- if suffix is not None:
- target.value["(>)"] = suffix
- elif "(>)" in target.value:
- (<SequenceNode> target.value["(>)"]).value.clear()
- else:
- # Not clobbering, so prefix the prefix and suffix the suffix
- if prefix is not None:
- if "(<)" in target.value:
- for v in reversed(prefix.value):
- (<SequenceNode> target.value["(<)"]).value.insert(0, v)
- else:
- target.value["(<)"] = prefix
- if suffix is not None:
- if "(>)" in target.value:
- (<SequenceNode> target.value["(>)"]).value.extend(suffix.value)
- else:
- target.value["(>)"] = suffix
-
- cdef Node _get(self, str key, object default, object default_constructor):
- value = self.value.get(key, _sentinel)
-
- if value is _sentinel:
- if default is _sentinel:
- provenance = self.get_provenance()
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Dictionary did not contain expected key '{}'".format(provenance, key))
-
- if default is None:
- value = None
- else:
- value = default_constructor.__new__(
- default_constructor, _SYNTHETIC_FILE_INDEX, 0, next_synthetic_counter(), default)
-
- return value
-
- cdef bint _is_composite_list(self) except *:
- cdef bint has_directives = False
- cdef bint has_keys = False
- cdef str key
-
- for key in self.value.keys():
- if key in ['(>)', '(<)', '(=)']:
- has_directives = True
- else:
- has_keys = True
-
- if has_keys and has_directives:
- provenance = self.get_provenance()
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Dictionary contains array composition directives and arbitrary keys"
- .format(provenance))
-
- return has_directives
-
- cdef bint _walk_find(self, Node target, list path) except *:
- cdef str k
- cdef Node v
-
- if self._shares_position_with(target):
- return True
-
- for k, v in self.value.items():
- path.append(k)
- if v._walk_find(target, path):
- return True
- del path[-1]
-
- return False
-
- #############################################################
- # Private Methods #
- #############################################################
-
- cdef void __composite(self, MappingNode target, list path=None) except *:
- cdef str key
- cdef Node value
-
- for key, value in self.value.items():
- path.append(key)
- value._compose_on(key, target, path)
- path.pop()
-
-
-cdef class SequenceNode(Node):
- def __cinit__(self, int file_index, int line, int column, list value):
- self.value = value
-
- def __iter__(self):
- return iter(self.value)
-
- def __len__(self):
- return len(self.value)
-
- def __reversed__(self):
- return reversed(self.value)
-
- def __setitem__(self, int key, object value):
- cdef Node old_value
-
- if type(value) in [MappingNode, ScalarNode, SequenceNode]:
- self.value[key] = value
- else:
- node = _create_node_recursive(value, self)
-
- # FIXME: Do we really want to override provenance?
- # See __setitem__ on 'MappingNode' for more context
- old_value = self.value[key]
- if old_value:
- node.file_index = old_value.file_index
- node.line = old_value.line
- node.column = old_value.column
-
- self.value[key] = node
-
- #############################################################
- # Public Methods #
- #############################################################
-
- cpdef void append(self, object value):
- if type(value) in [MappingNode, ScalarNode, SequenceNode]:
- self.value.append(value)
- else:
- node = _create_node_recursive(value, self)
- self.value.append(node)
-
- cpdef list as_str_list(self):
- return [node.as_str() for node in self.value]
-
- cpdef SequenceNode copy(self):
- cdef list copy = []
- cdef Node entry
-
- for entry in self.value:
- copy.append(entry.copy())
-
- return SequenceNode.__new__(SequenceNode, self.file_index, self.line, self.column, copy)
-
- cpdef MappingNode mapping_at(self, int index):
- value = self.value[index]
-
- if type(value) is not MappingNode:
- provenance = self.get_provenance()
- path = ["[{}]".format(p) for p in provenance.toplevel._find(self)] + ["[{}]".format(index)]
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Value of '{}' is not of the expected type '{}'"
- .format(provenance, path, MappingNode.__name__))
- return value
-
- cpdef Node node_at(self, int index, list allowed_types = None):
- cdef value = self.value[index]
-
- if allowed_types and type(value) not in allowed_types:
- provenance = self.get_provenance()
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Value of '{}' is not one of the following: {}.".format(
- provenance, index, ", ".join(allowed_types)))
-
- return value
-
- cpdef ScalarNode scalar_at(self, int index):
- value = self.value[index]
-
- if type(value) is not ScalarNode:
- provenance = self.get_provenance()
- path = ["[{}]".format(p) for p in provenance.toplevel._find(self)] + ["[{}]".format(index)]
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Value of '{}' is not of the expected type '{}'"
- .format(provenance, path, ScalarNode.__name__))
- return value
-
- cpdef SequenceNode sequence_at(self, int index):
- value = self.value[index]
-
- if type(value) is not SequenceNode:
- provenance = self.get_provenance()
- path = ["[{}]".format(p) for p in provenance.toplevel._find(self)] + ["[{}]".format(index)]
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Value of '{}' is not of the expected type '{}'"
- .format(provenance, path, SequenceNode.__name__))
-
- return value
-
- #############################################################
- # Private Methods used in BuildStream #
- #############################################################
-
- cpdef void _assert_fully_composited(self) except *:
- cdef Node value
- for value in self.value:
- value._assert_fully_composited()
-
- cpdef object _strip_node_info(self):
- cdef Node value
- return [value._strip_node_info() for value in self.value]
-
- #############################################################
- # Protected Methods #
- #############################################################
-
- cdef void _compose_on(self, str key, MappingNode target, list path) except *:
- # List clobbers anything list-like
- cdef Node target_value = target.value.get(key)
-
- if not (target_value is None or
- type(target_value) is SequenceNode or
- target_value._is_composite_list()):
- raise CompositeError(path,
- "{}: List cannot overwrite {} at: {}"
- .format(self.get_provenance(),
- key,
- target_value.get_provenance()))
- # Looks good, clobber it
- target.value[key] = self
-
- cdef bint _is_composite_list(self) except *:
- return False
-
- cdef bint _walk_find(self, Node target, list path) except *:
- cdef int i
- cdef Node v
-
- if self._shares_position_with(target):
- return True
-
- for i, v in enumerate(self.value):
- path.append(i)
- if v._walk_find(target, path):
- return True
- del path[-1]
-
- return False
-
-
-# Metadata container for a yaml toplevel node.
-#
-# This class contains metadata around a yaml node in order to be able
-# to trace back the provenance of a node to the file.
-#
-cdef class FileInfo:
-
- cdef str filename, shortname, displayname
- cdef Node toplevel,
- cdef object project
-
- def __init__(self, str filename, str shortname, str displayname, Node toplevel, object project):
- self.filename = filename
- self.shortname = shortname
- self.displayname = displayname
- self.toplevel = toplevel
- self.project = project
-
-
-# File name handling
-cdef _FILE_LIST = []
-
-
-# Purely synthetic node will have _SYNTHETIC_FILE_INDEX for the file number, have line number
-# zero, and a negative column number which comes from inverting the next value
-# out of this counter. Synthetic nodes created with a reference node will
-# have a file number from the reference node, some unknown line number, and
-# a negative column number from this counter.
-cdef int _SYNTHETIC_FILE_INDEX = -1
-cdef int __counter = 0
-
-cdef int next_synthetic_counter():
- global __counter
- __counter -= 1
- return __counter
-
-
-# Returned from Node.get_provenance
-cdef class ProvenanceInformation:
-
- def __init__(self, Node nodeish):
- cdef FileInfo fileinfo
-
- self._node = nodeish
- if (nodeish is None) or (nodeish.file_index == _SYNTHETIC_FILE_INDEX):
- self._filename = ""
- self._shortname = ""
- self._displayname = ""
- self._line = 1
- self._col = 0
- self._toplevel = None
- self._project = None
- else:
- fileinfo = <FileInfo> _FILE_LIST[nodeish.file_index]
- self._filename = fileinfo.filename
- self._shortname = fileinfo.shortname
- self._displayname = fileinfo.displayname
- # We add 1 here to convert from computerish to humanish
- self._line = nodeish.line + 1
- self._col = nodeish.column
- self._toplevel = fileinfo.toplevel
- self._project = fileinfo.project
- self._is_synthetic = (self._filename == '') or (self._col < 0)
-
- # Convert a Provenance to a string for error reporting
- def __str__(self):
- if self._is_synthetic:
- return "{} [synthetic node]".format(self._displayname)
- else:
- return "{} [line {:d} column {:d}]".format(self._displayname, self._line, self._col)
+from . cimport node
+from .node cimport MappingNode, ScalarNode, SequenceNode
# These exceptions are intended to be caught entirely within
# the BuildStream framework, hence they do not reside in the
# public exceptions.py
-class CompositeError(Exception):
- def __init__(self, path, message):
- super().__init__(message)
- self.path = path
- self.message = message
-
class YAMLLoadError(Exception):
pass
@@ -940,7 +118,7 @@ cdef class Representer:
# Returns:
# (Node or None): Return the Node instance of the top level mapping or
# None if there wasn't one.
- cdef Node get_output(self):
+ cdef MappingNode get_output(self):
if len(self.output):
return self.output[0]
return None
@@ -1063,22 +241,6 @@ cdef class Representer:
return RepresenterState.init
-cdef Node _create_node_recursive(object value, Node ref_node):
- cdef value_type = type(value)
-
- if value_type is list:
- node = _new_node_from_list(value, ref_node)
- elif value_type in [int, str, bool]:
- node = ScalarNode.__new__(ScalarNode, ref_node.file_index, ref_node.line, next_synthetic_counter(), value)
- elif value_type is dict:
- node = _new_node_from_dict(value, ref_node)
- else:
- raise ValueError(
- "Unable to assign a value of type {} to a Node.".format(value_type))
-
- return node
-
-
# Loads a dictionary from some YAML
#
# Args:
@@ -1092,7 +254,9 @@ cdef Node _create_node_recursive(object value, Node ref_node):
#
# Raises: LoadError
#
-cpdef Node load(str filename, str shortname=None, bint copy_tree=False, object project=None):
+cpdef MappingNode load(str filename, str shortname=None, bint copy_tree=False, object project=None):
+ cdef MappingNode data
+
if not shortname:
shortname = filename
@@ -1102,10 +266,7 @@ cpdef Node load(str filename, str shortname=None, bint copy_tree=False, object p
else:
displayname = shortname
- cdef Py_ssize_t file_number = len(_FILE_LIST)
- _FILE_LIST.append(FileInfo(filename, shortname, displayname, None, project))
-
- cdef Node data
+ cdef Py_ssize_t file_number = node._create_new_file(filename, shortname, displayname, None, project)
try:
with open(filename) as f:
@@ -1130,9 +291,8 @@ cpdef Node load(str filename, str shortname=None, bint copy_tree=False, object p
# Like load(), but doesnt require the data to be in a file
#
-cpdef Node load_data(str data, int file_index=_SYNTHETIC_FILE_INDEX, str file_name=None, bint copy_tree=False):
+cpdef MappingNode load_data(str data, int file_index=node._SYNTHETIC_FILE_INDEX, str file_name=None, bint copy_tree=False):
cdef Representer rep
- cdef FileInfo f_info
try:
rep = Representer(file_index)
@@ -1162,125 +322,13 @@ cpdef Node load_data(str data, int file_index=_SYNTHETIC_FILE_INDEX, str file_na
.format(type(contents[0]).__name__, file_name))
# Store this away because we'll use it later for "top level" provenance
- if file_index != _SYNTHETIC_FILE_INDEX:
- f_info = <FileInfo> _FILE_LIST[file_index]
-
- _FILE_LIST[file_index] = FileInfo(
- f_info.filename,
- f_info.shortname,
- f_info.displayname,
- contents,
- f_info.project,
- )
+ node._set_root_node_for_file(file_index, contents)
if copy_tree:
contents = contents.copy()
return contents
-# new_synthetic_file()
-#
-# Create a new synthetic mapping node, with an associated file entry
-# (in _FILE_LIST) such that later tracking can correctly determine which
-# file needs writing to in order to persist the changes.
-#
-# Args:
-# filename (str): The name of the synthetic file to create
-# project (Project): The optional project to associate this synthetic file with
-#
-# Returns:
-# (Node): An empty YAML mapping node, whose provenance is to this new
-# synthetic file
-#
-def _new_synthetic_file(str filename, object project=None):
- cdef Py_ssize_t file_index = len(_FILE_LIST)
- cdef Node node = MappingNode.__new__(MappingNode, file_index, 0, 0, {})
-
- _FILE_LIST.append(FileInfo(filename,
- filename,
- "<synthetic {}>".format(filename),
- node,
- project))
- return node
-
-
-# new_node_from_dict()
-#
-# Args:
-# indict (dict): The input dictionary
-#
-# Returns:
-# (Node): A new synthetic YAML tree which represents this dictionary
-#
-cdef Node _new_node_from_dict(dict indict, Node ref_node):
- cdef MappingNode ret = MappingNode.__new__(
- MappingNode, ref_node.file_index, ref_node.line, next_synthetic_counter(), {})
- cdef str k
-
- for k, v in indict.items():
- ret.value[k] = _create_node_recursive(v, ref_node)
-
- return ret
-
-
-# Internal function to help new_node_from_dict() to handle lists
-cdef Node _new_node_from_list(list inlist, Node ref_node):
- cdef SequenceNode ret = SequenceNode.__new__(
- SequenceNode, ref_node.file_index, ref_node.line, next_synthetic_counter(), [])
-
- for v in inlist:
- ret.value.append(_create_node_recursive(v, ref_node))
-
- return ret
-
-
-# assert_symbol_name()
-#
-# A helper function to check if a loaded string is a valid symbol
-# name and to raise a consistent LoadError if not. For strings which
-# are required to be symbols.
-#
-# Args:
-# symbol_name (str): The loaded symbol name
-# purpose (str): The purpose of the string, for an error message
-# ref_node (Node): The node of the loaded symbol, or None
-# allow_dashes (bool): Whether dashes are allowed for this symbol
-#
-# Raises:
-# LoadError: If the symbol_name is invalid
-#
-# Note that dashes are generally preferred for variable names and
-# usage in YAML, but things such as option names which will be
-# evaluated with jinja2 cannot use dashes.
-def assert_symbol_name(str symbol_name, str purpose, *, Node ref_node=None, bint allow_dashes=True):
- cdef str valid_chars = string.digits + string.ascii_letters + '_'
- if allow_dashes:
- valid_chars += '-'
-
- cdef bint valid = True
- if not symbol_name:
- valid = False
- elif any(x not in valid_chars for x in symbol_name):
- valid = False
- elif symbol_name[0] in string.digits:
- valid = False
-
- if not valid:
- detail = "Symbol names must contain only alphanumeric characters, " + \
- "may not start with a digit, and may contain underscores"
- if allow_dashes:
- detail += " or dashes"
-
- message = "Invalid symbol name for {}: '{}'".format(purpose, symbol_name)
- if ref_node:
- provenance = ref_node.get_provenance()
- if provenance is not None:
- message = "{}: {}".format(provenance, message)
-
- raise LoadError(LoadErrorReason.INVALID_SYMBOL_NAME,
- message, detail=detail)
-
-
###############################################################################
# Roundtrip code
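
After the split, _yaml keeps the loading and roundtrip machinery while the node types live in node.pyx; load() and load_data() now advertise a MappingNode return type, so callers go straight to the typed accessors. A minimal, hedged sketch (this is internal API, and a 'project.conf' with these keys is assumed to exist):

    from buildstream import _yaml

    project_conf = _yaml.load('project.conf', shortname='project.conf')  # -> MappingNode
    name = project_conf.get_str('name')
    element_path = project_conf.get_str('element-path', 'elements')      # default is illustrative
    print(name, element_path)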
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index f54e3bb22..4c6160c0f 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -97,6 +97,7 @@ from . import _cachekey
from . import _signals
from . import _site
from ._platform import Platform
+from .node import Node, _sentinel as _node_sentinel
from .plugin import Plugin
from .sandbox import SandboxFlags, SandboxCommandError
from .sandbox._config import SandboxConfig
@@ -487,7 +488,7 @@ class Element(Plugin):
def substitute_variables(self, value):
return self.__variables.subst(value)
- def node_subst_member(self, node, member_name, default=_yaml._sentinel):
+ def node_subst_member(self, node, member_name, default=_node_sentinel):
"""Fetch the value of a string node member, substituting any variables
in the loaded value with the element contextual variables.
@@ -2510,7 +2511,7 @@ class Element(Plugin):
# Defaults are loaded once per class and then reused
#
if cls.__defaults is None:
- defaults = _yaml.Node.from_dict({})
+ defaults = Node.from_dict({})
if plugin_conf is not None:
# Load the plugin's accompanying .yaml file if one was provided
@@ -2545,7 +2546,7 @@ class Element(Plugin):
default_env = cls.__defaults.get_mapping("environment", default={})
if meta.is_junction:
- environment = _yaml.Node.from_dict({})
+ environment = Node.from_dict({})
else:
environment = project.base_environment.copy()
@@ -2633,7 +2634,7 @@ class Element(Plugin):
@classmethod
def __extract_sandbox_config(cls, project, meta):
if meta.is_junction:
- sandbox_config = _yaml.Node.from_dict({
+ sandbox_config = Node.from_dict({
'build-uid': 0,
'build-gid': 0
})
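
Node.from_dict(), used in the hunks above to build synthetic defaults, recursively wraps plain Python values in the matching node types; a short sketch:

    from buildstream import MappingNode, Node, ScalarNode

    sandbox_config = Node.from_dict({
        'build-uid': 0,
        'build-gid': 0,
    })

    assert type(sandbox_config) is MappingNode
    assert type(sandbox_config.get_node('build-uid')) is ScalarNode
    print(sandbox_config.get_int('build-uid'))    # -> 0
    print(sandbox_config.get_provenance())        # reported as a synthetic node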
diff --git a/src/buildstream/_yaml.pxd b/src/buildstream/node.pxd
index 2e423caed..3069b2fc9 100644
--- a/src/buildstream/_yaml.pxd
+++ b/src/buildstream/node.pxd
@@ -105,3 +105,8 @@ cdef class ProvenanceInformation:
cdef readonly str _shortname
cdef readonly int _col
cdef readonly int _line
+
+
+cdef int _SYNTHETIC_FILE_INDEX
+cdef Py_ssize_t _create_new_file(str filename, str shortname, str displayname, Node toplevel, object project)
+cdef void _set_root_node_for_file(Py_ssize_t file_index, MappingNode contents) except *
diff --git a/src/buildstream/node.pyx b/src/buildstream/node.pyx
new file mode 100644
index 000000000..30814b782
--- /dev/null
+++ b/src/buildstream/node.pyx
@@ -0,0 +1,1013 @@
+#
+# Copyright (C) 2018 Codethink Limited
+# Copyright (C) 2019 Bloomberg LLP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+# Authors:
+# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
+# Daniel Silverstone <daniel.silverstone@codethink.co.uk>
+# James Ennis <james.ennis@codethink.co.uk>
+# Benjamin Schubert <bschubert@bloomberg.net>
+
+import string
+
+from ._exceptions import LoadError, LoadErrorReason
+
+
+# A sentinel to be used as a default argument for functions that need
+# to distinguish between a kwarg set to None and an unset kwarg.
+_sentinel = object()
+
+
+# Node()
+#
+# Container for YAML loaded data and its provenance
+#
+# All nodes returned (and all internal lists/strings) have this type (rather
+# than a plain tuple, to distinguish them in things like node_sanitize)
+#
+# Members:
+# file_index (int): Index within _FILE_LIST (a list of loaded file paths).
+# Negative indices indicate synthetic nodes so that
+# they can be referenced.
+# line (int): The line number within the file where the value appears.
+# col (int): The column number within the file where the value appears.
+#
+cdef class Node:
+
+ def __init__(self):
+ raise NotImplementedError("Please do not construct nodes like this. Use Node.from_dict(dict) instead.")
+
+ def __cinit__(self, int file_index, int line, int column, *args):
+ self.file_index = file_index
+ self.line = line
+ self.column = column
+
+ def __json__(self):
+ raise ValueError("Nodes should not be allowed when jsonify-ing data", self)
+
+ #############################################################
+ # Abstract Public Methods #
+ #############################################################
+
+ cpdef Node copy(self):
+ raise NotImplementedError()
+
+ #############################################################
+ # Public Methods #
+ #############################################################
+
+ @classmethod
+ def from_dict(cls, dict value):
+ if value:
+ return _new_node_from_dict(value, MappingNode.__new__(
+ MappingNode, _SYNTHETIC_FILE_INDEX, 0, next_synthetic_counter(), {}))
+ else:
+ # We got an empty dict, we can shortcut
+ return MappingNode.__new__(MappingNode, _SYNTHETIC_FILE_INDEX, 0, next_synthetic_counter(), {})
+
+ cpdef ProvenanceInformation get_provenance(self):
+ return ProvenanceInformation(self)
+
+ #############################################################
+ # Abstract Private Methods used in BuildStream #
+ #############################################################
+
+ # _assert_fully_composited()
+ #
+ # This must be called on a fully loaded and composited node,
+ # after all composition has completed.
+ #
+ # This checks that no more composition directives are present
+ # in the data.
+ #
+ # Raises:
+ # (LoadError): If any assertions fail
+ #
+ cpdef void _assert_fully_composited(self) except *:
+ raise NotImplementedError()
+
+ cpdef object _strip_node_info(self):
+ raise NotImplementedError()
+
+ #############################################################
+ # Abstract Protected Methods #
+ #############################################################
+
+ cdef void _compose_on(self, str key, MappingNode target, list path) except *:
+ raise NotImplementedError()
+
+ # _is_composite_list
+ #
+ # Checks if the node is a Mapping with array composition
+ # directives.
+ #
+ # Returns:
+ # (bool): True if node was a Mapping containing only
+ # list composition directives
+ #
+ # Raises:
+ # (LoadError): If node was a mapping and contained a mix of
+ # list composition directives and other keys
+ #
+ cdef bint _is_composite_list(self) except *:
+ raise NotImplementedError()
+
+ cdef bint _walk_find(self, Node target, list path) except *:
+ raise NotImplementedError()
+
+ #############################################################
+ # Protected Methods #
+ #############################################################
+
+ cdef bint _shares_position_with(self, Node target):
+ return (self.file_index == target.file_index and
+ self.line == target.line and
+ self.column == target.column)
+
+
+cdef class ScalarNode(Node):
+
+ def __cinit__(self, int file_index, int line, int column, object value):
+ cdef value_type = type(value)
+
+ if value_type is str:
+ value = value.strip()
+ elif value_type is bool:
+ if value:
+ value = "True"
+ else:
+ value = "False"
+ elif value_type is int:
+ value = str(value)
+ elif value is None:
+ pass
+ else:
+ raise ValueError("ScalarNode can only hold str, int, bool or None objects")
+
+ self.value = value
+
+ #############################################################
+ # Public Methods #
+ #############################################################
+
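+    # as_bool()
+    #
+    # Interpret this scalar as a boolean. Only real booleans and the strings
+    # 'True'/'true' and 'False'/'false' are accepted; anything else raises a
+    # LoadError. A minimal sketch (the key name is hypothetical):
+    #
+    #   strict = mapping.get_scalar('strict').as_bool()
+    #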
+ cpdef bint as_bool(self) except *:
+ if type(self.value) is bool:
+ return self.value
+
+        # Don't coerce strings with bool(); that would make the string "False" evaluate to True
+ if self.value in ('True', 'true'):
+ return True
+ elif self.value in ('False', 'false'):
+ return False
+ else:
+ provenance = self.get_provenance()
+ path = provenance._toplevel._find(self)[-1]
+            raise LoadError(LoadErrorReason.INVALID_DATA,
+                            "{}: Value of '{}' is not of the expected type '{}'"
+                            .format(provenance, path, bool.__name__))
+
+ cpdef int as_int(self) except *:
+ try:
+ return int(self.value)
+ except ValueError:
+ provenance = self.get_provenance()
+ path = provenance._toplevel._find(self)[-1]
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Value of '{}' is not of the expected type '{}'"
+ .format(provenance, path, int.__name__))
+
+ cpdef str as_str(self):
+        # Keep None as None (rather than the string 'None') to simplify usage and allow chaining
+ if self.value is None:
+ return None
+ return str(self.value)
+
+ cpdef bint is_none(self):
+ return self.value is None
+
+ #############################################################
+ # Public Methods implementations #
+ #############################################################
+
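+    # A ScalarNode's value is never mutated after construction, so copy()
+    # can simply return the same instance.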
+ cpdef ScalarNode copy(self):
+ return self
+
+ #############################################################
+ # Private Methods implementations #
+ #############################################################
+
+ cpdef void _assert_fully_composited(self) except *:
+ pass
+
+ cpdef object _strip_node_info(self):
+ return self.value
+
+ #############################################################
+ # Protected Methods #
+ #############################################################
+
+ cdef void _compose_on(self, str key, MappingNode target, list path) except *:
+ cdef Node target_value = target.value.get(key)
+
+ if target_value is not None and type(target_value) is not ScalarNode:
+ raise _CompositeError(path,
+ "{}: Cannot compose scalar on non-scalar at {}".format(
+ self.get_provenance(),
+ target_value.get_provenance()))
+
+ target.value[key] = self
+
+ cdef bint _is_composite_list(self) except *:
+ return False
+
+ cdef bint _walk_find(self, Node target, list path) except *:
+ return self._shares_position_with(target)
+
+
+cdef class MappingNode(Node):
+
+ def __cinit__(self, int file_index, int line, int column, dict value):
+ self.value = value
+
+ def __contains__(self, what):
+ return what in self.value
+
+ def __delitem__(self, str key):
+ del self.value[key]
+
+ def __setitem__(self, str key, object value):
+ cdef Node old_value
+
+ if type(value) in [MappingNode, ScalarNode, SequenceNode]:
+ self.value[key] = value
+ else:
+ node = _create_node_recursive(value, self)
+
+ # FIXME: Do we really want to override provenance?
+ #
+ # Related to https://gitlab.com/BuildStream/buildstream/issues/1058
+ #
+            # There are only two cases where nodes are set in the code (hence without provenance):
+            #  - When automatic variables are set by the core (e.g. max-jobs)
+            #  - When plugins call Element.set_public_data
+            #
+            # The first case should never throw errors, so it is of limited interest.
+            #
+            # The second is more important. What should probably be done here is to make
+            # 'set_public_data' capable of creating a fake provenance with the name of the plugin,
+            # the project and probably the element name.
+ #
+ # We would therefore have much better error messages, and would be able to get rid of most synthetic
+ # nodes.
+ old_value = self.value.get(key)
+ if old_value:
+ node.file_index = old_value.file_index
+ node.line = old_value.line
+ node.column = old_value.column
+
+ self.value[key] = node
+
+ #############################################################
+ # Public Methods #
+ #############################################################
+
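+    # The typed accessors below share a pattern: look up 'key', check the
+    # node type, and raise a LoadError carrying provenance on a mismatch.
+    # A minimal usage sketch (keys and defaults are hypothetical):
+    #
+    #   url = config.get_str('url')
+    #   push = config.get_bool('push', default=False)
+    #   remotes = config.get_sequence('remotes', default=[])
+    #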
+ cpdef bint get_bool(self, str key, object default=_sentinel) except *:
+ cdef ScalarNode scalar = self.get_scalar(key, default)
+ return scalar.as_bool()
+
+ cpdef int get_int(self, str key, object default=_sentinel) except *:
+ cdef ScalarNode scalar = self.get_scalar(key, default)
+ return scalar.as_int()
+
+ cpdef MappingNode get_mapping(self, str key, object default=_sentinel):
+ value = self._get(key, default, MappingNode)
+
+ if type(value) is not MappingNode and value is not None:
+ provenance = value.get_provenance()
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Value of '{}' is not of the expected type 'Mapping'"
+ .format(provenance, key))
+
+ return value
+
+ cpdef Node get_node(self, str key, list allowed_types = None, bint allow_none = False):
+ cdef value = self.value.get(key, _sentinel)
+
+ if value is _sentinel:
+ if allow_none:
+ return None
+
+ provenance = self.get_provenance()
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Dictionary did not contain expected key '{}'".format(provenance, key))
+
+ if allowed_types and type(value) not in allowed_types:
+ provenance = self.get_provenance()
+            raise LoadError(LoadErrorReason.INVALID_DATA,
+                            "{}: Value of '{}' is not one of the following: {}.".format(
+                                provenance, key, ", ".join(t.__name__ for t in allowed_types)))
+
+ return value
+
+ cpdef ScalarNode get_scalar(self, str key, object default=_sentinel):
+ value = self._get(key, default, ScalarNode)
+
+ if type(value) is not ScalarNode:
+ if value is None:
+ value = ScalarNode.__new__(ScalarNode, self.file_index, 0, next_synthetic_counter(), None)
+ else:
+ provenance = value.get_provenance()
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Value of '{}' is not of the expected type 'Scalar'"
+ .format(provenance, key))
+
+ return value
+
+ cpdef SequenceNode get_sequence(self, str key, object default=_sentinel):
+ value = self._get(key, default, SequenceNode)
+
+ if type(value) is not SequenceNode and value is not None:
+ provenance = value.get_provenance()
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Value of '{}' is not of the expected type 'Sequence'"
+ .format(provenance, key))
+
+ return value
+
+ cpdef str get_str(self, str key, object default=_sentinel):
+ cdef ScalarNode scalar = self.get_scalar(key, default)
+ return scalar.as_str()
+
+ cpdef object items(self):
+ return self.value.items()
+
+ cpdef list keys(self):
+ return list(self.value.keys())
+
+ cpdef void safe_del(self, str key):
+ try:
+ del self.value[key]
+ except KeyError:
+ pass
+
+ # validate_keys()
+ #
+ # Validate the node so as to ensure the user has not specified
+ # any keys which are unrecognized by buildstream (usually this
+ # means a typo which would otherwise not trigger an error).
+ #
+ # Args:
+ # valid_keys (list): A list of valid keys for the specified node
+ #
+ # Raises:
+ # LoadError: In the case that the specified node contained
+ # one or more invalid keys
+ #
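+    # Example (hypothetical element configuration keys):
+    #
+    #   node.validate_keys(['kind', 'sources', 'depends'])
+    #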
+ cpdef void validate_keys(self, list valid_keys) except *:
+ # Probably the fastest way to do this: https://stackoverflow.com/a/23062482
+ cdef set valid_keys_set = set(valid_keys)
+ cdef str key
+
+ for key in self.value:
+ if key not in valid_keys_set:
+ provenance = self.get_node(key).get_provenance()
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Unexpected key: {}".format(provenance, key))
+
+ cpdef object values(self):
+ return self.value.values()
+
+ #############################################################
+ # Public Methods implementations #
+ #############################################################
+
+ cpdef MappingNode copy(self):
+ cdef dict copy = {}
+ cdef str key
+ cdef Node value
+
+ for key, value in self.value.items():
+ copy[key] = value.copy()
+
+ return MappingNode.__new__(MappingNode, self.file_index, self.line, self.column, copy)
+
+ #############################################################
+ # Private Methods used in BuildStream #
+ #############################################################
+
+ # _composite()
+ #
+ # Compose one mapping node onto another
+ #
+ # Args:
+ # target (Node): The target to compose into
+ #
+ # Raises: LoadError
+ #
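+    # Values in this node take priority: for example, composing a mapping
+    # that defines 'a' onto a target that defines 'a' and 'b' leaves the
+    # target with this node's 'a' and the target's original 'b'.
+    #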
+ cpdef void _composite(self, MappingNode target) except *:
+ try:
+ self.__composite(target, [])
+ except _CompositeError as e:
+ source_provenance = self.get_provenance()
+ error_prefix = ""
+ if source_provenance:
+ error_prefix = "{}: ".format(source_provenance)
+ raise LoadError(LoadErrorReason.ILLEGAL_COMPOSITE,
+ "{}Failure composing {}: {}"
+ .format(error_prefix,
+ e.path,
+ e.message)) from e
+
+    # Like _composite(), but composed the other way around: where both
+    # nodes define a key, the target's value takes priority, and the
+    # result is written back into the target.
+    #
+ cpdef void _composite_under(self, MappingNode target) except *:
+ target._composite(self)
+
+ cdef str key
+ cdef Node value
+ cdef list to_delete = [key for key in target.value.keys() if key not in self.value]
+
+ for key, value in self.value.items():
+ target.value[key] = value
+ for key in to_delete:
+ del target.value[key]
+
+ # _find()
+ #
+ # Searches the given node tree for the given target node.
+ #
+ # This is typically used when trying to walk a path to a given node
+ # for the purpose of then modifying a similar tree of objects elsewhere
+ #
+ # Args:
+ # target (Node): The node you are looking for in that tree
+ #
+ # Returns:
+    #     (list): A path from this node to `target`, or None if `target` is not in the subtree
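+    #
+    # For example, a returned path such as ['build-depends', 0] (hypothetical
+    # key name) means `target` was found at self.value['build-depends'].value[0].
+    #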
+ cpdef list _find(self, Node target):
+ cdef list path = []
+ if self._walk_find(target, path):
+ return path
+ return None
+
+ #############################################################
+ # Private Methods implementations #
+ #############################################################
+
+ cpdef void _assert_fully_composited(self) except *:
+ cdef str key
+ cdef Node value
+
+ for key, value in self.value.items():
+            # Assert that list composition directives don't remain; this
+            # indicates that the user intended to override a list which
+            # never existed in the underlying data
+ #
+ if key in ('(>)', '(<)', '(=)'):
+ provenance = value.get_provenance()
+ raise LoadError(LoadErrorReason.TRAILING_LIST_DIRECTIVE,
+ "{}: Attempt to override non-existing list".format(provenance))
+
+ value._assert_fully_composited()
+
+ cpdef object _strip_node_info(self):
+ cdef str key
+ cdef Node value
+
+ return {key: value._strip_node_info() for key, value in self.value.items()}
+
+ #############################################################
+ # Protected Methods #
+ #############################################################
+
+ cdef void _compose_on(self, str key, MappingNode target, list path) except *:
+ cdef Node target_value
+
+ if self._is_composite_list():
+ if key not in target.value:
+ # Composite list clobbers empty space
+ target.value[key] = self
+ else:
+ target_value = target.value[key]
+
+ if type(target_value) is SequenceNode:
+ # Composite list composes into a list
+ self._compose_on_list(target_value)
+ elif target_value._is_composite_list():
+ # Composite list merges into composite list
+ self._compose_on_composite_dict(target_value)
+ else:
+ # Else composing on top of normal dict or a scalar, so raise...
+ raise _CompositeError(path,
+ "{}: Cannot compose lists onto {}".format(
+ self.get_provenance(),
+ target_value.get_provenance()))
+ else:
+ # We're composing a dict into target now
+ if key not in target.value:
+ # Target lacks a dict at that point, make a fresh one with
+ # the same provenance as the incoming dict
+ target.value[key] = MappingNode.__new__(MappingNode, self.file_index, self.line, self.column, {})
+
+ self.__composite(target.value[key], path)
+
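+    # _compose_on_list()
+    #
+    # Apply the list composition directives held in this mapping,
+    # '(=)' (clobber), '(<)' (prefix) and '(>)' (suffix), to an
+    # existing sequence node.
+    #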
+ cdef void _compose_on_list(self, SequenceNode target):
+ cdef SequenceNode clobber = self.value.get("(=)")
+ cdef SequenceNode prefix = self.value.get("(<)")
+ cdef SequenceNode suffix = self.value.get("(>)")
+
+ if clobber is not None:
+ target.value.clear()
+ target.value.extend(clobber.value)
+ if prefix is not None:
+ for v in reversed(prefix.value):
+ target.value.insert(0, v)
+ if suffix is not None:
+ target.value.extend(suffix.value)
+
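+    # _compose_on_composite_dict()
+    #
+    # Merge the list composition directives held in this mapping into
+    # another composite-list mapping: a clobber directive replaces the
+    # target's directives, while prefix and suffix directives accumulate.
+    #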
+ cdef void _compose_on_composite_dict(self, MappingNode target):
+ cdef SequenceNode clobber = self.value.get("(=)")
+ cdef SequenceNode prefix = self.value.get("(<)")
+ cdef SequenceNode suffix = self.value.get("(>)")
+
+ if clobber is not None:
+ # We want to clobber the target list
+ # which basically means replacing the target list
+ # with ourselves
+ target.value["(=)"] = clobber
+ if prefix is not None:
+ target.value["(<)"] = prefix
+ elif "(<)" in target.value:
+ (<SequenceNode> target.value["(<)"]).value.clear()
+ if suffix is not None:
+ target.value["(>)"] = suffix
+ elif "(>)" in target.value:
+ (<SequenceNode> target.value["(>)"]).value.clear()
+ else:
+ # Not clobbering, so prefix the prefix and suffix the suffix
+ if prefix is not None:
+ if "(<)" in target.value:
+ for v in reversed(prefix.value):
+ (<SequenceNode> target.value["(<)"]).value.insert(0, v)
+ else:
+ target.value["(<)"] = prefix
+ if suffix is not None:
+ if "(>)" in target.value:
+ (<SequenceNode> target.value["(>)"]).value.extend(suffix.value)
+ else:
+ target.value["(>)"] = suffix
+
+ cdef Node _get(self, str key, object default, object default_constructor):
+ value = self.value.get(key, _sentinel)
+
+ if value is _sentinel:
+ if default is _sentinel:
+ provenance = self.get_provenance()
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Dictionary did not contain expected key '{}'".format(provenance, key))
+
+ if default is None:
+ value = None
+ else:
+ value = default_constructor.__new__(
+ default_constructor, _SYNTHETIC_FILE_INDEX, 0, next_synthetic_counter(), default)
+
+ return value
+
+ cdef bint _is_composite_list(self) except *:
+ cdef bint has_directives = False
+ cdef bint has_keys = False
+ cdef str key
+
+ for key in self.value.keys():
+ if key in ['(>)', '(<)', '(=)']:
+ has_directives = True
+ else:
+ has_keys = True
+
+ if has_keys and has_directives:
+ provenance = self.get_provenance()
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Dictionary contains array composition directives and arbitrary keys"
+ .format(provenance))
+
+ return has_directives
+
+ cdef bint _walk_find(self, Node target, list path) except *:
+ cdef str k
+ cdef Node v
+
+ if self._shares_position_with(target):
+ return True
+
+ for k, v in self.value.items():
+ path.append(k)
+ if v._walk_find(target, path):
+ return True
+ del path[-1]
+
+ return False
+
+ #############################################################
+ # Private Methods #
+ #############################################################
+
+ cdef void __composite(self, MappingNode target, list path=None) except *:
+ cdef str key
+ cdef Node value
+
+ for key, value in self.value.items():
+ path.append(key)
+ value._compose_on(key, target, path)
+ path.pop()
+
+
+cdef class SequenceNode(Node):
+
+ def __cinit__(self, int file_index, int line, int column, list value):
+ self.value = value
+
+ def __iter__(self):
+ return iter(self.value)
+
+ def __len__(self):
+ return len(self.value)
+
+ def __reversed__(self):
+ return reversed(self.value)
+
+ def __setitem__(self, int key, object value):
+ cdef Node old_value
+
+ if type(value) in [MappingNode, ScalarNode, SequenceNode]:
+ self.value[key] = value
+ else:
+ node = _create_node_recursive(value, self)
+
+ # FIXME: Do we really want to override provenance?
+ # See __setitem__ on 'MappingNode' for more context
+ old_value = self.value[key]
+ if old_value:
+ node.file_index = old_value.file_index
+ node.line = old_value.line
+ node.column = old_value.column
+
+ self.value[key] = node
+
+ #############################################################
+ # Public Methods #
+ #############################################################
+
+ cpdef void append(self, object value):
+ if type(value) in [MappingNode, ScalarNode, SequenceNode]:
+ self.value.append(value)
+ else:
+ node = _create_node_recursive(value, self)
+ self.value.append(node)
+
+ cpdef list as_str_list(self):
+ return [node.as_str() for node in self.value]
+
+ cpdef MappingNode mapping_at(self, int index):
+ value = self.value[index]
+
+ if type(value) is not MappingNode:
+ provenance = self.get_provenance()
+            path = ["[{}]".format(p) for p in provenance._toplevel._find(self)] + ["[{}]".format(index)]
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Value of '{}' is not of the expected type '{}'"
+ .format(provenance, path, MappingNode.__name__))
+ return value
+
+ cpdef Node node_at(self, int index, list allowed_types = None):
+ cdef value = self.value[index]
+
+ if allowed_types and type(value) not in allowed_types:
+ provenance = self.get_provenance()
+            raise LoadError(LoadErrorReason.INVALID_DATA,
+                            "{}: Value of '{}' is not one of the following: {}.".format(
+                                provenance, index, ", ".join(t.__name__ for t in allowed_types)))
+
+ return value
+
+ cpdef ScalarNode scalar_at(self, int index):
+ value = self.value[index]
+
+ if type(value) is not ScalarNode:
+ provenance = self.get_provenance()
+            path = ["[{}]".format(p) for p in provenance._toplevel._find(self)] + ["[{}]".format(index)]
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Value of '{}' is not of the expected type '{}'"
+ .format(provenance, path, ScalarNode.__name__))
+ return value
+
+ cpdef SequenceNode sequence_at(self, int index):
+ value = self.value[index]
+
+ if type(value) is not SequenceNode:
+ provenance = self.get_provenance()
+            path = ["[{}]".format(p) for p in provenance._toplevel._find(self)] + ["[{}]".format(index)]
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Value of '{}' is not of the expected type '{}'"
+ .format(provenance, path, SequenceNode.__name__))
+
+ return value
+
+ #############################################################
+ # Public Methods implementations #
+ #############################################################
+
+ cpdef SequenceNode copy(self):
+ cdef list copy = []
+ cdef Node entry
+
+ for entry in self.value:
+ copy.append(entry.copy())
+
+ return SequenceNode.__new__(SequenceNode, self.file_index, self.line, self.column, copy)
+
+ #############################################################
+ # Private Methods implementations #
+ #############################################################
+
+ cpdef void _assert_fully_composited(self) except *:
+ cdef Node value
+ for value in self.value:
+ value._assert_fully_composited()
+
+ cpdef object _strip_node_info(self):
+ cdef Node value
+ return [value._strip_node_info() for value in self.value]
+
+ #############################################################
+ # Protected Methods #
+ #############################################################
+
+ cdef void _compose_on(self, str key, MappingNode target, list path) except *:
+ # List clobbers anything list-like
+ cdef Node target_value = target.value.get(key)
+
+ if not (target_value is None or
+ type(target_value) is SequenceNode or
+ target_value._is_composite_list()):
+ raise _CompositeError(path,
+ "{}: List cannot overwrite {} at: {}"
+ .format(self.get_provenance(),
+ key,
+ target_value.get_provenance()))
+ # Looks good, clobber it
+ target.value[key] = self
+
+ cdef bint _is_composite_list(self) except *:
+ return False
+
+ cdef bint _walk_find(self, Node target, list path) except *:
+ cdef int i
+ cdef Node v
+
+ if self._shares_position_with(target):
+ return True
+
+ for i, v in enumerate(self.value):
+ path.append(i)
+ if v._walk_find(target, path):
+ return True
+ del path[-1]
+
+ return False
+
+
+# Returned from Node.get_provenance
+cdef class ProvenanceInformation:
+
+ def __init__(self, Node nodeish):
+ cdef _FileInfo fileinfo
+
+ self._node = nodeish
+ if (nodeish is None) or (nodeish.file_index == _SYNTHETIC_FILE_INDEX):
+ self._filename = ""
+ self._shortname = ""
+ self._displayname = ""
+ self._line = 1
+ self._col = 0
+ self._toplevel = None
+ self._project = None
+ else:
+ fileinfo = <_FileInfo> _FILE_LIST[nodeish.file_index]
+ self._filename = fileinfo.filename
+ self._shortname = fileinfo.shortname
+ self._displayname = fileinfo.displayname
+ # We add 1 here to convert from computerish to humanish
+ self._line = nodeish.line + 1
+ self._col = nodeish.column
+ self._toplevel = fileinfo.toplevel
+ self._project = fileinfo.project
+ self._is_synthetic = (self._filename == '') or (self._col < 0)
+
+ # Convert a Provenance to a string for error reporting
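+    # e.g. "element.bst [line 4 column 2]", or "<synthetic> [synthetic node]"
+    # (the display names here are hypothetical)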
+ def __str__(self):
+ if self._is_synthetic:
+ return "{} [synthetic node]".format(self._displayname)
+ else:
+ return "{} [line {:d} column {:d}]".format(self._displayname, self._line, self._col)
+
+
+# assert_symbol_name()
+#
+# A helper function to check if a loaded string is a valid symbol
+# name and to raise a consistent LoadError if not. For strings which
+# are required to be symbols.
+#
+# Args:
+# symbol_name (str): The loaded symbol name
+# purpose (str): The purpose of the string, for an error message
+# ref_node (Node): The node of the loaded symbol, or None
+# allow_dashes (bool): Whether dashes are allowed for this symbol
+#
+# Raises:
+# LoadError: If the symbol_name is invalid
+#
+# Note that dashes are generally preferred for variable names and
+# usage in YAML, but things such as option names which will be
+# evaluated with jinja2 cannot use dashes.
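+#
+# Example (hypothetical values):
+#
+#   assert_symbol_name('max-jobs', 'variable name', allow_dashes=True)
+#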
+def assert_symbol_name(str symbol_name, str purpose, *, Node ref_node=None, bint allow_dashes=True):
+ cdef str valid_chars = string.digits + string.ascii_letters + '_'
+ if allow_dashes:
+ valid_chars += '-'
+
+ cdef bint valid = True
+ if not symbol_name:
+ valid = False
+ elif any(x not in valid_chars for x in symbol_name):
+ valid = False
+ elif symbol_name[0] in string.digits:
+ valid = False
+
+ if not valid:
+ detail = "Symbol names must contain only alphanumeric characters, " + \
+ "may not start with a digit, and may contain underscores"
+ if allow_dashes:
+ detail += " or dashes"
+
+ message = "Invalid symbol name for {}: '{}'".format(purpose, symbol_name)
+ if ref_node:
+ provenance = ref_node.get_provenance()
+ if provenance is not None:
+ message = "{}: {}".format(provenance, message)
+
+ raise LoadError(LoadErrorReason.INVALID_SYMBOL_NAME,
+ message, detail=detail)
+
+
+#############################################################
+# BuildStream Private methods #
+#############################################################
+# Purely synthetic nodes have _SYNTHETIC_FILE_INDEX as their file number, a line
+# number of zero, and a negative column number taken from the synthetic counter
+# defined further below. Synthetic nodes created from a reference node keep the
+# file number of the reference node, an unspecified line number, and a negative
+# column number from the same counter.
+cdef int _SYNTHETIC_FILE_INDEX = -1
+
+# File name handling
+cdef list _FILE_LIST = []
+
+
+cdef Py_ssize_t _create_new_file(str filename, str shortname, str displayname, Node toplevel, object project):
+ cdef Py_ssize_t file_number = len(_FILE_LIST)
+ _FILE_LIST.append(_FileInfo(filename, shortname, displayname, None, project))
+
+ return file_number
+
+
+cdef void _set_root_node_for_file(Py_ssize_t file_index, MappingNode contents) except *:
+ cdef _FileInfo f_info
+
+ if file_index != _SYNTHETIC_FILE_INDEX:
+ f_info = <_FileInfo> _FILE_LIST[file_index]
+ f_info.toplevel = contents
+
+
+# _new_synthetic_file()
+#
+# Create a new synthetic mapping node, with an associated file entry
+# (in _FILE_LIST) such that later tracking can correctly determine which
+# file needs writing to in order to persist the changes.
+#
+# Args:
+# filename (str): The name of the synthetic file to create
+# project (Project): The optional project to associate this synthetic file with
+#
+# Returns:
+#     (Node): An empty YAML mapping node, whose provenance points to this
+#     new synthetic file
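+#
+# Example (the file name is hypothetical):
+#
+#   node = _new_synthetic_file('workspaces.yml', project)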
+#
+def _new_synthetic_file(str filename, object project=None):
+ cdef Py_ssize_t file_index = len(_FILE_LIST)
+ cdef Node node = MappingNode.__new__(MappingNode, file_index, 0, 0, {})
+
+ _FILE_LIST.append(_FileInfo(filename,
+ filename,
+ "<synthetic {}>".format(filename),
+ node,
+ project))
+ return node
+
+
+#############################################################
+# Module local helper Methods #
+#############################################################
+
+# Counter used to give synthetic nodes unique (negative) column numbers
+cdef int __counter = 0
+
+
+class _CompositeError(Exception):
+ def __init__(self, path, message):
+ super().__init__(message)
+ self.path = path
+ self.message = message
+
+
+# Metadata container for a YAML toplevel node.
+#
+# Holds the information needed to trace the provenance of any node
+# back to the file it was loaded from.
+#
+cdef class _FileInfo:
+
+ cdef str filename, shortname, displayname
+    cdef MappingNode toplevel
+ cdef object project
+
+ def __init__(self, str filename, str shortname, str displayname, MappingNode toplevel, object project):
+ self.filename = filename
+ self.shortname = shortname
+ self.displayname = displayname
+ self.toplevel = toplevel
+ self.project = project
+
+
+cdef int next_synthetic_counter():
+ global __counter
+ __counter -= 1
+ return __counter
+
+
+cdef Node _create_node_recursive(object value, Node ref_node):
+ cdef value_type = type(value)
+
+ if value_type is list:
+ node = _new_node_from_list(value, ref_node)
+ elif value_type in [int, str, bool]:
+ node = ScalarNode.__new__(ScalarNode, ref_node.file_index, ref_node.line, next_synthetic_counter(), value)
+ elif value_type is dict:
+ node = _new_node_from_dict(value, ref_node)
+ else:
+ raise ValueError(
+ "Unable to assign a value of type {} to a Node.".format(value_type))
+
+ return node
+
+
+# _new_node_from_dict()
+#
+# Args:
+# indict (dict): The input dictionary
+# ref_node (Node): The dictionary to take as reference for position
+#
+# Returns:
+# (Node): A new synthetic YAML tree which represents this dictionary
+#
+cdef Node _new_node_from_dict(dict indict, Node ref_node):
+ cdef MappingNode ret = MappingNode.__new__(
+ MappingNode, ref_node.file_index, ref_node.line, next_synthetic_counter(), {})
+ cdef str k
+
+ for k, v in indict.items():
+ ret.value[k] = _create_node_recursive(v, ref_node)
+
+ return ret
+
+
+# Internal helper for _new_node_from_dict() to handle lists
+cdef Node _new_node_from_list(list inlist, Node ref_node):
+ cdef SequenceNode ret = SequenceNode.__new__(
+ SequenceNode, ref_node.file_index, ref_node.line, next_synthetic_counter(), [])
+
+ for v in inlist:
+ ret.value.append(_create_node_recursive(v, ref_node))
+
+ return ret
diff --git a/src/buildstream/sandbox/_sandboxremote.py b/src/buildstream/sandbox/_sandboxremote.py
index fb45c72ef..20298c1ce 100644
--- a/src/buildstream/sandbox/_sandboxremote.py
+++ b/src/buildstream/sandbox/_sandboxremote.py
@@ -27,6 +27,7 @@ from functools import partial
import grpc
from .. import utils
+from ..node import Node
from .._message import Message, MessageType
from .sandbox import Sandbox, SandboxCommandError, _SandboxBatch
from ..storage.directory import VirtualDirectoryError
@@ -144,7 +145,7 @@ class SandboxRemote(Sandbox):
# 'url' was the only valid key for remote-execution:
if 'url' in remote_config:
if 'execution-service' not in remote_config:
- exec_config = _yaml.Node.from_dict({'url': remote_config['url']})
+ exec_config = Node.from_dict({'url': remote_config['url']})
else:
provenance = remote_config.get_node('url').get_provenance()
raise _yaml.LoadError(_yaml.LoadErrorReason.INVALID_DATA,