author     Benjamin Schubert <ben.c.schubert@gmail.com>   2019-06-11 18:19:46 +0100
committer  Benjamin Schubert <ben.c.schubert@gmail.com>   2019-06-13 16:04:31 +0100
commit     7d642bb85b4b102e4a3578585e508bc855fec77d (patch)
tree       87ff18216995ef35293b7da21d50d37fadec1291
parent     a14c8c827128c7a3213955fde48ca8961099dab8 (diff)
download   buildstream-7d642bb85b4b102e4a3578585e508bc855fec77d.tar.gz
Introduce get_sequence and sequence_at/mapping_at
-rw-r--r--   src/buildstream/_basecache.py              2
-rw-r--r--   src/buildstream/_gitsourcebase.py          4
-rw-r--r--   src/buildstream/_includes.py               2
-rw-r--r--   src/buildstream/_loader/loader.py          8
-rw-r--r--   src/buildstream/_loader/types.pyx         14
-rw-r--r--   src/buildstream/_options/optionenum.py     2
-rw-r--r--   src/buildstream/_options/optionflags.py    6
-rw-r--r--   src/buildstream/_options/optionpool.py     2
-rw-r--r--   src/buildstream/_plugincontext.py          2
-rw-r--r--   src/buildstream/_project.py               14
-rw-r--r--   src/buildstream/_projectrefs.py            6
-rw-r--r--   src/buildstream/_yaml.pxd                  7
-rw-r--r--   src/buildstream/_yaml.pyx                 53
-rw-r--r--   src/buildstream/buildelement.py           13
-rw-r--r--   src/buildstream/element.py                11
-rw-r--r--   tests/format/include.py                    4
-rw-r--r--   tests/format/include_composition.py       18
-rw-r--r--   tests/format/options.py                   10
-rw-r--r--   tests/format/projectoverrides.py           2
19 files changed, 118 insertions, 62 deletions
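
The pattern applied throughout this patch: call sites stop using the type-switched _yaml.node_get(node, list, key, ...) helper and instead ask a MappingNode for a typed SequenceNode, converting to a plain Python list only where one is needed. A minimal sketch of the before/after, assuming a loaded MappingNode named config and an illustrative 'plugins' key:

    # Old style, removed at these call sites: expected type passed as an argument.
    plugins = _yaml.node_get(config, list, 'plugins', default_value=[])

    # New style introduced here: the mapping returns a SequenceNode...
    plugins_node = config.get_sequence('plugins', default=[])
    # ...which can be iterated, indexed with mapping_at()/sequence_at(),
    # or flattened with as_str_list() when only strings are expected.
    plugins = plugins_node.as_str_list()
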
diff --git a/src/buildstream/_basecache.py b/src/buildstream/_basecache.py
index 5cfd7e552..e8923c7b1 100644
--- a/src/buildstream/_basecache.py
+++ b/src/buildstream/_basecache.py
@@ -77,7 +77,7 @@ class BaseCache():
artifacts = [config_node.get_mapping(cls.config_node_name)]
except LoadError:
try:
- artifacts = _yaml.node_get(config_node, list, cls.config_node_name, default_value=[])
+ artifacts = config_node.get_sequence(cls.config_node_name, default=[])
except LoadError:
provenance = _yaml.node_get_provenance(config_node, key=cls.config_node_name)
raise _yaml.LoadError(_yaml.LoadErrorReason.INVALID_DATA,
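
The nested try/except here keeps both configuration shapes working: the cache key may hold either a single mapping or a list of mappings. A sketch of the two accepted forms, with made-up values, written as comments since the hunk above already shows the Python:

    # Shape 1 - a single mapping; the get_mapping() call above succeeds:
    #   artifacts:
    #     url: https://cache.example.com
    #
    # Shape 2 - a list of mappings; get_mapping() raises LoadError and the
    # new get_sequence() fallback handles it:
    #   artifacts:
    #   - url: https://cache-a.example.com
    #   - url: https://cache-b.example.com
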
diff --git a/src/buildstream/_gitsourcebase.py b/src/buildstream/_gitsourcebase.py
index 0b88ebd37..6ba6aa50d 100644
--- a/src/buildstream/_gitsourcebase.py
+++ b/src/buildstream/_gitsourcebase.py
@@ -383,7 +383,7 @@ class _GitSourceBase(Source):
'track-tags', 'tags']
self.node_validate(node, config_keys + Source.COMMON_CONFIG_KEYS)
- tags_node = self.node_get_member(node, list, 'tags', [])
+ tags_node = node.get_sequence('tags', [])
for tag_node in tags_node:
self.node_validate(tag_node, ['tag', 'commit', 'annotated'])
@@ -663,7 +663,7 @@ class _GitSourceBase(Source):
def _load_tags(self, node):
tags = []
- tags_node = self.node_get_member(node, list, 'tags', [])
+ tags_node = node.get_sequence('tags', [])
for tag_node in tags_node:
tag = tag_node.get_scalar('tag').as_str()
commit_ref = tag_node.get_scalar('commit').as_str()
diff --git a/src/buildstream/_includes.py b/src/buildstream/_includes.py
index e29e769cf..65d941c44 100644
--- a/src/buildstream/_includes.py
+++ b/src/buildstream/_includes.py
@@ -41,7 +41,7 @@ class Includes:
includes = [includes]
except LoadError:
try:
- includes = _yaml.node_get(node, list, '(@)')
+ includes = node.get_sequence('(@)').as_str_list()
except LoadError:
provenance = _yaml.node_get_provenance(node, key='(@)')
raise LoadError(LoadErrorReason.INVALID_DATA,
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index 791a21332..a455d5935 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -469,13 +469,12 @@ class Loader():
elt_provenance = _yaml.node_get_provenance(node)
meta_sources = []
- sources = _yaml.node_get(node, list, Symbol.SOURCES, default_value=[])
+ sources = node.get_sequence(Symbol.SOURCES, default=[])
element_kind = node.get_scalar(Symbol.KIND).as_str()
# Safe loop calling into _yaml.node_get() for each element ensures
# we have good error reporting
- for i in range(len(sources)):
- source = _yaml.node_get(node, dict, Symbol.SOURCES, indices=[i])
+ for index, source in enumerate(sources):
kind = source.get_scalar(Symbol.KIND).as_str()
_yaml.node_del(source, Symbol.KIND)
@@ -484,7 +483,6 @@ class Loader():
if directory:
_yaml.node_del(source, Symbol.DIRECTORY)
- index = sources.index(source)
meta_source = MetaSource(element.name, index, element_kind, kind, source, directory)
meta_sources.append(meta_source)
@@ -493,7 +491,7 @@ class Loader():
node.get_mapping(Symbol.CONFIG, default={}),
node.get_mapping(Symbol.VARIABLES, default={}),
node.get_mapping(Symbol.ENVIRONMENT, default={}),
- _yaml.node_get(node, list, Symbol.ENV_NOCACHE, default_value=[]),
+ node.get_sequence(Symbol.ENV_NOCACHE, default=[]).as_str_list(),
node.get_mapping(Symbol.PUBLIC, default={}),
node.get_mapping(Symbol.SANDBOX, default={}),
element_kind == 'junction')
diff --git a/src/buildstream/_loader/types.pyx b/src/buildstream/_loader/types.pyx
index 14ebeab7a..b24f6893d 100644
--- a/src/buildstream/_loader/types.pyx
+++ b/src/buildstream/_loader/types.pyx
@@ -72,8 +72,8 @@ cdef class Dependency:
self.provenance = provenance
- if type(dep) is str:
- self.name = <str> dep
+ if type(dep) is _yaml.ScalarNode:
+ self.name = dep.as_str()
self.dep_type = default_dep_type
self.junction = None
@@ -138,15 +138,15 @@ cdef class Dependency:
# acc (list): a list in which to add the loaded dependencies
#
cdef void _extract_depends_from_node(_yaml.Node node, str key, str default_dep_type, list acc) except *:
- cdef list depends = <list> _yaml.node_get(node, list, key, None, [])
- cdef int index
+ cdef _yaml.SequenceNode depends = node.get_sequence(key, [])
+ cdef _yaml.Node dep_node
cdef _yaml.ProvenanceInformation dep_provenance
- for index in range(len(depends)):
+ for dep_node in depends:
# FIXME: the provenance information would be obtainable from the Node directly if we stop
# stripping provenance and have proper nodes for str elements
- dep_provenance = <_yaml.ProvenanceInformation> _yaml.node_get_provenance(node, key=key, indices=[index])
- dependency = Dependency(depends[index], dep_provenance, default_dep_type=default_dep_type)
+ dep_provenance = <_yaml.ProvenanceInformation> _yaml.node_get_provenance(dep_node)
+ dependency = Dependency(dep_node, dep_provenance, default_dep_type=default_dep_type)
acc.append(dependency)
# Now delete the field, we dont want it anymore
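
The rewritten loop no longer indexes back into the parent node for provenance; each child of the SequenceNode is itself a node carrying its own provenance. A rough Python-level sketch of the same pattern the Cython code follows, with an illustrative 'depends' key (node, acc and default_dep_type named as in the hunk above):

    depends = node.get_sequence('depends', [])            # SequenceNode, possibly empty
    for dep_node in depends:                               # children are Nodes themselves
        provenance = _yaml.node_get_provenance(dep_node)   # provenance taken from the child
        acc.append(Dependency(dep_node, provenance, default_dep_type=default_dep_type))
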
diff --git a/src/buildstream/_options/optionenum.py b/src/buildstream/_options/optionenum.py
index f8daa82b1..ec63d5d32 100644
--- a/src/buildstream/_options/optionenum.py
+++ b/src/buildstream/_options/optionenum.py
@@ -39,7 +39,7 @@ class OptionEnum(Option):
_yaml.node_validate(node, valid_symbols)
- self.values = _yaml.node_get(node, list, 'values', default_value=[])
+ self.values = node.get_sequence('values', default=[]).as_str_list()
if not self.values:
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: No values specified for {} option '{}'"
diff --git a/src/buildstream/_options/optionflags.py b/src/buildstream/_options/optionflags.py
index ba16244ba..a7dd536f6 100644
--- a/src/buildstream/_options/optionflags.py
+++ b/src/buildstream/_options/optionflags.py
@@ -46,11 +46,11 @@ class OptionFlags(Option):
"{}: No values specified for {} option '{}'"
.format(_yaml.node_get_provenance(node), self.OPTION_TYPE, self.name))
- self.value = _yaml.node_get(node, list, 'default', default_value=[])
+ self.value = node.get_sequence('default', default=[]).as_str_list()
self.validate(self.value, _yaml.node_get_provenance(node, 'default'))
def load_value(self, node, *, transform=None):
- self.value = _yaml.node_get(node, list, self.name)
+ self.value = node.get_sequence(self.name).as_str_list()
if transform:
self.value = [transform(x) for x in self.value]
self.value = sorted(self.value)
@@ -83,4 +83,4 @@ class OptionFlags(Option):
def load_valid_values(self, node):
# Allow the more descriptive error to raise when no values
# exist rather than bailing out here (by specifying default_value)
- return _yaml.node_get(node, list, 'values', default_value=[])
+ return node.get_sequence('values', default=[]).as_str_list()
diff --git a/src/buildstream/_options/optionpool.py b/src/buildstream/_options/optionpool.py
index c6d23f59e..1b8683186 100644
--- a/src/buildstream/_options/optionpool.py
+++ b/src/buildstream/_options/optionpool.py
@@ -248,7 +248,7 @@ class OptionPool():
# Return true if a conditional was processed.
#
def _process_one_node(self, node):
- conditions = _yaml.node_get(node, list, '(?)', default_value=None)
+ conditions = node.get_sequence('(?)', default=None)
assertion = node.get_scalar('(!)', default=None).as_str()
# Process assersions first, we want to abort on the first encountered
diff --git a/src/buildstream/_plugincontext.py b/src/buildstream/_plugincontext.py
index c4132c3f8..a9e69531b 100644
--- a/src/buildstream/_plugincontext.py
+++ b/src/buildstream/_plugincontext.py
@@ -138,7 +138,7 @@ class PluginContext():
loaded_dependency = False
for origin in self._plugin_origins:
- if kind not in _yaml.node_get(origin, list, 'plugins'):
+ if kind not in origin.get_sequence('plugins').as_str_list():
continue
if origin.get_scalar('origin').as_str() == 'local':
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index b7cdeea7d..7b7f69ac4 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -607,10 +607,10 @@ class Project():
defaults = pre_config_node.get_mapping('defaults')
_yaml.node_validate(defaults, ['targets'])
- self._default_targets = _yaml.node_get(defaults, list, "targets")
+ self._default_targets = defaults.get_sequence("targets").as_str_list()
# Fatal warnings
- self._fatal_warnings = _yaml.node_get(pre_config_node, list, 'fatal-warnings', default_value=[])
+ self._fatal_warnings = pre_config_node.get_sequence('fatal-warnings', default=[]).as_str_list()
self.loader = Loader(self._context, self,
parent=parent_loader)
@@ -683,7 +683,7 @@ class Project():
# Load sandbox environment variables
self.base_environment = config.get_mapping('environment')
- self.base_env_nocache = _yaml.node_get(config, list, 'environment-nocache')
+ self.base_env_nocache = config.get_sequence('environment-nocache').as_str_list()
# Load sandbox configuration
self._sandbox = config.get_mapping('sandbox')
@@ -715,7 +715,7 @@ class Project():
# Parse shell options
shell_options = config.get_mapping('shell')
_yaml.node_validate(shell_options, ['command', 'environment', 'host-files'])
- self._shell_command = _yaml.node_get(shell_options, list, 'command')
+ self._shell_command = shell_options.get_sequence('command')
# Perform environment expansion right away
shell_environment = shell_options.get_mapping('environment', default={})
@@ -724,7 +724,7 @@ class Project():
self._shell_environment[key] = os.path.expandvars(value)
# Host files is parsed as a list for convenience
- host_files = _yaml.node_get(shell_options, list, 'host-files', default_value=[])
+ host_files = shell_options.get_sequence('host-files', default=[])
for host_file in host_files:
if isinstance(host_file, str):
mount = HostMount(host_file)
@@ -814,7 +814,7 @@ class Project():
output.default_mirror = self._default_mirror or overrides.get_scalar(
'default-mirror', default=None).as_str()
- mirrors = _yaml.node_get(config, list, 'mirrors', default_value=[])
+ mirrors = config.get_sequence('mirrors', default=[])
for mirror in mirrors:
allowed_mirror_fields = [
'name', 'aliases'
@@ -874,7 +874,7 @@ class Project():
plugin_element_origins = [] # Origins of custom elements
# Plugin origins and versions
- origins = _yaml.node_get(config, list, 'plugins', default_value=[])
+ origins = config.get_sequence('plugins', default=[])
source_format_versions = {}
element_format_versions = {}
for origin in origins:
diff --git a/src/buildstream/_projectrefs.py b/src/buildstream/_projectrefs.py
index 847935c0b..f296858cf 100644
--- a/src/buildstream/_projectrefs.py
+++ b/src/buildstream/_projectrefs.py
@@ -133,16 +133,16 @@ class ProjectRefs():
# Fetch the element
try:
- element_list = _yaml.node_get(project_node, list, element)
+ element_list = project_node.get_sequence(element)
except LoadError:
if not ensure:
return None
- element_list = []
+ element_list = _yaml.new_empty_list_node()
_yaml.node_set(project_node, element, element_list)
# Fetch the source index
try:
- node = element_list[source_index]
+ node = element_list.mapping_at(source_index)
except IndexError:
if not ensure:
return None
diff --git a/src/buildstream/_yaml.pxd b/src/buildstream/_yaml.pxd
index 11271610e..15428b775 100644
--- a/src/buildstream/_yaml.pxd
+++ b/src/buildstream/_yaml.pxd
@@ -32,6 +32,7 @@ cdef class MappingNode(Node):
cdef Node get(self, str key, default, default_constructor)
cpdef MappingNode get_mapping(self, str key, default=*)
cpdef ScalarNode get_scalar(self, str key, default=*)
+ cpdef SequenceNode get_sequence(self, str key, default=*)
cdef class ScalarNode(Node):
@@ -41,6 +42,12 @@ cdef class ScalarNode(Node):
cpdef bint is_none(self)
+cdef class SequenceNode(Node):
+ cpdef MappingNode mapping_at(self, int index)
+ cpdef SequenceNode sequence_at(self, int index)
+ cpdef list as_str_list(self)
+
+
cdef class ProvenanceInformation:
cdef public Node node
diff --git a/src/buildstream/_yaml.pyx b/src/buildstream/_yaml.pyx
index 85b8098c4..626f75d64 100644
--- a/src/buildstream/_yaml.pyx
+++ b/src/buildstream/_yaml.pyx
@@ -167,14 +167,60 @@ cdef class MappingNode(Node):
return value
+ cpdef SequenceNode get_sequence(self, str key, default=_sentinel):
+ value = self.get(key, default, SequenceNode)
+ if type(value) is not SequenceNode and value is not None:
+ provenance = node_get_provenance(self)
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Value of '{}' is not of the expected type '{}'"
+ .format(provenance, key, SequenceNode.__name__))
+
+ return value
+
+
-class SequenceNode(Node):
+cdef class SequenceNode(Node):
def __init__(self, list value, int file_index, int line, int column):
self.value = value
self.file_index = file_index
self.line = line
self.column = column
+ cpdef MappingNode mapping_at(self, int index):
+ value = self.value[index]
+
+ if type(value) is not MappingNode:
+ provenance = node_get_provenance(self)
+ path = ["[{}]".format(p) for p in node_find_target(provenance, self)] + ["[{}]".format(index)]
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Value of '{}' is not of the expected type '{}'"
+ .format(provenance, path, MappingNode.__name__))
+ return value
+
+ cpdef SequenceNode sequence_at(self, int index):
+ value = self.value[index]
+
+ if type(value) is not SequenceNode:
+ provenance = node_get_provenance(self)
+ path = ["[{}]".format(p) for p in node_find_target(provenance, self)] + ["[{}]".format(index)]
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Value of '{}' is not of the expected type '{}'"
+ .format(provenance, path, SequenceNode.__name__))
+
+ return value
+
+ cpdef list as_str_list(self):
+ return [node.as_str() for node in self.value]
+
+ def __iter__(self):
+ return iter(self.value)
+
+ def __len__(self):
+ return len(self.value)
+
+ def __reversed__(self):
+ return reversed(self.value)
+
# Metadata container for a yaml toplevel node.
#
@@ -920,6 +966,11 @@ def new_empty_node(Node ref_node=None):
return MappingNode({}, _SYNTHETIC_FILE_INDEX, 0, 0)
+# FIXME: we should never need that
+def new_empty_list_node():
+ return SequenceNode([], _SYNTHETIC_FILE_INDEX, 0, 0)
+
+
# new_node_from_dict()
#
# Args:
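
Taken together, get_sequence, mapping_at and sequence_at give typed, provenance-aware access into nested YAML, raising LoadError when an element does not have the expected shape. A hedged traversal sketch with made-up keys and data (text is an assumed YAML string; only _yaml.load_data and the methods added above are relied on):

    # Hypothetical document being traversed:
    #   animals:
    #   - - name: horse
    #       sound: neigh
    loaded = _yaml.load_data(text)              # MappingNode
    outer = loaded.get_sequence('animals')      # SequenceNode
    inner = outer.sequence_at(0)                # nested SequenceNode, else LoadError
    first = inner.mapping_at(0)                 # MappingNode, else LoadError
    assert first.get_scalar('name').as_str() == 'horse'
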
diff --git a/src/buildstream/buildelement.py b/src/buildstream/buildelement.py
index 158f5fc11..0c8205bd6 100644
--- a/src/buildstream/buildelement.py
+++ b/src/buildstream/buildelement.py
@@ -281,14 +281,11 @@ class BuildElement(Element):
# Private Local Methods #
#############################################################
def __get_commands(self, node, name):
- list_node = self.node_get_member(node, list, name, [])
- commands = []
-
- for i in range(len(list_node)):
- command = self.node_subst_list_element(node, name, [i])
- commands.append(command)
-
- return commands
+ list_node = node.get_sequence(name, [])
+ return [
+ self.substitute_variables(node.as_str())
+ for node in list_node
+ ]
def __run_command(self, sandbox, cmd):
# Note the -e switch to 'sh' means to exit with an error
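
__get_commands now pairs get_sequence() with the new Element.substitute_variables() helper (added to element.py in the next hunk) instead of the index-based node_subst_list_element(). A condensed sketch of that substitution pattern, using an illustrative 'build-commands' key inside an element's configuration node:

    commands_node = node.get_sequence('build-commands', [])   # SequenceNode of scalars
    commands = [
        self.substitute_variables(cmd.as_str())                # expand %{...} variables
        for cmd in commands_node
    ]
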
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index 7f824fc95..33a00b197 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -482,6 +482,9 @@ class Element(Plugin):
return None
+ def substitute_variables(self, value):
+ return self.__variables.subst(value)
+
def node_subst_member(self, node, member_name, default=_yaml._sentinel):
"""Fetch the value of a string node member, substituting any variables
in the loaded value with the element contextual variables.
@@ -851,9 +854,9 @@ class Element(Plugin):
if bstdata is not None:
with sandbox.batch(SandboxFlags.NONE):
- commands = self.node_get_member(bstdata, list, 'integration-commands', [])
- for i in range(len(commands)):
- cmd = self.node_subst_list_element(bstdata, 'integration-commands', [i])
+ commands = bstdata.get_sequence('integration-commands', [])
+ for command in commands:
+ cmd = self.substitute_variables(command)
sandbox.run(['sh', '-e', '-c', cmd], 0, env=environment, cwd='/',
label=cmd)
@@ -2605,7 +2608,7 @@ class Element(Plugin):
else:
project_nocache = project.base_env_nocache
- default_nocache = _yaml.node_get(cls.__defaults, list, 'environment-nocache', default_value=[])
+ default_nocache = cls.__defaults.get_sequence('environment-nocache', default=[]).as_str_list()
element_nocache = meta.env_nocache
# Accumulate values from the element default, the project and the element
diff --git a/tests/format/include.py b/tests/format/include.py
index 0bfc2b342..59e530166 100644
--- a/tests/format/include.py
+++ b/tests/format/include.py
@@ -197,7 +197,7 @@ def test_include_element_overrides_composition(cli, datafiles):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert _yaml.node_get(loaded, list, 'build-commands') == ['first', 'second']
+ assert loaded.get_sequence('build-commands').as_str_list() == ['first', 'second']
@pytest.mark.datafiles(DATA_DIR)
@@ -215,7 +215,7 @@ def test_list_overide_does_not_fail_upon_first_composition(cli, datafiles):
# Assert that the explicitly overwritten public data is present
bst = loaded.get_mapping('bst')
assert 'foo-commands' in bst
- assert _yaml.node_get(bst, list, 'foo-commands') == ['need', 'this']
+ assert bst.get_sequence('foo-commands').as_str_list() == ['need', 'this']
@pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/format/include_composition.py b/tests/format/include_composition.py
index 562f7b002..c306dad5c 100644
--- a/tests/format/include_composition.py
+++ b/tests/format/include_composition.py
@@ -14,7 +14,7 @@ def make_includes(basedir):
return Includes(loader)
-def test_main_has_prority(tmpdir):
+def test_main_has_priority(tmpdir):
includes = make_includes(str(tmpdir))
_yaml.dump({'(@)': ['a.yml'],
@@ -28,7 +28,7 @@ def test_main_has_prority(tmpdir):
includes.process(main)
- assert _yaml.node_get(main, list, 'test') == ['main']
+ assert main.get_sequence('test').as_str_list() == ['main']
def test_include_cannot_append(tmpdir):
@@ -44,7 +44,7 @@ def test_include_cannot_append(tmpdir):
includes.process(main)
- assert _yaml.node_get(main, list, 'test') == ['main']
+ assert main.get_sequence('test').as_str_list() == ['main']
def test_main_can_append(tmpdir):
@@ -60,7 +60,7 @@ def test_main_can_append(tmpdir):
includes.process(main)
- assert _yaml.node_get(main, list, 'test') == ['a', 'main']
+ assert main.get_sequence('test').as_str_list() == ['a', 'main']
def test_sibling_cannot_append_backward(tmpdir):
@@ -77,7 +77,7 @@ def test_sibling_cannot_append_backward(tmpdir):
includes.process(main)
- assert _yaml.node_get(main, list, 'test') == ['b']
+ assert main.get_sequence('test').as_str_list() == ['b']
def test_sibling_can_append_forward(tmpdir):
@@ -94,7 +94,7 @@ def test_sibling_can_append_forward(tmpdir):
includes.process(main)
- assert _yaml.node_get(main, list, 'test') == ['a', 'b']
+ assert main.get_sequence('test').as_str_list() == ['a', 'b']
def test_lastest_sibling_has_priority(tmpdir):
@@ -111,7 +111,7 @@ def test_lastest_sibling_has_priority(tmpdir):
includes.process(main)
- assert _yaml.node_get(main, list, 'test') == ['b']
+ assert main.get_sequence('test').as_str_list() == ['b']
def test_main_keeps_keys(tmpdir):
@@ -127,7 +127,7 @@ def test_main_keeps_keys(tmpdir):
includes.process(main)
- assert _yaml.node_get(main, list, 'test') == ['a']
+ assert main.get_sequence('test').as_str_list() == ['a']
assert main.get_scalar('something').as_str() == 'else'
@@ -151,5 +151,5 @@ def test_overwrite_directive_on_later_composite(tmpdir):
includes.process(main)
- assert _yaml.node_get(main, list, 'test') == ['Overwritten']
+ assert main.get_sequence('test').as_str_list() == ['Overwritten']
assert main.get_scalar('foo').as_str() == 'should be present'
diff --git a/tests/format/options.py b/tests/format/options.py
index 38c0c12c6..965a98307 100644
--- a/tests/format/options.py
+++ b/tests/format/options.py
@@ -223,8 +223,8 @@ def test_deep_nesting_level1(cli, datafiles, option, expected):
'element.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- shallow_list = _yaml.node_get(loaded, list, 'shallow-nest')
- first_dict = shallow_list[0]
+ shallow_list = loaded.get_sequence('shallow-nest')
+ first_dict = shallow_list.mapping_at(0)
assert first_dict.get_scalar('animal').as_str() == expected
@@ -244,8 +244,8 @@ def test_deep_nesting_level2(cli, datafiles, option, expected):
'element-deeper.bst'])
result.assert_success()
loaded = _yaml.load_data(result.output)
- shallow_list = _yaml.node_get(loaded, list, 'deep-nest')
- deeper_list = shallow_list[0]
- first_dict = deeper_list[0]
+ shallow_list = loaded.get_sequence('deep-nest')
+ deeper_list = shallow_list.sequence_at(0)
+ first_dict = deeper_list.mapping_at(0)
assert first_dict.get_scalar('animal').as_str() == expected
diff --git a/tests/format/projectoverrides.py b/tests/format/projectoverrides.py
index 4b0c3f4d0..730e43b1e 100644
--- a/tests/format/projectoverrides.py
+++ b/tests/format/projectoverrides.py
@@ -24,6 +24,6 @@ def test_prepend_configure_commands(cli, datafiles):
result.assert_success()
loaded = _yaml.load_data(result.output)
- config_commands = _yaml.node_get(loaded, list, 'configure-commands')
+ config_commands = loaded.get_sequence('configure-commands').as_str_list()
assert len(config_commands) == 3
assert config_commands[0] == 'echo "Hello World!"'