Diffstat (limited to 'src/buildstream/_project.py')
src/buildstream/_project.py | 168
1 file changed, 41 insertions(+), 127 deletions(-)
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index bc361d288..812d96d5a 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -118,10 +118,7 @@ class Project:
self._context = context # The invocation Context, a private member
if search_for_project:
- (
- self.directory,
- self._invoked_from_workspace_element,
- ) = self._find_project_dir(directory)
+ (self.directory, self._invoked_from_workspace_element,) = self._find_project_dir(directory)
else:
self.directory = directory
self._invoked_from_workspace_element = None
@@ -270,16 +267,14 @@ class Project:
if full_path.is_symlink():
provenance = node.get_provenance()
raise LoadError(
- "{}: Specified path '{}' must not point to "
- "symbolic links ".format(provenance, path_str),
+ "{}: Specified path '{}' must not point to " "symbolic links ".format(provenance, path_str),
LoadErrorReason.PROJ_PATH_INVALID_KIND,
)
if path.parts and path.parts[0] == "..":
provenance = node.get_provenance()
raise LoadError(
- "{}: Specified path '{}' first component must "
- "not be '..'".format(provenance, path_str),
+ "{}: Specified path '{}' first component must " "not be '..'".format(provenance, path_str),
LoadErrorReason.PROJ_PATH_INVALID,
)
@@ -287,14 +282,11 @@ class Project:
if sys.version_info[0] == 3 and sys.version_info[1] < 6:
full_resolved_path = full_path.resolve()
else:
- full_resolved_path = full_path.resolve(
- strict=True
- ) # pylint: disable=unexpected-keyword-arg
+ full_resolved_path = full_path.resolve(strict=True) # pylint: disable=unexpected-keyword-arg
except FileNotFoundError:
provenance = node.get_provenance()
raise LoadError(
- "{}: Specified path '{}' does not exist".format(provenance, path_str),
- LoadErrorReason.MISSING_FILE,
+ "{}: Specified path '{}' does not exist".format(provenance, path_str), LoadErrorReason.MISSING_FILE,
)
is_inside = self._absolute_directory_path in full_resolved_path.parents or (
@@ -313,37 +305,28 @@ class Project:
provenance = node.get_provenance()
raise LoadError(
"{}: Absolute path: '{}' invalid.\n"
- "Please specify a path relative to the project's root.".format(
- provenance, path
- ),
+ "Please specify a path relative to the project's root.".format(provenance, path),
LoadErrorReason.PROJ_PATH_INVALID,
)
- if full_resolved_path.is_socket() or (
- full_resolved_path.is_fifo() or full_resolved_path.is_block_device()
- ):
+ if full_resolved_path.is_socket() or (full_resolved_path.is_fifo() or full_resolved_path.is_block_device()):
provenance = node.get_provenance()
raise LoadError(
- "{}: Specified path '{}' points to an unsupported "
- "file kind".format(provenance, path_str),
+ "{}: Specified path '{}' points to an unsupported " "file kind".format(provenance, path_str),
LoadErrorReason.PROJ_PATH_INVALID_KIND,
)
if check_is_file and not full_resolved_path.is_file():
provenance = node.get_provenance()
raise LoadError(
- "{}: Specified path '{}' is not a regular file".format(
- provenance, path_str
- ),
+ "{}: Specified path '{}' is not a regular file".format(provenance, path_str),
LoadErrorReason.PROJ_PATH_INVALID_KIND,
)
if check_is_dir and not full_resolved_path.is_dir():
provenance = node.get_provenance()
raise LoadError(
- "{}: Specified path '{}' is not a directory".format(
- provenance, path_str
- ),
+ "{}: Specified path '{}' is not a directory".format(provenance, path_str),
LoadErrorReason.PROJ_PATH_INVALID_KIND,
)
@@ -391,9 +374,7 @@ class Project:
#
def create_element(self, meta, *, first_pass=False):
if first_pass:
- return self.first_pass_config.element_factory.create(
- self._context, self, meta
- )
+ return self.first_pass_config.element_factory.create(self._context, self, meta)
else:
return self.config.element_factory.create(self._context, self, meta)
@@ -423,9 +404,7 @@ class Project:
#
def create_source(self, meta, *, first_pass=False):
if first_pass:
- return self.first_pass_config.source_factory.create(
- self._context, self, meta
- )
+ return self.first_pass_config.source_factory.create(self._context, self, meta)
else:
return self.config.source_factory.create(self._context, self, meta)
@@ -461,9 +440,7 @@ class Project:
else:
config = self.config
- if (
- not alias or alias not in config._aliases
- ): # pylint: disable=unsupported-membership-test
+ if not alias or alias not in config._aliases: # pylint: disable=unsupported-membership-test
return [None]
mirror_list = []
@@ -490,15 +467,9 @@ class Project:
# (list): A list of loaded Element
#
def load_elements(self, targets, *, rewritable=False, ignore_workspaces=False):
- with self._context.messenger.simple_task(
- "Loading elements", silent_nested=True
- ) as task:
+ with self._context.messenger.simple_task("Loading elements", silent_nested=True) as task:
meta_elements = self.loader.load(
- targets,
- task,
- rewritable=rewritable,
- ticker=None,
- ignore_workspaces=ignore_workspaces,
+ targets, task, rewritable=rewritable, ticker=None, ignore_workspaces=ignore_workspaces,
)
with self._context.messenger.simple_task("Resolving elements") as task:
@@ -512,20 +483,11 @@ class Project:
# been discovered in the resolve() phase.
redundant_refs = Element._get_redundant_source_refs()
if redundant_refs:
- detail = (
- "The following inline specified source references will be ignored:\n\n"
- )
- lines = [
- "{}:{}".format(source._get_provenance(), ref)
- for source, ref in redundant_refs
- ]
+ detail = "The following inline specified source references will be ignored:\n\n"
+ lines = ["{}:{}".format(source._get_provenance(), ref) for source, ref in redundant_refs]
detail += "\n".join(lines)
self._context.messenger.message(
- Message(
- MessageType.WARN,
- "Ignoring redundant source references",
- detail=detail,
- )
+ Message(MessageType.WARN, "Ignoring redundant source references", detail=detail,)
)
return elements
@@ -551,9 +513,7 @@ class Project:
#
artifacts = []
for ref in targets:
- artifacts.append(
- ArtifactElement._new_from_artifact_ref(ref, self._context, task)
- )
+ artifacts.append(ArtifactElement._new_from_artifact_ref(ref, self._context, task))
ArtifactElement._clear_artifact_refs_cache()
@@ -667,9 +627,7 @@ class Project:
major, minor = utils.get_bst_version()
raise LoadError(
"Project requested format version {}, but BuildStream {}.{} only supports format version {} or above."
- "Use latest 1.x release".format(
- format_version, major, minor, BST_FORMAT_VERSION_MIN
- ),
+ "Use latest 1.x release".format(format_version, major, minor, BST_FORMAT_VERSION_MIN),
LoadErrorReason.UNSUPPORTED_PROJECT,
)
@@ -690,15 +648,10 @@ class Project:
self.name = self._project_conf.get_str("name")
# Validate that project name is a valid symbol name
- _assert_symbol_name(
- self.name, "project name", ref_node=pre_config_node.get_node("name")
- )
+ _assert_symbol_name(self.name, "project name", ref_node=pre_config_node.get_node("name"))
self.element_path = os.path.join(
- self.directory,
- self.get_path_from_node(
- pre_config_node.get_scalar("element-path"), check_is_dir=True
- ),
+ self.directory, self.get_path_from_node(pre_config_node.get_scalar("element-path"), check_is_dir=True),
)
self.config.options = OptionPool(self.element_path)
@@ -709,16 +662,9 @@ class Project:
self._default_targets = defaults.get_str_list("targets")
# Fatal warnings
- self._fatal_warnings = pre_config_node.get_str_list(
- "fatal-warnings", default=[]
- )
+ self._fatal_warnings = pre_config_node.get_str_list("fatal-warnings", default=[])
- self.loader = Loader(
- self._context,
- self,
- parent=parent_loader,
- fetch_subprojects=fetch_subprojects,
- )
+ self.loader = Loader(self._context, self, parent=parent_loader, fetch_subprojects=fetch_subprojects,)
self._project_includes = Includes(self.loader, copy_tree=False)
@@ -738,9 +684,7 @@ class Project:
]:
p = ref_storage_node.get_provenance()
raise LoadError(
- "{}: Invalid value '{}' specified for ref-storage".format(
- p, self.ref_storage
- ),
+ "{}: Invalid value '{}' specified for ref-storage".format(p, self.ref_storage),
LoadErrorReason.INVALID_DATA,
)
@@ -767,32 +711,24 @@ class Project:
#
# Load artifacts pull/push configuration for this project
- self.artifact_cache_specs = ArtifactCache.specs_from_config_node(
- config, self.directory
- )
+ self.artifact_cache_specs = ArtifactCache.specs_from_config_node(config, self.directory)
# If there is a junction Element which specifies that we want to remotely cache
# its elements, append the junction's remotes to the artifact cache specs list
if self.junction:
parent = self.junction._get_project()
if self.junction.cache_junction_elements:
- self.artifact_cache_specs = (
- parent.artifact_cache_specs + self.artifact_cache_specs
- )
+ self.artifact_cache_specs = parent.artifact_cache_specs + self.artifact_cache_specs
if self.junction.ignore_junction_remotes:
self.artifact_cache_specs = []
# Load source caches with pull/push config
- self.source_cache_specs = SourceCache.specs_from_config_node(
- config, self.directory
- )
+ self.source_cache_specs = SourceCache.specs_from_config_node(config, self.directory)
# Load remote-execution configuration for this project
project_specs = SandboxRemote.specs_from_config_node(config, self.directory)
- override_specs = SandboxRemote.specs_from_config_node(
- self._context.get_overrides(self.name), self.directory
- )
+ override_specs = SandboxRemote.specs_from_config_node(self._context.get_overrides(self.name), self.directory)
if override_specs is not None:
self.remote_execution_specs = override_specs
@@ -824,9 +760,7 @@ class Project:
)
)
- if (
- CoreWarnings.OVERLAPS not in self._fatal_warnings
- ) and fail_on_overlap.as_bool():
+ if (CoreWarnings.OVERLAPS not in self._fatal_warnings) and fail_on_overlap.as_bool():
self._fatal_warnings.append(CoreWarnings.OVERLAPS)
# Load project.refs if it exists, this may be ignored.
@@ -889,18 +823,14 @@ class Project:
output.options.load(options_node)
if self.junction:
# load before user configuration
- output.options.load_yaml_values(
- self.junction.options, transform=self.junction.node_subst_vars
- )
+ output.options.load_yaml_values(self.junction.options, transform=self.junction.node_subst_vars)
# Collect option values specified in the user configuration
overrides = self._context.get_overrides(self.name)
override_options = overrides.get_mapping("options", default={})
output.options.load_yaml_values(override_options)
if self._cli_options:
- output.options.load_cli_values(
- self._cli_options, ignore_unknown=ignore_unknown
- )
+ output.options.load_cli_values(self._cli_options, ignore_unknown=ignore_unknown)
# We're done modifying options, now we can use them for substitutions
output.options.resolve()
@@ -938,9 +868,7 @@ class Project:
output.options.export_variables(output.base_variables)
# Override default_mirror if not set by command-line
- output.default_mirror = self._default_mirror or overrides.get_str(
- "default-mirror", default=None
- )
+ output.default_mirror = self._default_mirror or overrides.get_str("default-mirror", default=None)
mirrors = config.get_sequence("mirrors", default=[])
for mirror in mirrors:
@@ -949,9 +877,7 @@ class Project:
mirror_name = mirror.get_str("name")
alias_mappings = {}
for alias_mapping, uris in mirror.get_mapping("aliases").items():
- assert (
- type(uris) is SequenceNode
- ) # pylint: disable=unidiomatic-typecheck
+ assert type(uris) is SequenceNode # pylint: disable=unidiomatic-typecheck
alias_mappings[alias_mapping] = uris.as_str_list()
output.mirrors[mirror_name] = alias_mappings
if not output.default_mirror:
@@ -978,9 +904,7 @@ class Project:
def _find_project_dir(self, directory):
workspace_element = None
config_filenames = [_PROJECT_CONF_FILE, WORKSPACE_PROJECT_FILE]
- found_directory, filename = utils._search_upward_for_files(
- directory, config_filenames
- )
+ found_directory, filename = utils._search_upward_for_files(directory, config_filenames)
if filename == _PROJECT_CONF_FILE:
project_directory = found_directory
elif filename == WORKSPACE_PROJECT_FILE:
@@ -1022,8 +946,7 @@ class Project:
for key in source_versions.keys():
if key in source_format_versions:
raise LoadError(
- "Duplicate listing of source '{}'".format(key),
- LoadErrorReason.INVALID_YAML,
+ "Duplicate listing of source '{}'".format(key), LoadErrorReason.INVALID_YAML,
)
source_format_versions[key] = source_versions.get_int(key)
@@ -1032,8 +955,7 @@ class Project:
for key in element_versions.keys():
if key in element_format_versions:
raise LoadError(
- "Duplicate listing of element '{}'".format(key),
- LoadErrorReason.INVALID_YAML,
+ "Duplicate listing of element '{}'".format(key), LoadErrorReason.INVALID_YAML,
)
element_format_versions[key] = element_versions.get_int(key)
@@ -1047,14 +969,10 @@ class Project:
pluginbase = PluginBase(package="buildstream.plugins")
output.element_factory = ElementFactory(
- pluginbase,
- plugin_origins=plugin_element_origins,
- format_versions=element_format_versions,
+ pluginbase, plugin_origins=plugin_element_origins, format_versions=element_format_versions,
)
output.source_factory = SourceFactory(
- pluginbase,
- plugin_origins=plugin_source_origins,
- format_versions=source_format_versions,
+ pluginbase, plugin_origins=plugin_source_origins, format_versions=source_format_versions,
)
# _store_origin()
@@ -1074,9 +992,7 @@ class Project:
expected_groups = ["sources", "elements"]
if plugin_group not in expected_groups:
raise LoadError(
- "Unexpected plugin group: {}, expecting {}".format(
- plugin_group, expected_groups
- ),
+ "Unexpected plugin group: {}, expecting {}".format(plugin_group, expected_groups),
LoadErrorReason.INVALID_DATA,
)
if plugin_group in origin.keys():
@@ -1089,9 +1005,7 @@ class Project:
del origin_node[group]
if origin_node.get_enum("origin", PluginOrigins) == PluginOrigins.LOCAL:
- path = self.get_path_from_node(
- origin.get_scalar("path"), check_is_dir=True
- )
+ path = self.get_path_from_node(origin.get_scalar("path"), check_is_dir=True)
# paths are passed in relative to the project, but must be absolute
origin_node["path"] = os.path.join(self.directory, path)
destination.append(origin_node)