Diffstat (limited to 'src/buildstream')
 src/buildstream/_artifact.py | 47
 src/buildstream/_artifactcache.py | 79
 src/buildstream/_artifactelement.py | 8
 src/buildstream/_basecache.py | 35
 src/buildstream/_cachekey.py | 4
 src/buildstream/_cas/cascache.py | 139
 src/buildstream/_cas/casremote.py | 20
 src/buildstream/_cas/casserver.py | 71
 src/buildstream/_context.py | 61
 src/buildstream/_exceptions.py | 52
 src/buildstream/_frontend/app.py | 148
 src/buildstream/_frontend/cli.py | 370
 src/buildstream/_frontend/complete.py | 37
 src/buildstream/_frontend/status.py | 48
 src/buildstream/_frontend/widget.py | 211
 src/buildstream/_fuse/fuse.py | 104
 src/buildstream/_fuse/mount.py | 22
 src/buildstream/_gitsourcebase.py | 186
 src/buildstream/_includes.py | 51
 src/buildstream/_loader/loader.py | 155
 src/buildstream/_loader/metasource.py | 4
 src/buildstream/_message.py | 12
 src/buildstream/_messenger.py | 58
 src/buildstream/_options/option.py | 4
 src/buildstream/_options/optionarch.py | 8
 src/buildstream/_options/optionbool.py | 3
 src/buildstream/_options/optionenum.py | 4
 src/buildstream/_options/optionflags.py | 4
 src/buildstream/_options/optionpool.py | 38
 src/buildstream/_pipeline.py | 73
 src/buildstream/_platform/darwin.py | 3
 src/buildstream/_platform/platform.py | 18
 src/buildstream/_platform/win32.py | 4
 src/buildstream/_plugincontext.py | 69
 src/buildstream/_profile.py | 8
 src/buildstream/_project.py | 168
 src/buildstream/_projectrefs.py | 8
 src/buildstream/_protos/buildstream/v2/artifact_pb2.py | 20
 src/buildstream/_protos/buildstream/v2/artifact_pb2_grpc.py | 8
 src/buildstream/_protos/buildstream/v2/buildstream_pb2.py | 32
 src/buildstream/_protos/buildstream/v2/buildstream_pb2_grpc.py | 12
 src/buildstream/_protos/buildstream/v2/source_pb2.py | 8
 src/buildstream/_protos/buildstream/v2/source_pb2_grpc.py | 8
 src/buildstream/_protos/google/api/annotations_pb2.py | 5
 src/buildstream/_protos/google/api/http_pb2.py | 14
 src/buildstream/_protos/google/bytestream/bytestream_pb2.py | 9
 src/buildstream/_protos/google/bytestream/bytestream_pb2_grpc.py | 8
 src/buildstream/_protos/google/longrunning/operations_pb2.py | 42
 src/buildstream/_protos/google/longrunning/operations_pb2_grpc.py | 8
 src/buildstream/_protos/google/rpc/code_pb2.py | 82
 src/buildstream/_remote.py | 43
 src/buildstream/_scheduler/jobs/elementjob.py | 12
 src/buildstream/_scheduler/jobs/job.py | 136
 src/buildstream/_scheduler/jobs/jobpickler.py | 9
 src/buildstream/_scheduler/queues/queue.py | 37
 src/buildstream/_scheduler/resources.py | 5
 src/buildstream/_scheduler/scheduler.py | 34
 src/buildstream/_sourcecache.py | 60
 src/buildstream/_state.py | 4
 src/buildstream/_stream.py | 321
 src/buildstream/_version.py | 24
 src/buildstream/_workspaces.py | 69
 src/buildstream/buildelement.py | 16
 src/buildstream/element.py | 474
 src/buildstream/plugin.py | 87
 src/buildstream/plugins/elements/autotools.py | 4
 src/buildstream/plugins/elements/compose.py | 12
 src/buildstream/plugins/elements/filter.py | 37
 src/buildstream/plugins/elements/import.py | 22
 src/buildstream/plugins/elements/junction.py | 28
 src/buildstream/plugins/elements/manual.py | 4
 src/buildstream/plugins/elements/pip.py | 4
 src/buildstream/plugins/sources/_downloadablefilesource.py | 29
 src/buildstream/plugins/sources/bzr.py | 59
 src/buildstream/plugins/sources/deb.py | 4
 src/buildstream/plugins/sources/local.py | 3
 src/buildstream/plugins/sources/patch.py | 16
 src/buildstream/plugins/sources/pip.py | 15
 src/buildstream/plugins/sources/remote.py | 7
 src/buildstream/plugins/sources/tar.py | 13
 src/buildstream/plugins/sources/workspace.py | 3
 src/buildstream/plugins/sources/zip.py | 10
 src/buildstream/sandbox/_mount.py | 16
 src/buildstream/sandbox/_mounter.py | 21
 src/buildstream/sandbox/_sandboxbuildbox.py | 37
 src/buildstream/sandbox/_sandboxbwrap.py | 44
 src/buildstream/sandbox/_sandboxchroot.py | 48
 src/buildstream/sandbox/_sandboxdummy.py | 6
 src/buildstream/sandbox/_sandboxremote.py | 114
 src/buildstream/sandbox/sandbox.py | 40
 src/buildstream/scriptelement.py | 44
 src/buildstream/source.py | 160
 src/buildstream/storage/_casbaseddirectory.py | 134
 src/buildstream/storage/_filebaseddirectory.py | 59
 src/buildstream/testing/__init__.py | 9
 src/buildstream/testing/_sourcetests/build_checkout.py | 5
 src/buildstream/testing/_sourcetests/fetch.py | 12
 src/buildstream/testing/_sourcetests/mirror.py | 28
 src/buildstream/testing/_sourcetests/source_determinism.py | 13
 src/buildstream/testing/_sourcetests/track.py | 65
 src/buildstream/testing/_sourcetests/track_cross_junction.py | 22
 src/buildstream/testing/_sourcetests/utils.py | 11
 src/buildstream/testing/_sourcetests/workspace.py | 40
 src/buildstream/testing/_utils/junction.py | 4
 src/buildstream/testing/integration.py | 4
 src/buildstream/testing/runcli.py | 76
 src/buildstream/types.py | 16
 src/buildstream/utils.py | 78
 108 files changed, 1176 insertions(+), 4121 deletions(-)
diff --git a/src/buildstream/_artifact.py b/src/buildstream/_artifact.py
index a06b189ed..1feda2246 100644
--- a/src/buildstream/_artifact.py
+++ b/src/buildstream/_artifact.py
@@ -61,18 +61,10 @@ class Artifact:
self._tmpdir = context.tmpdir
self._proto = None
- self._metadata_keys = (
- None # Strong and weak key tuple extracted from the artifact
- )
- self._metadata_dependencies = (
- None # Dictionary of dependency strong keys from the artifact
- )
- self._metadata_workspaced = (
- None # Boolean of whether it's a workspaced artifact
- )
- self._metadata_workspaced_dependencies = (
- None # List of which dependencies are workspaced from the artifact
- )
+ self._metadata_keys = None # Strong and weak key tuple extracted from the artifact
+ self._metadata_dependencies = None # Dictionary of dependency strong keys from the artifact
+ self._metadata_workspaced = None # Boolean of whether it's a workspaced artifact
+ self._metadata_workspaced_dependencies = None # List of which dependencies are workspaced from the artifact
self._cached = None # Boolean of whether the artifact is cached
# get_files():
@@ -202,10 +194,7 @@ class Artifact:
size += buildtreevdir.get_size()
os.makedirs(
- os.path.dirname(
- os.path.join(self._artifactdir, element.get_artifact_name())
- ),
- exist_ok=True,
+ os.path.dirname(os.path.join(self._artifactdir, element.get_artifact_name())), exist_ok=True,
)
keys = utils._deduplicate([self._cache_key, self._weak_cache_key])
for key in keys:
@@ -321,9 +310,7 @@ class Artifact:
# Extract proto
artifact = self._get_proto()
- self._metadata_dependencies = {
- dep.element_name: dep.cache_key for dep in artifact.build_deps
- }
+ self._metadata_dependencies = {dep.element_name: dep.cache_key for dep in artifact.build_deps}
return self._metadata_dependencies
@@ -388,11 +375,7 @@ class Artifact:
if deps == Scope.BUILD:
try:
dependency_refs = [
- os.path.join(
- dep.project_name,
- _get_normal_name(dep.element_name),
- dep.cache_key,
- )
+ os.path.join(dep.project_name, _get_normal_name(dep.element_name), dep.cache_key,)
for dep in artifact.build_deps
]
except AttributeError:
@@ -410,9 +393,7 @@ class Artifact:
# 1. It might trigger unnecessary rebuilds.
# 2. It would be impossible to support cyclic runtime dependencies
# in the future
- raise ArtifactError(
- "Dependency scope: {} is not supported for artifacts".format(deps)
- )
+ raise ArtifactError("Dependency scope: {} is not supported for artifacts".format(deps))
return dependency_refs
@@ -442,17 +423,13 @@ class Artifact:
# Determine whether directories are required
require_directories = context.require_artifact_directories
# Determine whether file contents are required as well
- require_files = (
- context.require_artifact_files or self._element._artifact_files_required()
- )
+ require_files = context.require_artifact_files or self._element._artifact_files_required()
# Check whether 'files' subdirectory is available, with or without file contents
if (
require_directories
and str(artifact.files)
- and not self._cas.contains_directory(
- artifact.files, with_files=require_files
- )
+ and not self._cas.contains_directory(artifact.files, with_files=require_files)
):
self._cached = False
return False
@@ -500,9 +477,7 @@ class Artifact:
key = self.get_extract_key()
- proto_path = os.path.join(
- self._artifactdir, self._element.get_artifact_name(key=key)
- )
+ proto_path = os.path.join(self._artifactdir, self._element.get_artifact_name(key=key))
artifact = ArtifactProto()
try:
with open(proto_path, mode="r+b") as f:
diff --git a/src/buildstream/_artifactcache.py b/src/buildstream/_artifactcache.py
index 2eb738db1..40b23e126 100644
--- a/src/buildstream/_artifactcache.py
+++ b/src/buildstream/_artifactcache.py
@@ -195,12 +195,7 @@ class ArtifactCache(BaseCache):
# ([str]) - A list of artifact names as generated in LRU order
#
def list_artifacts(self, *, glob=None):
- return [
- ref
- for _, ref in sorted(
- list(self._list_refs_mtimes(self.artifactdir, glob_expr=glob))
- )
- ]
+ return [ref for _, ref in sorted(list(self._list_refs_mtimes(self.artifactdir, glob_expr=glob)))]
# remove():
#
@@ -239,9 +234,7 @@ class ArtifactCache(BaseCache):
removed = []
modified = []
- self.cas.diff_trees(
- digest_a, digest_b, added=added, removed=removed, modified=modified
- )
+ self.cas.diff_trees(digest_a, digest_b, added=added, removed=removed, modified=modified)
return modified, removed, added
@@ -271,14 +264,10 @@ class ArtifactCache(BaseCache):
# can perform file checks on their end
for remote in storage_remotes:
remote.init()
- element.status(
- "Pushing data from artifact {} -> {}".format(display_key, remote)
- )
+ element.status("Pushing data from artifact {} -> {}".format(display_key, remote))
if self._push_artifact_blobs(artifact, remote):
- element.info(
- "Pushed data from artifact {} -> {}".format(display_key, remote)
- )
+ element.info("Pushed data from artifact {} -> {}".format(display_key, remote))
else:
element.info(
"Remote ({}) already has all data of artifact {} cached".format(
@@ -295,9 +284,7 @@ class ArtifactCache(BaseCache):
pushed = True
else:
element.info(
- "Remote ({}) already has artifact {} cached".format(
- remote, element._get_brief_display_key()
- )
+ "Remote ({}) already has artifact {} cached".format(remote, element._get_brief_display_key())
)
return pushed
@@ -331,19 +318,14 @@ class ArtifactCache(BaseCache):
element.info("Pulled artifact {} <- {}".format(display_key, remote))
break
else:
- element.info(
- "Remote ({}) does not have artifact {} cached".format(
- remote, display_key
- )
- )
+ element.info("Remote ({}) does not have artifact {} cached".format(remote, display_key))
except CASError as e:
element.warn("Could not pull from remote {}: {}".format(remote, e))
errors.append(e)
if errors and not artifact:
raise ArtifactError(
- "Failed to pull artifact {}".format(display_key),
- detail="\n".join(str(e) for e in errors),
+ "Failed to pull artifact {}".format(display_key), detail="\n".join(str(e) for e in errors),
)
# If we don't have an artifact, we can't exactly pull our
@@ -356,31 +338,20 @@ class ArtifactCache(BaseCache):
for remote in self._storage_remotes[project]:
remote.init()
try:
- element.status(
- "Pulling data for artifact {} <- {}".format(display_key, remote)
- )
+ element.status("Pulling data for artifact {} <- {}".format(display_key, remote))
- if self._pull_artifact_storage(
- element, artifact, remote, pull_buildtrees=pull_buildtrees
- ):
- element.info(
- "Pulled data for artifact {} <- {}".format(display_key, remote)
- )
+ if self._pull_artifact_storage(element, artifact, remote, pull_buildtrees=pull_buildtrees):
+ element.info("Pulled data for artifact {} <- {}".format(display_key, remote))
return True
- element.info(
- "Remote ({}) does not have artifact {} cached".format(
- remote, display_key
- )
- )
+ element.info("Remote ({}) does not have artifact {} cached".format(remote, display_key))
except CASError as e:
element.warn("Could not pull from remote {}: {}".format(remote, e))
errors.append(e)
if errors:
raise ArtifactError(
- "Failed to pull artifact {}".format(display_key),
- detail="\n".join(str(e) for e in errors),
+ "Failed to pull artifact {}".format(display_key), detail="\n".join(str(e) for e in errors),
)
return False
@@ -424,8 +395,7 @@ class ArtifactCache(BaseCache):
if not push_remotes:
raise ArtifactError(
- "push_message was called, but no remote artifact "
- + "servers are configured as push remotes."
+ "push_message was called, but no remote artifact " + "servers are configured as push remotes."
)
for remote in push_remotes:
@@ -448,8 +418,7 @@ class ArtifactCache(BaseCache):
if not os.path.exists(os.path.join(self.artifactdir, newref)):
os.link(
- os.path.join(self.artifactdir, oldref),
- os.path.join(self.artifactdir, newref),
+ os.path.join(self.artifactdir, oldref), os.path.join(self.artifactdir, newref),
)
# get_artifact_logs():
@@ -622,15 +591,11 @@ class ArtifactCache(BaseCache):
except CASRemoteError as cas_error:
if cas_error.reason != "cache-too-full":
- raise ArtifactError(
- "Failed to push artifact blobs: {}".format(cas_error)
- )
+ raise ArtifactError("Failed to push artifact blobs: {}".format(cas_error))
return False
except grpc.RpcError as e:
if e.code() != grpc.StatusCode.RESOURCE_EXHAUSTED:
- raise ArtifactError(
- "Failed to push artifact blobs: {}".format(e.details())
- )
+ raise ArtifactError("Failed to push artifact blobs: {}".format(e.details()))
return False
return True
@@ -655,9 +620,7 @@ class ArtifactCache(BaseCache):
artifact_proto = artifact._get_proto()
- keys = list(
- utils._deduplicate([artifact_proto.strong_key, artifact_proto.weak_key])
- )
+ keys = list(utils._deduplicate([artifact_proto.strong_key, artifact_proto.weak_key]))
# Check whether the artifact is on the server
for key in keys:
@@ -665,18 +628,14 @@ class ArtifactCache(BaseCache):
remote.get_artifact(element.get_artifact_name(key=key))
except grpc.RpcError as e:
if e.code() != grpc.StatusCode.NOT_FOUND:
- raise ArtifactError(
- "Error checking artifact cache: {}".format(e.details())
- )
+ raise ArtifactError("Error checking artifact cache: {}".format(e.details()))
else:
return False
# If not, we send the artifact proto
for key in keys:
try:
- remote.update_artifact(
- element.get_artifact_name(key=key), artifact_proto
- )
+ remote.update_artifact(element.get_artifact_name(key=key), artifact_proto)
except grpc.RpcError as e:
raise ArtifactError("Failed to push artifact: {}".format(e.details()))
diff --git a/src/buildstream/_artifactelement.py b/src/buildstream/_artifactelement.py
index dfdd751a3..1c1c5db46 100644
--- a/src/buildstream/_artifactelement.py
+++ b/src/buildstream/_artifactelement.py
@@ -173,15 +173,11 @@ class ArtifactElement(Element):
#
def verify_artifact_ref(ref):
try:
- project, element, key = ref.split(
- "/", 2
- ) # This will raise a Value error if unable to split
+ project, element, key = ref.split("/", 2) # This will raise a Value error if unable to split
# Explicitly raise a ValueError if the key length is not as expected
if not _cachekey.is_key(key):
raise ValueError
except ValueError:
- raise ArtifactElementError(
- "Artifact: {} is not of the expected format".format(ref)
- )
+ raise ArtifactElementError("Artifact: {} is not of the expected format".format(ref))
return project, element, key
diff --git a/src/buildstream/_basecache.py b/src/buildstream/_basecache.py
index d277fa504..f4b5c602f 100644
--- a/src/buildstream/_basecache.py
+++ b/src/buildstream/_basecache.py
@@ -68,9 +68,7 @@ class BaseCache:
# against fork() with open gRPC channels.
#
def has_open_grpc_channels(self):
- for project_remotes in chain(
- self._index_remotes.values(), self._storage_remotes.values()
- ):
+ for project_remotes in chain(self._index_remotes.values(), self._storage_remotes.values()):
for remote in project_remotes:
if remote.channel:
return True
@@ -82,9 +80,7 @@ class BaseCache:
#
def close_grpc_channels(self):
# Close all remotes and their gRPC channels
- for project_remotes in chain(
- self._index_remotes.values(), self._storage_remotes.values()
- ):
+ for project_remotes in chain(self._index_remotes.values(), self._storage_remotes.values()):
for remote in project_remotes:
remote.close()
@@ -152,9 +148,7 @@ class BaseCache:
project_specs = getattr(project, cls.spec_name)
context_specs = getattr(context, cls.spec_name)
- return list(
- utils._deduplicate(project_extra_specs + project_specs + context_specs)
- )
+ return list(utils._deduplicate(project_extra_specs + project_specs + context_specs))
# setup_remotes():
#
@@ -207,9 +201,7 @@ class BaseCache:
# on_failure (callable): Called if we fail to contact one of the caches.
#
def initialize_remotes(self, *, on_failure=None):
- index_remotes, storage_remotes = self._create_remote_instances(
- on_failure=on_failure
- )
+ index_remotes, storage_remotes = self._create_remote_instances(on_failure=on_failure)
# Assign remote instances to their respective projects
for project in self.context.get_projects():
@@ -232,12 +224,8 @@ class BaseCache:
yield remote_list[remote_spec]
- self._index_remotes[project] = list(
- get_remotes(index_remotes, remote_specs)
- )
- self._storage_remotes[project] = list(
- get_remotes(storage_remotes, remote_specs)
- )
+ self._index_remotes[project] = list(get_remotes(index_remotes, remote_specs))
+ self._storage_remotes[project] = list(get_remotes(storage_remotes, remote_specs))
# has_fetch_remotes():
#
@@ -409,13 +397,10 @@ class BaseCache:
def _initialize_remotes(self):
def remote_failed(remote, error):
self._message(
- MessageType.WARN,
- "Failed to initialize remote {}: {}".format(remote.url, error),
+ MessageType.WARN, "Failed to initialize remote {}: {}".format(remote.url, error),
)
- with self.context.messenger.timed_activity(
- "Initializing remote caches", silent_nested=True
- ):
+ with self.context.messenger.timed_activity("Initializing remote caches", silent_nested=True):
self.initialize_remotes(on_failure=remote_failed)
# _list_refs_mtimes()
@@ -442,9 +427,7 @@ class BaseCache:
for root, _, files in os.walk(path):
for filename in files:
ref_path = os.path.join(root, filename)
- relative_path = os.path.relpath(
- ref_path, base_path
- ) # Relative to refs head
+ relative_path = os.path.relpath(ref_path, base_path) # Relative to refs head
if not glob_expr or fnmatch(relative_path, glob_expr):
# Obtain the mtime (the time a file was last modified)
yield (os.path.getmtime(ref_path), relative_path)
diff --git a/src/buildstream/_cachekey.py b/src/buildstream/_cachekey.py
index 8c6382bd5..dd9207516 100644
--- a/src/buildstream/_cachekey.py
+++ b/src/buildstream/_cachekey.py
@@ -62,7 +62,5 @@ def is_key(key):
# (str): An sha256 hex digest of the given value
#
def generate_key(value):
- ustring = ujson.dumps(value, sort_keys=True, escape_forward_slashes=False).encode(
- "utf-8"
- )
+ ustring = ujson.dumps(value, sort_keys=True, escape_forward_slashes=False).encode("utf-8")
return hashlib.sha256(ustring).hexdigest()
diff --git a/src/buildstream/_cas/cascache.py b/src/buildstream/_cas/cascache.py
index 3caa745da..f1df9d1a2 100644
--- a/src/buildstream/_cas/cascache.py
+++ b/src/buildstream/_cas/cascache.py
@@ -73,13 +73,7 @@ class CASLogLevel(FastEnum):
#
class CASCache:
def __init__(
- self,
- path,
- *,
- casd=True,
- cache_quota=None,
- protect_session_blobs=True,
- log_level=CASLogLevel.WARNING
+ self, path, *, casd=True, cache_quota=None, protect_session_blobs=True, log_level=CASLogLevel.WARNING
):
self.casdir = os.path.join(path, "cas")
self.tmpdir = os.path.join(path, "tmp")
@@ -97,9 +91,7 @@ class CASCache:
# Place socket in global/user temporary directory to avoid hitting
# the socket path length limit.
self._casd_socket_tempdir = tempfile.mkdtemp(prefix="buildstream")
- self._casd_socket_path = os.path.join(
- self._casd_socket_tempdir, "casd.sock"
- )
+ self._casd_socket_path = os.path.join(self._casd_socket_tempdir, "casd.sock")
casd_args = [utils.get_host_tool("buildbox-casd")]
casd_args.append("--bind=unix:" + self._casd_socket_path)
@@ -155,24 +147,16 @@ class CASCache:
# casd is not ready yet, try again after a 10ms delay,
# but don't wait for more than 15s
if time.time() > self._casd_start_time + 15:
- raise CASCacheError(
- "Timed out waiting for buildbox-casd to become ready"
- )
+ raise CASCacheError("Timed out waiting for buildbox-casd to become ready")
time.sleep(0.01)
self._casd_channel = grpc.insecure_channel("unix:" + self._casd_socket_path)
- self._casd_cas = remote_execution_pb2_grpc.ContentAddressableStorageStub(
- self._casd_channel
- )
- self._local_cas = local_cas_pb2_grpc.LocalContentAddressableStorageStub(
- self._casd_channel
- )
+ self._casd_cas = remote_execution_pb2_grpc.ContentAddressableStorageStub(self._casd_channel)
+ self._local_cas = local_cas_pb2_grpc.LocalContentAddressableStorageStub(self._casd_channel)
# Call GetCapabilities() to establish connection to casd
- capabilities = remote_execution_pb2_grpc.CapabilitiesStub(
- self._casd_channel
- )
+ capabilities = remote_execution_pb2_grpc.CapabilitiesStub(self._casd_channel)
capabilities.GetCapabilities(remote_execution_pb2.GetCapabilitiesRequest())
# _get_cas():
@@ -201,9 +185,7 @@ class CASCache:
headdir = os.path.join(self.casdir, "refs", "heads")
objdir = os.path.join(self.casdir, "objects")
if not (os.path.isdir(headdir) and os.path.isdir(objdir)):
- raise CASCacheError(
- "CAS repository check failed for '{}'".format(self.casdir)
- )
+ raise CASCacheError("CAS repository check failed for '{}'".format(self.casdir))
# has_open_grpc_channels():
#
@@ -289,9 +271,7 @@ class CASCache:
if e.code() == grpc.StatusCode.NOT_FOUND:
return False
if e.code() == grpc.StatusCode.UNIMPLEMENTED:
- raise CASCacheError(
- "Unsupported buildbox-casd version: FetchTree unimplemented"
- ) from e
+ raise CASCacheError("Unsupported buildbox-casd version: FetchTree unimplemented") from e
raise
# checkout():
@@ -414,15 +394,7 @@ class CASCache:
#
# Either `path` or `buffer` must be passed, but not both.
#
- def add_object(
- self,
- *,
- digest=None,
- path=None,
- buffer=None,
- link_directly=False,
- instance_name=None
- ):
+ def add_object(self, *, digest=None, path=None, buffer=None, link_directly=False, instance_name=None):
# Exactly one of the two parameters has to be specified
assert (path is None) != (buffer is None)
@@ -450,21 +422,13 @@ class CASCache:
response = local_cas.CaptureFiles(request)
if len(response.responses) != 1:
- raise CASCacheError(
- "Expected 1 response from CaptureFiles, got {}".format(
- len(response.responses)
- )
- )
+ raise CASCacheError("Expected 1 response from CaptureFiles, got {}".format(len(response.responses)))
blob_response = response.responses[0]
if blob_response.status.code == code_pb2.RESOURCE_EXHAUSTED:
raise CASCacheError("Cache too full", reason="cache-too-full")
if blob_response.status.code != code_pb2.OK:
- raise CASCacheError(
- "Failed to capture blob {}: {}".format(
- path, blob_response.status.code
- )
- )
+ raise CASCacheError("Failed to capture blob {}: {}".format(path, blob_response.status.code))
digest.CopyFrom(blob_response.digest)
return digest
@@ -487,19 +451,13 @@ class CASCache:
response = local_cas.CaptureTree(request)
if len(response.responses) != 1:
- raise CASCacheError(
- "Expected 1 response from CaptureTree, got {}".format(
- len(response.responses)
- )
- )
+ raise CASCacheError("Expected 1 response from CaptureTree, got {}".format(len(response.responses)))
tree_response = response.responses[0]
if tree_response.status.code == code_pb2.RESOURCE_EXHAUSTED:
raise CASCacheError("Cache too full", reason="cache-too-full")
if tree_response.status.code != code_pb2.OK:
- raise CASCacheError(
- "Failed to capture tree {}: {}".format(path, tree_response.status.code)
- )
+ raise CASCacheError("Failed to capture tree {}: {}".format(path, tree_response.status.code))
treepath = self.objpath(tree_response.tree_digest)
tree = remote_execution_pb2.Tree()
@@ -547,9 +505,7 @@ class CASCache:
return digest
except FileNotFoundError as e:
- raise CASCacheError(
- "Attempt to access unavailable ref: {}".format(e)
- ) from e
+ raise CASCacheError("Attempt to access unavailable ref: {}".format(e)) from e
# update_mtime()
#
@@ -562,9 +518,7 @@ class CASCache:
try:
os.utime(self._refpath(ref))
except FileNotFoundError as e:
- raise CASCacheError(
- "Attempt to access unavailable ref: {}".format(e)
- ) from e
+ raise CASCacheError("Attempt to access unavailable ref: {}".format(e)) from e
# remove():
#
@@ -616,9 +570,7 @@ class CASCache:
missing_blobs = dict()
# Limit size of FindMissingBlobs request
for required_blobs_group in _grouper(iter(blobs), 512):
- request = remote_execution_pb2.FindMissingBlobsRequest(
- instance_name=instance_name
- )
+ request = remote_execution_pb2.FindMissingBlobsRequest(instance_name=instance_name)
for required_digest in required_blobs_group:
d = request.blob_digests.add()
@@ -627,12 +579,8 @@ class CASCache:
try:
response = cas.FindMissingBlobs(request)
except grpc.RpcError as e:
- if e.code() == grpc.StatusCode.INVALID_ARGUMENT and e.details().startswith(
- "Invalid instance name"
- ):
- raise CASCacheError(
- "Unsupported buildbox-casd version: FindMissingBlobs failed"
- ) from e
+ if e.code() == grpc.StatusCode.INVALID_ARGUMENT and e.details().startswith("Invalid instance name"):
+ raise CASCacheError("Unsupported buildbox-casd version: FindMissingBlobs failed") from e
raise
for missing_digest in response.missing_blob_digests:
@@ -698,14 +646,10 @@ class CASCache:
a = 0
b = 0
while a < len(dir_a.files) or b < len(dir_b.files):
- if b < len(dir_b.files) and (
- a >= len(dir_a.files) or dir_a.files[a].name > dir_b.files[b].name
- ):
+ if b < len(dir_b.files) and (a >= len(dir_a.files) or dir_a.files[a].name > dir_b.files[b].name):
added.append(os.path.join(path, dir_b.files[b].name))
b += 1
- elif a < len(dir_a.files) and (
- b >= len(dir_b.files) or dir_b.files[b].name > dir_a.files[a].name
- ):
+ elif a < len(dir_a.files) and (b >= len(dir_b.files) or dir_b.files[b].name > dir_a.files[a].name):
removed.append(os.path.join(path, dir_a.files[a].name))
a += 1
else:
@@ -719,8 +663,7 @@ class CASCache:
b = 0
while a < len(dir_a.directories) or b < len(dir_b.directories):
if b < len(dir_b.directories) and (
- a >= len(dir_a.directories)
- or dir_a.directories[a].name > dir_b.directories[b].name
+ a >= len(dir_a.directories) or dir_a.directories[a].name > dir_b.directories[b].name
):
self.diff_trees(
None,
@@ -732,8 +675,7 @@ class CASCache:
)
b += 1
elif a < len(dir_a.directories) and (
- b >= len(dir_b.directories)
- or dir_b.directories[b].name > dir_a.directories[a].name
+ b >= len(dir_b.directories) or dir_b.directories[b].name > dir_a.directories[a].name
):
self.diff_trees(
dir_a.directories[a].digest,
@@ -838,9 +780,7 @@ class CASCache:
break
# Something went wrong here
- raise CASCacheError(
- "System error while removing ref '{}': {}".format(ref, e)
- ) from e
+ raise CASCacheError("System error while removing ref '{}': {}".format(ref, e)) from e
def _get_subdir(self, tree, subdir):
head, name = os.path.split(subdir)
@@ -858,9 +798,7 @@ class CASCache:
raise CASCacheError("Subdirectory {} not found".format(name))
- def _reachable_refs_dir(
- self, reachable, tree, update_mtime=False, check_exists=False
- ):
+ def _reachable_refs_dir(self, reachable, tree, update_mtime=False, check_exists=False):
if tree.hash in reachable:
return
try:
@@ -891,10 +829,7 @@ class CASCache:
for dirnode in directory.directories:
self._reachable_refs_dir(
- reachable,
- dirnode.digest,
- update_mtime=update_mtime,
- check_exists=check_exists,
+ reachable, dirnode.digest, update_mtime=update_mtime, check_exists=check_exists,
)
# _temporary_object():
@@ -943,9 +878,7 @@ class CASCache:
return _CASBatchRead(remote)
# Helper function for _fetch_directory().
- def _fetch_directory_node(
- self, remote, digest, batch, fetch_queue, fetch_next_queue, *, recursive=False
- ):
+ def _fetch_directory_node(self, remote, digest, batch, fetch_queue, fetch_next_queue, *, recursive=False):
in_local_cache = os.path.exists(self.objpath(digest))
if in_local_cache:
@@ -985,9 +918,7 @@ class CASCache:
while len(fetch_queue) + len(fetch_next_queue) > 0:
if not fetch_queue:
- batch = self._fetch_directory_batch(
- remote, batch, fetch_queue, fetch_next_queue
- )
+ batch = self._fetch_directory_batch(remote, batch, fetch_queue, fetch_next_queue)
dir_digest = fetch_queue.pop(0)
@@ -999,12 +930,7 @@ class CASCache:
for dirnode in directory.directories:
batch = self._fetch_directory_node(
- remote,
- dirnode.digest,
- batch,
- fetch_queue,
- fetch_next_queue,
- recursive=True,
+ remote, dirnode.digest, batch, fetch_queue, fetch_next_queue, recursive=True,
)
# Fetch final batch
@@ -1116,10 +1042,7 @@ class CASCache:
if messenger:
messenger.message(
- Message(
- MessageType.WARN,
- "Buildbox-casd didn't exit in time and has been killed",
- )
+ Message(MessageType.WARN, "Buildbox-casd didn't exit in time and has been killed",)
)
self._casd_process = None
return
@@ -1155,9 +1078,7 @@ class CASCache:
# (subprocess.Process): The casd process that is used for the current cascache
#
def get_casd_process(self):
- assert (
- self._casd_process is not None
- ), "This should only be called with a running buildbox-casd process"
+ assert self._casd_process is not None, "This should only be called with a running buildbox-casd process"
return self._casd_process
diff --git a/src/buildstream/_cas/casremote.py b/src/buildstream/_cas/casremote.py
index f6be2cdab..a0308bdbf 100644
--- a/src/buildstream/_cas/casremote.py
+++ b/src/buildstream/_cas/casremote.py
@@ -83,9 +83,7 @@ class CASRemote(BaseRemote):
self.init()
- return self.cascache.add_object(
- buffer=message_buffer, instance_name=self.local_cas_instance_name
- )
+ return self.cascache.add_object(buffer=message_buffer, instance_name=self.local_cas_instance_name)
# Represents a batch of blobs queued for fetching.
@@ -125,25 +123,19 @@ class _CASBatchRead:
if missing_blobs is None:
raise BlobNotFound(
response.digest.hash,
- "Failed to download blob {}: {}".format(
- response.digest.hash, response.status.code
- ),
+ "Failed to download blob {}: {}".format(response.digest.hash, response.status.code),
)
missing_blobs.append(response.digest)
if response.status.code != code_pb2.OK:
raise CASRemoteError(
- "Failed to download blob {}: {}".format(
- response.digest.hash, response.status.code
- )
+ "Failed to download blob {}: {}".format(response.digest.hash, response.status.code)
)
if response.digest.size_bytes != len(response.data):
raise CASRemoteError(
"Failed to download blob {}: expected {} bytes, received {} bytes".format(
- response.digest.hash,
- response.digest.size_bytes,
- len(response.data),
+ response.digest.hash, response.digest.size_bytes, len(response.data),
)
)
@@ -188,8 +180,6 @@ class _CASBatchUpdate:
reason = None
raise CASRemoteError(
- "Failed to upload blob {}: {}".format(
- response.digest.hash, response.status.code
- ),
+ "Failed to upload blob {}: {}".format(response.digest.hash, response.status.code),
reason=reason,
)
diff --git a/src/buildstream/_cas/casserver.py b/src/buildstream/_cas/casserver.py
index 327b087c4..77f51256c 100644
--- a/src/buildstream/_cas/casserver.py
+++ b/src/buildstream/_cas/casserver.py
@@ -67,9 +67,7 @@ _MAX_PAYLOAD_BYTES = 1024 * 1024
#
@contextmanager
def create_server(repo, *, enable_push, quota, index_only):
- cas = CASCache(
- os.path.abspath(repo), cache_quota=quota, protect_session_blobs=False
- )
+ cas = CASCache(os.path.abspath(repo), cache_quota=quota, protect_session_blobs=False)
try:
artifactdir = os.path.join(os.path.abspath(repo), "artifacts", "refs")
@@ -88,9 +86,7 @@ def create_server(repo, *, enable_push, quota, index_only):
_ContentAddressableStorageServicer(cas, enable_push=enable_push), server
)
- remote_execution_pb2_grpc.add_CapabilitiesServicer_to_server(
- _CapabilitiesServicer(), server
- )
+ remote_execution_pb2_grpc.add_CapabilitiesServicer_to_server(_CapabilitiesServicer(), server)
buildstream_pb2_grpc.add_ReferenceStorageServicer_to_server(
_ReferenceStorageServicer(cas, enable_push=enable_push), server
@@ -100,22 +96,13 @@ def create_server(repo, *, enable_push, quota, index_only):
_ArtifactServicer(cas, artifactdir, update_cas=not index_only), server
)
- source_pb2_grpc.add_SourceServiceServicer_to_server(
- _SourceServicer(sourcedir), server
- )
+ source_pb2_grpc.add_SourceServiceServicer_to_server(_SourceServicer(sourcedir), server)
# Create up reference storage and artifact capabilities
- artifact_capabilities = buildstream_pb2.ArtifactCapabilities(
- allow_updates=enable_push
- )
- source_capabilities = buildstream_pb2.SourceCapabilities(
- allow_updates=enable_push
- )
+ artifact_capabilities = buildstream_pb2.ArtifactCapabilities(allow_updates=enable_push)
+ source_capabilities = buildstream_pb2.SourceCapabilities(allow_updates=enable_push)
buildstream_pb2_grpc.add_CapabilitiesServicer_to_server(
- _BuildStreamCapabilitiesServicer(
- artifact_capabilities, source_capabilities
- ),
- server,
+ _BuildStreamCapabilitiesServicer(artifact_capabilities, source_capabilities), server,
)
yield server
@@ -130,16 +117,10 @@ def create_server(repo, *, enable_push, quota, index_only):
@click.option("--server-cert", help="Public server certificate for TLS (PEM-encoded)")
@click.option("--client-certs", help="Public client certificates for TLS (PEM-encoded)")
@click.option(
- "--enable-push",
- is_flag=True,
- help="Allow clients to upload blobs and update artifact cache",
+ "--enable-push", is_flag=True, help="Allow clients to upload blobs and update artifact cache",
)
@click.option(
- "--quota",
- type=click.INT,
- default=10e9,
- show_default=True,
- help="Maximum disk usage in bytes",
+ "--quota", type=click.INT, default=10e9, show_default=True, help="Maximum disk usage in bytes",
)
@click.option(
"--index-only",
@@ -147,31 +128,24 @@ def create_server(repo, *, enable_push, quota, index_only):
help='Only provide the BuildStream artifact and source services ("index"), not the CAS ("storage")',
)
@click.argument("repo")
-def server_main(
- repo, port, server_key, server_cert, client_certs, enable_push, quota, index_only
-):
+def server_main(repo, port, server_key, server_cert, client_certs, enable_push, quota, index_only):
# Handle SIGTERM by calling sys.exit(0), which will raise a SystemExit exception,
# properly executing cleanup code in `finally` clauses and context managers.
# This is required to terminate buildbox-casd on SIGTERM.
signal.signal(signal.SIGTERM, lambda signalnum, frame: sys.exit(0))
- with create_server(
- repo, quota=quota, enable_push=enable_push, index_only=index_only
- ) as server:
+ with create_server(repo, quota=quota, enable_push=enable_push, index_only=index_only) as server:
use_tls = bool(server_key)
if bool(server_cert) != use_tls:
click.echo(
- "ERROR: --server-key and --server-cert are both required for TLS",
- err=True,
+ "ERROR: --server-key and --server-cert are both required for TLS", err=True,
)
sys.exit(-1)
if client_certs and not use_tls:
- click.echo(
- "ERROR: --client-certs can only be used with --server-key", err=True
- )
+ click.echo("ERROR: --client-certs can only be used with --server-key", err=True)
sys.exit(-1)
if use_tls:
@@ -274,9 +248,7 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
break
try:
- os.posix_fallocate(
- out.fileno(), 0, client_digest.size_bytes
- )
+ os.posix_fallocate(out.fileno(), 0, client_digest.size_bytes)
break
except OSError as e:
# Multiple upload can happen in the same time
@@ -322,9 +294,7 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
return response
-class _ContentAddressableStorageServicer(
- remote_execution_pb2_grpc.ContentAddressableStorageServicer
-):
+class _ContentAddressableStorageServicer(remote_execution_pb2_grpc.ContentAddressableStorageServicer):
def __init__(self, cas, *, enable_push):
super().__init__()
self.cas = cas
@@ -426,9 +396,7 @@ class _CapabilitiesServicer(remote_execution_pb2_grpc.CapabilitiesServicer):
cache_capabilities.digest_function.append(remote_execution_pb2.SHA256)
cache_capabilities.action_cache_update_capabilities.update_enabled = False
cache_capabilities.max_batch_total_size_bytes = _MAX_PAYLOAD_BYTES
- cache_capabilities.symlink_absolute_path_strategy = (
- remote_execution_pb2.CacheCapabilities.ALLOWED
- )
+ cache_capabilities.symlink_absolute_path_strategy = remote_execution_pb2.CacheCapabilities.ALLOWED
response.deprecated_api_version.major = 2
response.low_api_version.major = 2
@@ -574,20 +542,17 @@ class _ArtifactServicer(artifact_pb2_grpc.ArtifactServiceServicer):
directory.ParseFromString(f.read())
except FileNotFoundError:
context.abort(
- grpc.StatusCode.FAILED_PRECONDITION,
- "Artifact {} specified but no files found".format(name),
+ grpc.StatusCode.FAILED_PRECONDITION, "Artifact {} specified but no files found".format(name),
)
except DecodeError:
context.abort(
- grpc.StatusCode.FAILED_PRECONDITION,
- "Artifact {} specified but directory not found".format(name),
+ grpc.StatusCode.FAILED_PRECONDITION, "Artifact {} specified but directory not found".format(name),
)
def _check_file(self, name, digest, context):
if not os.path.exists(self.cas.objpath(digest)):
context.abort(
- grpc.StatusCode.FAILED_PRECONDITION,
- "Artifact {} specified but not found".format(name),
+ grpc.StatusCode.FAILED_PRECONDITION, "Artifact {} specified but not found".format(name),
)
diff --git a/src/buildstream/_context.py b/src/buildstream/_context.py
index 17fe691d4..c550a1e62 100644
--- a/src/buildstream/_context.py
+++ b/src/buildstream/_context.py
@@ -215,9 +215,7 @@ class Context:
# a $XDG_CONFIG_HOME/buildstream.conf file
#
if not config:
- default_config = os.path.join(
- os.environ["XDG_CONFIG_HOME"], "buildstream.conf"
- )
+ default_config = os.path.join(os.environ["XDG_CONFIG_HOME"], "buildstream.conf")
if os.path.exists(default_config):
config = default_config
@@ -232,9 +230,7 @@ class Context:
# Give obsoletion warnings
if "builddir" in defaults:
- raise LoadError(
- "builddir is obsolete, use cachedir", LoadErrorReason.INVALID_DATA
- )
+ raise LoadError("builddir is obsolete, use cachedir", LoadErrorReason.INVALID_DATA)
if "artifactdir" in defaults:
raise LoadError("artifactdir is obsolete", LoadErrorReason.INVALID_DATA)
@@ -271,12 +267,9 @@ class Context:
# Relative paths don't make sense in user configuration. The exception is
# workspacedir where `.` is useful as it will be combined with the name
# specified on the command line.
- if not os.path.isabs(path) and not (
- directory == "workspacedir" and path == "."
- ):
+ if not os.path.isabs(path) and not (directory == "workspacedir" and path == "."):
raise LoadError(
- "{} must be an absolute path".format(directory),
- LoadErrorReason.INVALID_DATA,
+ "{} must be an absolute path".format(directory), LoadErrorReason.INVALID_DATA,
)
# add directories not set by users
@@ -287,11 +280,7 @@ class Context:
# Move old artifact cas to cas if it exists and create symlink
old_casdir = os.path.join(self.cachedir, "artifacts", "cas")
- if (
- os.path.exists(old_casdir)
- and not os.path.islink(old_casdir)
- and not os.path.exists(self.casdir)
- ):
+ if os.path.exists(old_casdir) and not os.path.islink(old_casdir) and not os.path.exists(self.casdir):
os.rename(old_casdir, self.casdir)
os.symlink(self.casdir, old_casdir)
@@ -316,9 +305,7 @@ class Context:
self.config_cache_quota_string = cache.get_str("quota")
try:
- self.config_cache_quota = utils._parse_size(
- self.config_cache_quota_string, cas_volume
- )
+ self.config_cache_quota = utils._parse_size(self.config_cache_quota_string, cas_volume)
except utils.UtilError as e:
raise LoadError(
"{}\nPlease specify the value in bytes or as a % of full disk space.\n"
@@ -335,9 +322,7 @@ class Context:
# Load remote execution config getting pull-artifact-files from it
remote_execution = defaults.get_mapping("remote-execution", default=None)
if remote_execution:
- self.pull_artifact_files = remote_execution.get_bool(
- "pull-artifact-files", default=True
- )
+ self.pull_artifact_files = remote_execution.get_bool("pull-artifact-files", default=True)
# This stops it being used in the remote service set up
remote_execution.safe_del("pull-artifact-files")
# Don't pass the remote execution settings if that was the only option
@@ -357,15 +342,7 @@ class Context:
# Load logging config
logging = defaults.get_mapping("logging")
logging.validate_keys(
- [
- "key-length",
- "verbose",
- "error-lines",
- "message-lines",
- "debug",
- "element-format",
- "message-format",
- ]
+ ["key-length", "verbose", "error-lines", "message-lines", "debug", "element-format", "message-format",]
)
self.log_key_length = logging.get_int("key-length")
self.log_debug = logging.get_bool("debug")
@@ -377,9 +354,7 @@ class Context:
# Load scheduler config
scheduler = defaults.get_mapping("scheduler")
- scheduler.validate_keys(
- ["on-error", "fetchers", "builders", "pushers", "network-retries"]
- )
+ scheduler.validate_keys(["on-error", "fetchers", "builders", "pushers", "network-retries"])
self.sched_error_action = scheduler.get_enum("on-error", _SchedulerErrorAction)
self.sched_fetchers = scheduler.get_int("fetchers")
self.sched_builders = scheduler.get_int("builders")
@@ -395,9 +370,7 @@ class Context:
if self.build_dependencies not in ["plan", "all"]:
provenance = build.get_scalar("dependencies").get_provenance()
raise LoadError(
- "{}: Invalid value for 'dependencies'. Choose 'plan' or 'all'.".format(
- provenance
- ),
+ "{}: Invalid value for 'dependencies'. Choose 'plan' or 'all'.".format(provenance),
LoadErrorReason.INVALID_DATA,
)
@@ -408,14 +381,7 @@ class Context:
# on the overrides are expected to validate elsewhere.
for overrides in self._project_overrides.values():
overrides.validate_keys(
- [
- "artifacts",
- "source-caches",
- "options",
- "strict",
- "default-mirror",
- "remote-execution",
- ]
+ ["artifacts", "source-caches", "options", "strict", "default-mirror", "remote-execution",]
)
@property
@@ -567,10 +533,7 @@ class Context:
log_level = CASLogLevel.WARNING
self._cascache = CASCache(
- self.cachedir,
- casd=self.use_casd,
- cache_quota=self.config_cache_quota,
- log_level=log_level,
+ self.cachedir, casd=self.use_casd, cache_quota=self.config_cache_quota, log_level=log_level,
)
return self._cascache
diff --git a/src/buildstream/_exceptions.py b/src/buildstream/_exceptions.py
index f05e38162..89dfb49be 100644
--- a/src/buildstream/_exceptions.py
+++ b/src/buildstream/_exceptions.py
@@ -52,9 +52,7 @@ def get_last_exception():
#
def get_last_task_error():
if "BST_TEST_SUITE" not in os.environ:
- raise BstError(
- "Getting the last task error is only supported when running tests"
- )
+ raise BstError("Getting the last task error is only supported when running tests")
global _last_task_error_domain
global _last_task_error_reason
@@ -109,9 +107,7 @@ class ErrorDomain(Enum):
# context can then be communicated back to the main process.
#
class BstError(Exception):
- def __init__(
- self, message, *, detail=None, domain=None, reason=None, temporary=False
- ):
+ def __init__(self, message, *, detail=None, domain=None, reason=None, temporary=False):
global _last_exception
super().__init__(message)
@@ -148,9 +144,7 @@ class BstError(Exception):
#
class PluginError(BstError):
def __init__(self, message, reason=None, temporary=False):
- super().__init__(
- message, domain=ErrorDomain.PLUGIN, reason=reason, temporary=False
- )
+ super().__init__(message, domain=ErrorDomain.PLUGIN, reason=reason, temporary=False)
# LoadErrorReason
@@ -266,9 +260,7 @@ class ImplError(BstError):
# Raised if the current platform is not supported.
class PlatformError(BstError):
def __init__(self, message, reason=None, detail=None):
- super().__init__(
- message, domain=ErrorDomain.PLATFORM, reason=reason, detail=detail
- )
+ super().__init__(message, domain=ErrorDomain.PLATFORM, reason=reason, detail=detail)
# SandboxError
@@ -277,9 +269,7 @@ class PlatformError(BstError):
#
class SandboxError(BstError):
def __init__(self, message, detail=None, reason=None):
- super().__init__(
- message, detail=detail, domain=ErrorDomain.SANDBOX, reason=reason
- )
+ super().__init__(message, detail=detail, domain=ErrorDomain.SANDBOX, reason=reason)
# SourceCacheError
@@ -288,9 +278,7 @@ class SandboxError(BstError):
#
class SourceCacheError(BstError):
def __init__(self, message, detail=None, reason=None):
- super().__init__(
- message, detail=detail, domain=ErrorDomain.SANDBOX, reason=reason
- )
+ super().__init__(message, detail=detail, domain=ErrorDomain.SANDBOX, reason=reason)
# ArtifactError
@@ -300,11 +288,7 @@ class SourceCacheError(BstError):
class ArtifactError(BstError):
def __init__(self, message, *, detail=None, reason=None, temporary=False):
super().__init__(
- message,
- detail=detail,
- domain=ErrorDomain.ARTIFACT,
- reason=reason,
- temporary=True,
+ message, detail=detail, domain=ErrorDomain.ARTIFACT, reason=reason, temporary=True,
)
@@ -314,9 +298,7 @@ class ArtifactError(BstError):
#
class RemoteError(BstError):
def __init__(self, message, *, detail=None, reason=None):
- super().__init__(
- message, detail=detail, domain=ErrorDomain.REMOTE, reason=reason
- )
+ super().__init__(message, detail=detail, domain=ErrorDomain.REMOTE, reason=reason)
# CASError
@@ -326,11 +308,7 @@ class RemoteError(BstError):
class CASError(BstError):
def __init__(self, message, *, detail=None, reason=None, temporary=False):
super().__init__(
- message,
- detail=detail,
- domain=ErrorDomain.CAS,
- reason=reason,
- temporary=True,
+ message, detail=detail, domain=ErrorDomain.CAS, reason=reason, temporary=True,
)
@@ -355,9 +333,7 @@ class CASCacheError(CASError):
#
class PipelineError(BstError):
def __init__(self, message, *, detail=None, reason=None):
- super().__init__(
- message, detail=detail, domain=ErrorDomain.PIPELINE, reason=reason
- )
+ super().__init__(message, detail=detail, domain=ErrorDomain.PIPELINE, reason=reason)
# StreamError
@@ -373,9 +349,7 @@ class StreamError(BstError):
if message is None:
message = ""
- super().__init__(
- message, detail=detail, domain=ErrorDomain.STREAM, reason=reason
- )
+ super().__init__(message, detail=detail, domain=ErrorDomain.STREAM, reason=reason)
self.terminated = terminated
@@ -404,6 +378,4 @@ class SkipJob(Exception):
#
class ArtifactElementError(BstError):
def __init__(self, message, *, detail=None, reason=None):
- super().__init__(
- message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason
- )
+ super().__init__(message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason)
diff --git a/src/buildstream/_frontend/app.py b/src/buildstream/_frontend/app.py
index 3be035c0c..9a12f3083 100644
--- a/src/buildstream/_frontend/app.py
+++ b/src/buildstream/_frontend/app.py
@@ -289,8 +289,7 @@ class App:
# don't stop them with an offer to create a project for them.
if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
click.echo(
- "No project found. You can create a new project like so:",
- err=True,
+ "No project found. You can create a new project like so:", err=True,
)
click.echo("", err=True)
click.echo(" bst init", err=True)
@@ -309,13 +308,9 @@ class App:
if session_name:
elapsed = self.stream.elapsed_time
- if (
- isinstance(e, StreamError) and e.terminated
- ): # pylint: disable=no-member
+ if isinstance(e, StreamError) and e.terminated: # pylint: disable=no-member
self._message(
- MessageType.WARN,
- session_name + " Terminated",
- elapsed=elapsed,
+ MessageType.WARN, session_name + " Terminated", elapsed=elapsed,
)
else:
self._message(MessageType.FAIL, session_name, elapsed=elapsed)
@@ -330,8 +325,7 @@ class App:
self._error_exit(e)
except RecursionError:
click.echo(
- "RecursionError: Dependency depth is too large. Maximum recursion depth exceeded.",
- err=True,
+ "RecursionError: Dependency depth is too large. Maximum recursion depth exceeded.", err=True,
)
sys.exit(-1)
@@ -339,9 +333,7 @@ class App:
# No exceptions occurred, print session time and summary
if session_name:
self._message(
- MessageType.SUCCESS,
- session_name,
- elapsed=self.stream.elapsed_time,
+ MessageType.SUCCESS, session_name, elapsed=self.stream.elapsed_time,
)
if self._started:
self._print_summary()
@@ -389,8 +381,7 @@ class App:
# Abort if the project.conf already exists, unless `--force` was specified in `bst init`
if not force and os.path.exists(project_path):
raise AppError(
- "A project.conf already exists at: {}".format(project_path),
- reason="project-exists",
+ "A project.conf already exists at: {}".format(project_path), reason="project-exists",
)
if project_name:
@@ -407,11 +398,7 @@ class App:
)
else:
# Collect the parameters using an interactive session
- (
- project_name,
- format_version,
- element_path,
- ) = self._init_project_interactive(
+ (project_name, format_version, element_path,) = self._init_project_interactive(
project_name, format_version, element_path
)
@@ -419,20 +406,14 @@ class App:
try:
os.makedirs(directory, exist_ok=True)
except IOError as e:
- raise AppError(
- "Error creating project directory {}: {}".format(directory, e)
- ) from e
+ raise AppError("Error creating project directory {}: {}".format(directory, e)) from e
# Create the elements sub-directory if it doesnt exist
elements_path = os.path.join(directory, element_path)
try:
os.makedirs(elements_path, exist_ok=True)
except IOError as e:
- raise AppError(
- "Error creating elements sub-directory {}: {}".format(
- elements_path, e
- )
- ) from e
+ raise AppError("Error creating elements sub-directory {}: {}".format(elements_path, e)) from e
# Dont use ruamel.yaml here, because it doesnt let
# us programatically insert comments or whitespace at
@@ -554,11 +535,7 @@ class App:
def _maybe_render_status(self):
# If we're suspended or terminating, then dont render the status area
- if (
- self._status
- and self.stream
- and not (self.stream.suspended or self.stream.terminated)
- ):
+ if self._status and self.stream and not (self.stream.suspended or self.stream.terminated):
self._status.render()
#
@@ -591,9 +568,7 @@ class App:
try:
choice = click.prompt(
"Choice:",
- value_proc=_prefix_choice_value_proc(
- ["continue", "quit", "terminate"]
- ),
+ value_proc=_prefix_choice_value_proc(["continue", "quit", "terminate"]),
default="continue",
err=True,
)
@@ -641,9 +616,7 @@ class App:
self._status.clear()
click.echo(
"\n\n\nBUG: Message handling out of sync, "
- + "unable to retrieve failure message for element {}\n\n\n\n\n".format(
- full_name
- ),
+ + "unable to retrieve failure message for element {}\n\n\n\n\n".format(full_name),
err=True,
)
else:
@@ -682,9 +655,7 @@ class App:
if failure.logfile:
summary += " (l)og - View the full log file\n"
if failure.sandbox:
- summary += (
- " (s)hell - Drop into a shell in the failed build sandbox\n"
- )
+ summary += " (s)hell - Drop into a shell in the failed build sandbox\n"
summary += "\nPressing ^C will terminate jobs and exit\n"
choices = ["continue", "quit", "terminate", "retry"]
@@ -698,16 +669,12 @@ class App:
click.echo(summary, err=True)
self._notify(
- "BuildStream failure",
- "{} on element {}".format(failure.action_name, full_name),
+ "BuildStream failure", "{} on element {}".format(failure.action_name, full_name),
)
try:
choice = click.prompt(
- "Choice:",
- default="continue",
- err=True,
- value_proc=_prefix_choice_value_proc(choices),
+ "Choice:", default="continue", err=True, value_proc=_prefix_choice_value_proc(choices),
)
except click.Abort:
# Ensure a newline after automatically printed '^C'
@@ -718,26 +685,17 @@ class App:
#
if choice == "shell":
click.echo(
- "\nDropping into an interactive shell in the failed build sandbox\n",
- err=True,
+ "\nDropping into an interactive shell in the failed build sandbox\n", err=True,
)
try:
unique_id, element_key = element
prompt = self.shell_prompt(full_name, element_key)
self.stream.shell(
- None,
- Scope.BUILD,
- prompt,
- isolate=True,
- usebuildtree="always",
- unique_id=unique_id,
+ None, Scope.BUILD, prompt, isolate=True, usebuildtree="always", unique_id=unique_id,
)
except BstError as e:
click.echo(
- "Error while attempting to create interactive shell: {}".format(
- e
- ),
- err=True,
+ "Error while attempting to create interactive shell: {}".format(e), err=True,
)
elif choice == "log":
with open(failure.logfile, "r") as logfile:
@@ -752,9 +710,7 @@ class App:
click.echo("\nCompleting ongoing tasks before quitting\n", err=True)
self.stream.quit()
elif choice == "continue":
- click.echo(
- "\nContinuing with other non failing elements\n", err=True
- )
+ click.echo("\nContinuing with other non failing elements\n", err=True)
elif choice == "retry":
click.echo("\nRetrying failed job\n", err=True)
unique_id = element[0]
@@ -768,10 +724,7 @@ class App:
self._started = True
if self._session_name:
self.logger.print_heading(
- self.project,
- self.stream,
- log_file=self._main_options["log_file"],
- styling=self.colors,
+ self.project, self.stream, log_file=self._main_options["log_file"], styling=self.colors,
)
#
@@ -779,9 +732,7 @@ class App:
#
def _print_summary(self):
click.echo("", err=True)
- self.logger.print_summary(
- self.stream, self._main_options["log_file"], styling=self.colors
- )
+ self.logger.print_summary(self.stream, self._main_options["log_file"], styling=self.colors)
# _error_exit()
#
@@ -797,9 +748,7 @@ class App:
def _error_exit(self, error, prefix=None):
click.echo("", err=True)
- if (
- self.context is None or self.context.log_debug is None
- ): # Context might not be initialized, default to cmd
+ if self.context is None or self.context.log_debug is None: # Context might not be initialized, default to cmd
debug = self._main_options["debug"]
else:
debug = self.context.log_debug
@@ -831,10 +780,7 @@ class App:
return
# Hold on to the failure messages
- if (
- message.message_type in [MessageType.FAIL, MessageType.BUG]
- and message.element_name is not None
- ):
+ if message.message_type in [MessageType.FAIL, MessageType.BUG] and message.element_name is not None:
self._fail_messages[message.element_name] = message
# Send to frontend if appropriate
@@ -866,9 +812,8 @@ class App:
# Some validation routines for project initialization
#
def _assert_format_version(self, format_version):
- message = (
- "The version must be supported by this "
- + "version of buildstream (0 - {})\n".format(BST_FORMAT_VERSION)
+ message = "The version must be supported by this " + "version of buildstream (0 - {})\n".format(
+ BST_FORMAT_VERSION
)
# Validate that it is an integer
@@ -911,9 +856,7 @@ class App:
# format_version (int): The user selected format version
# element_path (str): The user selected element path
#
- def _init_project_interactive(
- self, project_name, format_version=BST_FORMAT_VERSION, element_path="elements"
- ):
+ def _init_project_interactive(self, project_name, format_version=BST_FORMAT_VERSION, element_path="elements"):
def project_name_proc(user_input):
try:
node._assert_symbol_name(user_input, "project name")
@@ -940,12 +883,8 @@ class App:
# Collect project name
click.echo("", err=True)
- click.echo(
- self._content_profile.fmt("Choose a unique name for your project"), err=True
- )
- click.echo(
- self._format_profile.fmt("-------------------------------------"), err=True
- )
+ click.echo(self._content_profile.fmt("Choose a unique name for your project"), err=True)
+ click.echo(self._format_profile.fmt("-------------------------------------"), err=True)
click.echo("", err=True)
click.echo(
self._detail_profile.fmt(
@@ -969,25 +908,15 @@ class App:
err=True,
)
click.echo("", err=True)
- project_name = click.prompt(
- self._content_profile.fmt("Project name"),
- value_proc=project_name_proc,
- err=True,
- )
+ project_name = click.prompt(self._content_profile.fmt("Project name"), value_proc=project_name_proc, err=True,)
click.echo("", err=True)
# Collect format version
click.echo(
- self._content_profile.fmt(
- "Select the minimum required format version for your project"
- ),
- err=True,
+ self._content_profile.fmt("Select the minimum required format version for your project"), err=True,
)
click.echo(
- self._format_profile.fmt(
- "-----------------------------------------------------------"
- ),
- err=True,
+ self._format_profile.fmt("-----------------------------------------------------------"), err=True,
)
click.echo("", err=True)
click.echo(
@@ -1047,10 +976,7 @@ class App:
)
click.echo("", err=True)
element_path = click.prompt(
- self._content_profile.fmt("Element path"),
- value_proc=element_path_proc,
- default=element_path,
- err=True,
+ self._content_profile.fmt("Element path"), value_proc=element_path_proc, default=element_path, err=True,
)
return (project_name, format_version, element_path)
@@ -1070,9 +996,7 @@ class App:
#
def _prefix_choice_value_proc(choices):
def value_proc(user_input):
- remaining_candidate = [
- choice for choice in choices if choice.startswith(user_input)
- ]
+ remaining_candidate = [choice for choice in choices if choice.startswith(user_input)]
if not remaining_candidate:
raise UsageError("Expected one of {}, got {}".format(choices, user_input))
@@ -1080,10 +1004,6 @@ def _prefix_choice_value_proc(choices):
if len(remaining_candidate) == 1:
return remaining_candidate[0]
else:
- raise UsageError(
- "Ambiguous input. '{}' can refer to one of {}".format(
- user_input, remaining_candidate
- )
- )
+ raise UsageError("Ambiguous input. '{}' can refer to one of {}".format(user_input, remaining_candidate))
return value_proc
diff --git a/src/buildstream/_frontend/cli.py b/src/buildstream/_frontend/cli.py
index 9a73ab375..bffcf3786 100644
--- a/src/buildstream/_frontend/cli.py
+++ b/src/buildstream/_frontend/cli.py
@@ -54,9 +54,7 @@ def search_command(args, *, context=None):
command = command_ctx.command.get_command(command_ctx, cmd)
if command is None:
return None
- command_ctx = command.make_context(
- command.name, [command.name], parent=command_ctx, resilient_parsing=True
- )
+ command_ctx = command.make_context(command.name, [command.name], parent=command_ctx, resilient_parsing=True)
return command_ctx
@@ -64,11 +62,7 @@ def search_command(args, *, context=None):
# Completion for completing command names as help arguments
def complete_commands(cmd, args, incomplete):
command_ctx = search_command(args[1:])
- if (
- command_ctx
- and command_ctx.command
- and isinstance(command_ctx.command, click.MultiCommand)
- ):
+ if command_ctx and command_ctx.command and isinstance(command_ctx.command, click.MultiCommand):
return [
subcommand + " "
for subcommand in command_ctx.command.list_commands(command_ctx)
@@ -108,9 +102,7 @@ def complete_target(args, incomplete):
else:
# Check if this directory or any of its parent directories
# contain a project config file
- base_directory, _ = utils._search_upward_for_files(
- base_directory, [project_conf]
- )
+ base_directory, _ = utils._search_upward_for_files(base_directory, [project_conf])
if base_directory is None:
# No project_conf was found in base_directory or its parents, no need
@@ -164,11 +156,7 @@ def complete_artifact(orig_args, args, incomplete):
# element targets are valid artifact names
complete_list = complete_target(args, incomplete)
- complete_list.extend(
- ref
- for ref in ctx.artifactcache.list_artifacts()
- if ref.startswith(incomplete)
- )
+ complete_list.extend(ref for ref in ctx.artifactcache.list_artifacts() if ref.startswith(incomplete))
return complete_list
@@ -216,10 +204,7 @@ def validate_output_streams():
flags = fcntl.fcntl(fileno, fcntl.F_GETFL)
if flags & os.O_NONBLOCK:
click.echo(
- "{} is currently set to O_NONBLOCK, try opening a new shell".format(
- stream.name
- ),
- err=True,
+ "{} is currently set to O_NONBLOCK, try opening a new shell".format(stream.name), err=True,
)
sys.exit(-1)
@@ -232,8 +217,7 @@ def handle_bst_force_start_method_env():
if existing_start_method is None:
multiprocessing.set_start_method(start_method)
print(
- bst_force_start_method_str
- + ": multiprocessing start method forced to:",
+ bst_force_start_method_str + ": multiprocessing start method forced to:",
start_method,
file=sys.stderr,
flush=True,
@@ -243,16 +227,14 @@ def handle_bst_force_start_method_env():
# multiple times in the same executable, so guard against that
# here.
print(
- bst_force_start_method_str
- + ": multiprocessing start method already set to:",
+ bst_force_start_method_str + ": multiprocessing start method already set to:",
existing_start_method,
file=sys.stderr,
flush=True,
)
else:
print(
- bst_force_start_method_str
- + ": cannot set multiprocessing start method to:",
+ bst_force_start_method_str + ": cannot set multiprocessing start method to:",
start_method,
", already set to:",
existing_start_method,
@@ -262,9 +244,7 @@ def handle_bst_force_start_method_env():
sys.exit(-1)
-def override_main(
- self, args=None, prog_name=None, complete_var=None, standalone_mode=True, **extra
-):
+def override_main(self, args=None, prog_name=None, complete_var=None, standalone_mode=True, **extra):
# Hook for the Bash completion. This only activates if the Bash
# completion is actually enabled, otherwise this is quite a fast
@@ -295,14 +275,7 @@ def override_main(
# case of testing, our tests precede our entrypoint, so we do our best.
handle_bst_force_start_method_env()
- original_main(
- self,
- args=args,
- prog_name=prog_name,
- complete_var=None,
- standalone_mode=standalone_mode,
- **extra
- )
+ original_main(self, args=args, prog_name=prog_name, complete_var=None, standalone_mode=standalone_mode, **extra)
original_main = click.BaseCommand.main
@@ -325,14 +298,9 @@ def print_version(ctx, param, value):
@click.group(context_settings=dict(help_option_names=["-h", "--help"]))
+@click.option("--version", is_flag=True, callback=print_version, expose_value=False, is_eager=True)
@click.option(
- "--version", is_flag=True, callback=print_version, expose_value=False, is_eager=True
-)
-@click.option(
- "--config",
- "-c",
- type=click.Path(exists=True, dir_okay=False, readable=True),
- help="Configuration file to use",
+ "--config", "-c", type=click.Path(exists=True, dir_okay=False, readable=True), help="Configuration file to use",
)
@click.option(
"--directory",
@@ -348,47 +316,26 @@ def print_version(ctx, param, value):
help="What to do when an error is encountered",
)
@click.option(
- "--fetchers",
- type=click.INT,
- default=None,
- help="Maximum simultaneous download tasks",
+ "--fetchers", type=click.INT, default=None, help="Maximum simultaneous download tasks",
)
+@click.option("--builders", type=click.INT, default=None, help="Maximum simultaneous build tasks")
+@click.option("--pushers", type=click.INT, default=None, help="Maximum simultaneous upload tasks")
@click.option(
- "--builders", type=click.INT, default=None, help="Maximum simultaneous build tasks"
+ "--max-jobs", type=click.INT, default=None, help="Number of parallel jobs allowed for a given build task",
)
@click.option(
- "--pushers", type=click.INT, default=None, help="Maximum simultaneous upload tasks"
+ "--network-retries", type=click.INT, default=None, help="Maximum retries for network tasks",
)
@click.option(
- "--max-jobs",
- type=click.INT,
- default=None,
- help="Number of parallel jobs allowed for a given build task",
-)
-@click.option(
- "--network-retries",
- type=click.INT,
- default=None,
- help="Maximum retries for network tasks",
-)
-@click.option(
- "--no-interactive",
- is_flag=True,
- help="Force non interactive mode, otherwise this is automatically decided",
+ "--no-interactive", is_flag=True, help="Force non interactive mode, otherwise this is automatically decided",
)
@click.option("--verbose/--no-verbose", default=None, help="Be extra verbose")
@click.option("--debug/--no-debug", default=None, help="Print debugging output")
@click.option(
- "--error-lines",
- type=click.INT,
- default=None,
- help="Maximum number of lines to show from a task log",
+ "--error-lines", type=click.INT, default=None, help="Maximum number of lines to show from a task log",
)
@click.option(
- "--message-lines",
- type=click.INT,
- default=None,
- help="Maximum number of lines to show in a detailed message",
+ "--message-lines", type=click.INT, default=None, help="Maximum number of lines to show in a detailed message",
)
@click.option(
"--log-file",
@@ -396,9 +343,7 @@ def print_version(ctx, param, value):
help="A file to store the main log (allows storing the main log while in interactive mode)",
)
@click.option(
- "--colors/--no-colors",
- default=None,
- help="Force enable/disable ANSI color codes in output",
+ "--colors/--no-colors", default=None, help="Force enable/disable ANSI color codes in output",
)
@click.option(
"--strict/--no-strict",
@@ -415,9 +360,7 @@ def print_version(ctx, param, value):
help="Specify a project option",
)
@click.option(
- "--default-mirror",
- default=None,
- help="The mirror to fetch from first, before attempting other mirrors",
+ "--default-mirror", default=None, help="The mirror to fetch from first, before attempting other mirrors",
)
@click.option(
"--pull-buildtrees",
@@ -450,9 +393,7 @@ def cli(context, **kwargs):
# Help Command #
##################################################################
@cli.command(
- name="help",
- short_help="Print usage information",
- context_settings={"help_option_names": []},
+ name="help", short_help="Print usage information", context_settings={"help_option_names": []},
)
@click.argument("command", nargs=-1, metavar="COMMAND")
@click.pass_context
@@ -462,10 +403,7 @@ def help_command(ctx, command):
command_ctx = search_command(command, context=ctx.parent)
if not command_ctx:
click.echo(
- "Not a valid command: '{} {}'".format(
- ctx.parent.info_name, " ".join(command)
- ),
- err=True,
+ "Not a valid command: '{} {}'".format(ctx.parent.info_name, " ".join(command)), err=True,
)
sys.exit(-1)
@@ -477,10 +415,7 @@ def help_command(ctx, command):
if command:
detail = " {} ".format(" ".join(command))
click.echo(
- "\nFor usage on a specific command: {} help{}COMMAND".format(
- ctx.parent.info_name, detail
- ),
- err=True,
+ "\nFor usage on a specific command: {} help{}COMMAND".format(ctx.parent.info_name, detail), err=True,
)
@@ -503,14 +438,9 @@ def help_command(ctx, command):
show_default=True,
help="The subdirectory to store elements in",
)
-@click.option(
- "--force", "-f", is_flag=True, help="Allow overwriting an existing project.conf"
-)
+@click.option("--force", "-f", is_flag=True, help="Allow overwriting an existing project.conf")
@click.argument(
- "target-directory",
- nargs=1,
- required=False,
- type=click.Path(file_okay=False, writable=True),
+ "target-directory", nargs=1, required=False, type=click.Path(file_okay=False, writable=True),
)
@click.pass_obj
def init(app, project_name, format_version, element_path, force, target_directory):
@@ -522,9 +452,7 @@ def init(app, project_name, format_version, element_path, force, target_director
Unless `--project-name` is specified, this will be an
interactive session.
"""
- app.init_project(
- project_name, format_version, element_path, force, target_directory
- )
+ app.init_project(project_name, format_version, element_path, force, target_directory)
##################################################################
@@ -532,17 +460,10 @@ def init(app, project_name, format_version, element_path, force, target_director
##################################################################
@cli.command(short_help="Build elements in a pipeline")
@click.option(
- "--deps",
- "-d",
- default=None,
- type=click.Choice(["plan", "all"]),
- help="The dependencies to build",
+ "--deps", "-d", default=None, type=click.Choice(["plan", "all"]), help="The dependencies to build",
)
@click.option(
- "--remote",
- "-r",
- default=None,
- help="The URL of the remote cache (defaults to the first configured cache)",
+ "--remote", "-r", default=None, help="The URL of the remote cache (defaults to the first configured cache)",
)
@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
@@ -574,10 +495,7 @@ def build(app, elements, deps, remote):
ignore_junction_targets = True
app.stream.build(
- elements,
- selection=deps,
- ignore_junction_targets=ignore_junction_targets,
- remote=remote,
+ elements, selection=deps, ignore_junction_targets=ignore_junction_targets, remote=remote,
)
@@ -586,11 +504,7 @@ def build(app, elements, deps, remote):
##################################################################
@cli.command(short_help="Show elements in the pipeline")
@click.option(
- "--except",
- "except_",
- multiple=True,
- type=click.Path(readable=False),
- help="Except certain dependencies",
+ "--except", "except_", multiple=True, type=click.Path(readable=False), help="Except certain dependencies",
)
@click.option(
"--deps",
@@ -679,9 +593,7 @@ def show(app, elements, deps, except_, order, format_):
if not elements:
elements = app.project.get_default_targets()
- dependencies = app.stream.load_selection(
- elements, selection=deps, except_targets=except_
- )
+ dependencies = app.stream.load_selection(elements, selection=deps, except_targets=except_)
if order == "alpha":
dependencies = sorted(dependencies)
@@ -698,11 +610,7 @@ def show(app, elements, deps, except_, order, format_):
##################################################################
@cli.command(short_help="Shell into an element's sandbox environment")
@click.option(
- "--build",
- "-b",
- "build_",
- is_flag=True,
- help="Stage dependencies and sources to build",
+ "--build", "-b", "build_", is_flag=True, help="Stage dependencies and sources to build",
)
@click.option(
"--sysroot",
@@ -726,16 +634,10 @@ def show(app, elements, deps, except_, order, format_):
type=click.Choice(["ask", "try", "always", "never"]),
default="ask",
show_default=True,
- help=(
- "Use a buildtree. If `always` is set, will always fail to "
- "build if a buildtree is not available."
- ),
+ help=("Use a buildtree. If `always` is set, will always fail to " "build if a buildtree is not available."),
)
@click.option(
- "--pull",
- "pull_",
- is_flag=True,
- help="Attempt to pull missing or incomplete artifacts",
+ "--pull", "pull_", is_flag=True, help="Attempt to pull missing or incomplete artifacts",
)
@click.argument("element", required=False, type=click.Path(readable=False))
@click.argument("command", type=click.STRING, nargs=-1)
@@ -782,9 +684,7 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, pull_, c
if not element:
raise AppError('Missing argument "ELEMENT".')
- elements = app.stream.load_selection(
- (element,), selection=selection, use_artifact_config=True
- )
+ elements = app.stream.load_selection((element,), selection=selection, use_artifact_config=True)
# last one will be the element we want to stage, previous ones are
# elements to try and pull
@@ -811,20 +711,15 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, pull_, c
else:
if cli_buildtree == "always":
# Exit early if it won't be possible to even fetch a buildtree with always option
- raise AppError(
- "Artifact was created without buildtree, unable to launch shell with it"
- )
+ raise AppError("Artifact was created without buildtree, unable to launch shell with it")
click.echo(
- "WARNING: Artifact created without buildtree, shell will be loaded without it",
- err=True,
+ "WARNING: Artifact created without buildtree, shell will be loaded without it", err=True,
)
else:
# If the value has defaulted to ask and in non-interactive mode, don't consider the buildtree, this
# being the default behaviour of the command
if app.interactive and cli_buildtree == "ask":
- if cached and bool(
- click.confirm("Do you want to use the cached buildtree?")
- ):
+ if cached and bool(click.confirm("Do you want to use the cached buildtree?")):
use_buildtree = "always"
elif buildtree_exists:
try:
@@ -858,9 +753,7 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, pull_, c
pull_dependencies=pull_dependencies,
)
except BstError as e:
- raise AppError(
- "Error launching shell: {}".format(e), detail=e.detail
- ) from e
+ raise AppError("Error launching shell: {}".format(e), detail=e.detail) from e
# If there were no errors, we return the shell's exit code here.
sys.exit(exitcode)
@@ -894,22 +787,13 @@ def source():
help="The dependencies to fetch",
)
@click.option(
- "--track",
- "track_",
- is_flag=True,
- help="Track new source references before fetching",
+ "--track", "track_", is_flag=True, help="Track new source references before fetching",
)
@click.option(
- "--track-cross-junctions",
- "-J",
- is_flag=True,
- help="Allow tracking to cross junction boundaries",
+ "--track-cross-junctions", "-J", is_flag=True, help="Allow tracking to cross junction boundaries",
)
@click.option(
- "--remote",
- "-r",
- default=None,
- help="The URL of the remote source cache (defaults to the first configured cache)",
+ "--remote", "-r", default=None, help="The URL of the remote source cache (defaults to the first configured cache)",
)
@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
@@ -939,8 +823,7 @@ def source_fetch(app, elements, deps, track_, except_, track_cross_junctions, re
if track_cross_junctions and not track_:
click.echo(
- "ERROR: The --track-cross-junctions option can only be used with --track",
- err=True,
+ "ERROR: The --track-cross-junctions option can only be used with --track", err=True,
)
sys.exit(-1)
@@ -985,9 +868,7 @@ def source_fetch(app, elements, deps, track_, except_, track_cross_junctions, re
type=click.Choice(["none", "all"]),
help="The dependencies to track",
)
-@click.option(
- "--cross-junctions", "-J", is_flag=True, help="Allow crossing junction boundaries"
-)
+@click.option("--cross-junctions", "-J", is_flag=True, help="Allow crossing junction boundaries")
@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def source_track(app, elements, deps, except_, cross_junctions):
@@ -1021,10 +902,7 @@ def source_track(app, elements, deps, except_, cross_junctions):
if deps == "none":
deps = "redirect"
app.stream.track(
- elements,
- selection=deps,
- except_targets=except_,
- cross_junctions=cross_junctions,
+ elements, selection=deps, except_targets=except_, cross_junctions=cross_junctions,
)
@@ -1034,11 +912,7 @@ def source_track(app, elements, deps, except_, cross_junctions):
@source.command(name="checkout", short_help="Checkout sources of an element")
@click.option("--force", "-f", is_flag=True, help="Allow files to be overwritten")
@click.option(
- "--except",
- "except_",
- multiple=True,
- type=click.Path(readable=False),
- help="Except certain dependencies",
+ "--except", "except_", multiple=True, type=click.Path(readable=False), help="Except certain dependencies",
)
@click.option(
"--deps",
@@ -1070,9 +944,7 @@ def source_track(app, elements, deps, except_, cross_junctions):
)
@click.argument("element", required=False, type=click.Path(readable=False))
@click.pass_obj
-def source_checkout(
- app, element, directory, force, deps, except_, tar, compression, build_scripts
-):
+def source_checkout(app, element, directory, force, deps, except_, tar, compression, build_scripts):
"""Checkout sources of an element to the specified location
When this command is executed from a workspace directory, the default
@@ -1122,9 +994,7 @@ def workspace():
##################################################################
@workspace.command(name="open", short_help="Open a new workspace")
@click.option(
- "--no-checkout",
- is_flag=True,
- help="Do not checkout the source, only link to the given directory",
+ "--no-checkout", is_flag=True, help="Do not checkout the source, only link to the given directory",
)
@click.option(
"--force",
@@ -1134,10 +1004,7 @@ def workspace():
+ "or if a workspace for that element already exists",
)
@click.option(
- "--track",
- "track_",
- is_flag=True,
- help="Track and fetch new source references before checking out the workspace",
+ "--track", "track_", is_flag=True, help="Track and fetch new source references before checking out the workspace",
)
@click.option(
"--directory",
@@ -1152,11 +1019,7 @@ def workspace_open(app, no_checkout, force, track_, directory, elements):
with app.initialized():
app.stream.workspace_open(
- elements,
- no_checkout=no_checkout,
- track_first=track_,
- force=force,
- custom_dir=directory,
+ elements, no_checkout=no_checkout, track_first=track_, force=force, custom_dir=directory,
)
@@ -1165,9 +1028,7 @@ def workspace_open(app, no_checkout, force, track_, directory, elements):
##################################################################
@workspace.command(name="close", short_help="Close workspaces")
@click.option(
- "--remove-dir",
- is_flag=True,
- help="Remove the path that contains the closed workspace",
+ "--remove-dir", is_flag=True, help="Remove the path that contains the closed workspace",
)
@click.option("--all", "-a", "all_", is_flag=True, help="Close all open workspaces")
@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@@ -1193,9 +1054,7 @@ def workspace_close(app, remove_dir, all_, elements):
sys.exit(0)
if all_:
- elements = [
- element_name for element_name, _ in app.context.get_workspaces().list()
- ]
+ elements = [element_name for element_name, _ in app.context.get_workspaces().list()]
elements = app.stream.redirect_element_names(elements)
@@ -1227,14 +1086,9 @@ def workspace_close(app, remove_dir, all_, elements):
# Workspace Reset Command #
##################################################################
@workspace.command(name="reset", short_help="Reset a workspace to its original state")
+@click.option("--soft", is_flag=True, help="Reset workspace state without affecting its contents")
@click.option(
- "--soft", is_flag=True, help="Reset workspace state without affecting its contents"
-)
-@click.option(
- "--track",
- "track_",
- is_flag=True,
- help="Track and fetch the latest source before resetting",
+ "--track", "track_", is_flag=True, help="Track and fetch the latest source before resetting",
)
@click.option("--all", "-a", "all_", is_flag=True, help="Reset all open workspaces")
@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@@ -1256,9 +1110,7 @@ def workspace_reset(app, soft, track_, all_, elements):
raise AppError("No open workspaces to reset")
if all_:
- elements = tuple(
- element_name for element_name, _ in app.context.get_workspaces().list()
- )
+ elements = tuple(element_name for element_name, _ in app.context.get_workspaces().list())
app.stream.workspace_reset(elements, soft=soft, track_first=track_)
@@ -1344,15 +1196,10 @@ def artifact_show(app, deps, artifacts):
help="The dependencies to checkout",
)
@click.option(
- "--integrate/--no-integrate",
- default=None,
- is_flag=True,
- help="Whether to run integration commands",
+ "--integrate/--no-integrate", default=None, is_flag=True, help="Whether to run integration commands",
)
@click.option(
- "--hardlinks",
- is_flag=True,
- help="Checkout hardlinks instead of copying if possible",
+ "--hardlinks", is_flag=True, help="Checkout hardlinks instead of copying if possible",
)
@click.option(
"--tar",
@@ -1370,22 +1217,14 @@ def artifact_show(app, deps, artifacts):
help="The compression option of the tarball created.",
)
@click.option(
- "--pull",
- "pull_",
- is_flag=True,
- help="Pull the artifact if it's missing or incomplete.",
+ "--pull", "pull_", is_flag=True, help="Pull the artifact if it's missing or incomplete.",
)
@click.option(
- "--directory",
- default=None,
- type=click.Path(file_okay=False),
- help="The directory to checkout the artifact to",
+ "--directory", default=None, type=click.Path(file_okay=False), help="The directory to checkout the artifact to",
)
@click.argument("target", required=False, type=click.Path(readable=False))
@click.pass_obj
-def artifact_checkout(
- app, force, deps, integrate, hardlinks, tar, compression, pull_, directory, target
-):
+def artifact_checkout(app, force, deps, integrate, hardlinks, tar, compression, pull_, directory, target):
"""Checkout contents of an artifact
When this command is executed from a workspace directory, the default
@@ -1402,8 +1241,7 @@ def artifact_checkout(
if not tar:
if compression:
click.echo(
- "ERROR: --compression can only be provided if --tar is provided",
- err=True,
+ "ERROR: --compression can only be provided if --tar is provided", err=True,
)
sys.exit(-1)
else:
@@ -1420,15 +1258,10 @@ def artifact_checkout(
inferred_compression = _get_compression(tar)
except UtilError as e:
click.echo(
- "ERROR: Invalid file extension given with '--tar': {}".format(e),
- err=True,
+ "ERROR: Invalid file extension given with '--tar': {}".format(e), err=True,
)
sys.exit(-1)
- if (
- compression
- and inferred_compression != ""
- and inferred_compression != compression
- ):
+ if compression and inferred_compression != "" and inferred_compression != compression:
click.echo(
"WARNING: File extension and compression differ."
"File extension has been overridden by --compression",
@@ -1469,10 +1302,7 @@ def artifact_checkout(
help="The dependency artifacts to pull",
)
@click.option(
- "--remote",
- "-r",
- default=None,
- help="The URL of the remote cache (defaults to the first configured cache)",
+ "--remote", "-r", default=None, help="The URL of the remote cache (defaults to the first configured cache)",
)
@click.argument("artifacts", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
@@ -1506,10 +1336,7 @@ def artifact_pull(app, artifacts, deps, remote):
ignore_junction_targets = True
app.stream.pull(
- artifacts,
- selection=deps,
- remote=remote,
- ignore_junction_targets=ignore_junction_targets,
+ artifacts, selection=deps, remote=remote, ignore_junction_targets=ignore_junction_targets,
)
@@ -1526,10 +1353,7 @@ def artifact_pull(app, artifacts, deps, remote):
help="The dependencies to push",
)
@click.option(
- "--remote",
- "-r",
- default=None,
- help="The URL of the remote cache (defaults to the first configured cache)",
+ "--remote", "-r", default=None, help="The URL of the remote cache (defaults to the first configured cache)",
)
@click.argument("artifacts", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
@@ -1565,10 +1389,7 @@ def artifact_push(app, artifacts, deps, remote):
ignore_junction_targets = True
app.stream.push(
- artifacts,
- selection=deps,
- remote=remote,
- ignore_junction_targets=ignore_junction_targets,
+ artifacts, selection=deps, remote=remote, ignore_junction_targets=ignore_junction_targets,
)
@@ -1624,11 +1445,7 @@ def artifact_log(app, artifacts, out):
################################################################
@artifact.command(name="list-contents", short_help="List the contents of an artifact")
@click.option(
- "--long",
- "-l",
- "long_",
- is_flag=True,
- help="Provide more information about the contents of the artifact.",
+ "--long", "-l", "long_", is_flag=True, help="Provide more information about the contents of the artifact.",
)
@click.argument("artifacts", type=click.Path(), nargs=-1)
@click.pass_obj
@@ -1698,23 +1515,15 @@ def artifact_delete(app, artifacts, deps):
help="The dependencies to fetch",
)
@click.option(
- "--track",
- "track_",
- is_flag=True,
- help="Track new source references before fetching",
+ "--track", "track_", is_flag=True, help="Track new source references before fetching",
)
@click.option(
- "--track-cross-junctions",
- "-J",
- is_flag=True,
- help="Allow tracking to cross junction boundaries",
+ "--track-cross-junctions", "-J", is_flag=True, help="Allow tracking to cross junction boundaries",
)
@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def fetch(app, elements, deps, track_, except_, track_cross_junctions):
- click.echo(
- "This command is now obsolete. Use `bst source fetch` instead.", err=True
- )
+ click.echo("This command is now obsolete. Use `bst source fetch` instead.", err=True)
sys.exit(1)
@@ -1737,15 +1546,11 @@ def fetch(app, elements, deps, track_, except_, track_cross_junctions):
type=click.Choice(["none", "all"]),
help="The dependencies to track",
)
-@click.option(
- "--cross-junctions", "-J", is_flag=True, help="Allow crossing junction boundaries"
-)
+@click.option("--cross-junctions", "-J", is_flag=True, help="Allow crossing junction boundaries")
@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def track(app, elements, deps, except_, cross_junctions):
- click.echo(
- "This command is now obsolete. Use `bst source track` instead.", err=True
- )
+ click.echo("This command is now obsolete. Use `bst source track` instead.", err=True)
sys.exit(1)
@@ -1763,14 +1568,10 @@ def track(app, elements, deps, except_, cross_junctions):
help="The dependencies to checkout",
)
@click.option(
- "--integrate/--no-integrate",
- default=True,
- help="Run integration commands (default is to run commands)",
+ "--integrate/--no-integrate", default=True, help="Run integration commands (default is to run commands)",
)
@click.option(
- "--hardlinks",
- is_flag=True,
- help="Checkout hardlinks instead of copies (handle with care)",
+ "--hardlinks", is_flag=True, help="Checkout hardlinks instead of copies (handle with care)",
)
@click.option(
"--tar",
@@ -1804,16 +1605,12 @@ def checkout(app, element, location, force, deps, integrate, hardlinks, tar):
help="The dependency artifacts to pull",
)
@click.option(
- "--remote",
- "-r",
- help="The URL of the remote cache (defaults to the first configured cache)",
+ "--remote", "-r", help="The URL of the remote cache (defaults to the first configured cache)",
)
@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def pull(app, elements, deps, remote):
- click.echo(
- "This command is now obsolete. Use `bst artifact pull` instead.", err=True
- )
+ click.echo("This command is now obsolete. Use `bst artifact pull` instead.", err=True)
sys.exit(1)
@@ -1830,15 +1627,10 @@ def pull(app, elements, deps, remote):
help="The dependencies to push",
)
@click.option(
- "--remote",
- "-r",
- default=None,
- help="The URL of the remote cache (defaults to the first configured cache)",
+ "--remote", "-r", default=None, help="The URL of the remote cache (defaults to the first configured cache)",
)
@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def push(app, elements, deps, remote):
- click.echo(
- "This command is now obsolete. Use `bst artifact push` instead.", err=True
- )
+ click.echo("This command is now obsolete. Use `bst artifact push` instead.", err=True)
sys.exit(1)
diff --git a/src/buildstream/_frontend/complete.py b/src/buildstream/_frontend/complete.py
index 35d1cb1a3..4ac596287 100644
--- a/src/buildstream/_frontend/complete.py
+++ b/src/buildstream/_frontend/complete.py
@@ -170,12 +170,7 @@ def resolve_ctx(cli, prog_name, args):
cmd = ctx.command.get_command(ctx, args_remaining[0])
if cmd is None:
return None
- ctx = cmd.make_context(
- args_remaining[0],
- args_remaining[1:],
- parent=ctx,
- resilient_parsing=True,
- )
+ ctx = cmd.make_context(args_remaining[0], args_remaining[1:], parent=ctx, resilient_parsing=True,)
args_remaining = ctx.protected_args + ctx.args
else:
ctx = ctx.parent
@@ -202,9 +197,7 @@ def is_incomplete_option(all_args, cmd_param):
if cmd_param.is_flag:
return False
last_option = None
- for index, arg_str in enumerate(
- reversed([arg for arg in all_args if arg != WORDBREAK])
- ):
+ for index, arg_str in enumerate(reversed([arg for arg in all_args if arg != WORDBREAK])):
if index + 1 > cmd_param.nargs:
break
if start_of_option(arg_str):
@@ -295,38 +288,22 @@ def get_choices(cli, prog_name, args, incomplete, override):
if not found_param:
# completion for option values by choices
for cmd_param in ctx.command.params:
- if isinstance(cmd_param, Option) and is_incomplete_option(
- all_args, cmd_param
- ):
- choices.extend(
- get_user_autocompletions(
- all_args, incomplete, ctx.command, cmd_param, override
- )
- )
+ if isinstance(cmd_param, Option) and is_incomplete_option(all_args, cmd_param):
+ choices.extend(get_user_autocompletions(all_args, incomplete, ctx.command, cmd_param, override))
found_param = True
break
if not found_param:
# completion for argument values by choices
for cmd_param in ctx.command.params:
- if isinstance(cmd_param, Argument) and is_incomplete_argument(
- ctx.params, cmd_param
- ):
- choices.extend(
- get_user_autocompletions(
- all_args, incomplete, ctx.command, cmd_param, override
- )
- )
+ if isinstance(cmd_param, Argument) and is_incomplete_argument(ctx.params, cmd_param):
+ choices.extend(get_user_autocompletions(all_args, incomplete, ctx.command, cmd_param, override))
found_param = True
break
if not found_param and isinstance(ctx.command, MultiCommand):
# completion for any subcommands
choices.extend(
- [
- cmd + " "
- for cmd in ctx.command.list_commands(ctx)
- if not ctx.command.get_command(ctx, cmd).hidden
- ]
+ [cmd + " " for cmd in ctx.command.list_commands(ctx) if not ctx.command.get_command(ctx, cmd).hidden]
)
if (
diff --git a/src/buildstream/_frontend/status.py b/src/buildstream/_frontend/status.py
index 577fd40c5..0fd44d09b 100644
--- a/src/buildstream/_frontend/status.py
+++ b/src/buildstream/_frontend/status.py
@@ -50,15 +50,7 @@ class Status:
_TERM_CAPABILITIES = {"move_up": "cuu1", "move_x": "hpa", "clear_eol": "el"}
def __init__(
- self,
- context,
- state,
- content_profile,
- format_profile,
- success_profile,
- error_profile,
- stream,
- colors=False,
+ self, context, state, content_profile, format_profile, success_profile, error_profile, stream, colors=False,
):
self._context = context
@@ -73,13 +65,7 @@ class Status:
self._spacing = 1
self._colors = colors
self._header = _StatusHeader(
- context,
- state,
- content_profile,
- format_profile,
- success_profile,
- error_profile,
- stream,
+ context, state, content_profile, format_profile, success_profile, error_profile, stream,
)
self._term_width, _ = click.get_terminal_size()
@@ -317,14 +303,7 @@ class Status:
def _add_job(self, action_name, full_name):
task = self._state.tasks[(action_name, full_name)]
elapsed = task.elapsed_offset
- job = _StatusJob(
- self._context,
- action_name,
- full_name,
- self._content_profile,
- self._format_profile,
- elapsed,
- )
+ job = _StatusJob(self._context, action_name, full_name, self._content_profile, self._format_profile, elapsed,)
self._jobs[(action_name, full_name)] = job
self._need_alloc = True
@@ -355,14 +334,7 @@ class Status:
#
class _StatusHeader:
def __init__(
- self,
- context,
- state,
- content_profile,
- format_profile,
- success_profile,
- error_profile,
- stream,
+ self, context, state, content_profile, format_profile, success_profile, error_profile, stream,
):
#
@@ -518,9 +490,7 @@ class _StatusHeader:
# elapsed (datetime): The offset into the session when this job is created
#
class _StatusJob:
- def __init__(
- self, context, action_name, full_name, content_profile, format_profile, elapsed
- ):
+ def __init__(self, context, action_name, full_name, content_profile, format_profile, elapsed):
#
# Public members
#
@@ -612,13 +582,9 @@ class _StatusJob:
)
if self._current_progress is not None:
- text += self._format_profile.fmt(":") + self._content_profile.fmt(
- str(self._current_progress)
- )
+ text += self._format_profile.fmt(":") + self._content_profile.fmt(str(self._current_progress))
if self._maximum_progress is not None:
- text += self._format_profile.fmt("/") + self._content_profile.fmt(
- str(self._maximum_progress)
- )
+ text += self._format_profile.fmt("/") + self._content_profile.fmt(str(self._maximum_progress))
# Add padding before terminating ']'
terminator = (" " * padding) + "]"
diff --git a/src/buildstream/_frontend/widget.py b/src/buildstream/_frontend/widget.py
index 8a605bb33..c7eac2b24 100644
--- a/src/buildstream/_frontend/widget.py
+++ b/src/buildstream/_frontend/widget.py
@@ -91,18 +91,12 @@ class WallclockTime(Widget):
fields = [
self.content_profile.fmt("{:02d}".format(x))
- for x in [
- message.creation_time.hour,
- message.creation_time.minute,
- message.creation_time.second,
- ]
+ for x in [message.creation_time.hour, message.creation_time.minute, message.creation_time.second,]
]
text = self.format_profile.fmt(":").join(fields)
if self._output_format == "us":
- text += self.content_profile.fmt(
- ".{:06d}".format(message.creation_time.microsecond)
- )
+ text += self.content_profile.fmt(".{:06d}".format(message.creation_time.microsecond))
return text
@@ -135,18 +129,13 @@ class TimeCode(Widget):
else:
hours, remainder = divmod(int(elapsed.total_seconds()), 60 * 60)
minutes, seconds = divmod(remainder, 60)
- fields = [
- self.content_profile.fmt("{0:02d}".format(field))
- for field in [hours, minutes, seconds]
- ]
+ fields = [self.content_profile.fmt("{0:02d}".format(field)) for field in [hours, minutes, seconds]]
text = self.format_profile.fmt(":").join(fields)
if self._microseconds:
if elapsed is not None:
- text += self.content_profile.fmt(
- ".{0:06d}".format(elapsed.microseconds)
- )
+ text += self.content_profile.fmt(".{0:06d}".format(elapsed.microseconds))
else:
text += self.content_profile.fmt(".------")
return text
@@ -270,17 +259,11 @@ class MessageOrLogFile(Widget):
def __init__(self, context, content_profile, format_profile, err_profile):
super().__init__(context, content_profile, format_profile)
self._message_widget = MessageText(context, content_profile, format_profile)
- self._logfile_widget = LogFile(
- context, content_profile, format_profile, err_profile
- )
+ self._logfile_widget = LogFile(context, content_profile, format_profile, err_profile)
def render(self, message):
# Show the log file only in the main start/success messages
- if (
- message.logfile
- and message.scheduler
- and message.message_type in [MessageType.START, MessageType.SUCCESS]
- ):
+ if message.logfile and message.scheduler and message.message_type in [MessageType.START, MessageType.SUCCESS]:
text = self._logfile_widget.render(message)
else:
text = self._message_widget.render(message)
@@ -303,15 +286,7 @@ class MessageOrLogFile(Widget):
#
class LogLine(Widget):
def __init__(
- self,
- context,
- state,
- content_profile,
- format_profile,
- success_profile,
- err_profile,
- detail_profile,
- indent=4,
+ self, context, state, content_profile, format_profile, success_profile, err_profile, detail_profile, indent=4,
):
super().__init__(context, content_profile, format_profile)
@@ -326,34 +301,22 @@ class LogLine(Widget):
self._resolved_keys = None
self._state = state
- self._logfile_widget = LogFile(
- context, content_profile, format_profile, err_profile
- )
+ self._logfile_widget = LogFile(context, content_profile, format_profile, err_profile)
if context.log_debug:
self._columns.extend([Debug(context, content_profile, format_profile)])
self.logfile_variable_names = {
- "elapsed": TimeCode(
- context, content_profile, format_profile, microseconds=False
- ),
- "elapsed-us": TimeCode(
- context, content_profile, format_profile, microseconds=True
- ),
+ "elapsed": TimeCode(context, content_profile, format_profile, microseconds=False),
+ "elapsed-us": TimeCode(context, content_profile, format_profile, microseconds=True),
"wallclock": WallclockTime(context, content_profile, format_profile),
- "wallclock-us": WallclockTime(
- context, content_profile, format_profile, output_format="us"
- ),
+ "wallclock-us": WallclockTime(context, content_profile, format_profile, output_format="us"),
"key": CacheKey(context, content_profile, format_profile, err_profile),
"element": ElementName(context, content_profile, format_profile),
"action": TypeName(context, content_profile, format_profile),
- "message": MessageOrLogFile(
- context, content_profile, format_profile, err_profile
- ),
+ "message": MessageOrLogFile(context, content_profile, format_profile, err_profile),
}
- logfile_tokens = self._parse_logfile_format(
- context.log_message_format, content_profile, format_profile
- )
+ logfile_tokens = self._parse_logfile_format(context.log_message_format, content_profile, format_profile)
self._columns.extend(logfile_tokens)
# show_pipeline()
@@ -379,9 +342,7 @@ class LogLine(Widget):
full_key, cache_key, dim_keys = element._get_display_key()
- line = p.fmt_subst(
- line, "name", element._get_full_name(), fg="blue", bold=True
- )
+ line = p.fmt_subst(line, "name", element._get_full_name(), fg="blue", bold=True)
line = p.fmt_subst(line, "key", cache_key, fg="yellow", dim=dim_keys)
line = p.fmt_subst(line, "full-key", full_key, fg="yellow", dim=dim_keys)
@@ -393,9 +354,7 @@ class LogLine(Widget):
line = p.fmt_subst(line, "state", "failed", fg="red")
elif element._cached_success():
line = p.fmt_subst(line, "state", "cached", fg="magenta")
- elif (
- consistency == Consistency.RESOLVED and not element._source_cached()
- ):
+ elif consistency == Consistency.RESOLVED and not element._source_cached():
line = p.fmt_subst(line, "state", "fetch needed", fg="red")
elif element._buildable():
line = p.fmt_subst(line, "state", "buildable", fg="green")
@@ -407,53 +366,34 @@ class LogLine(Widget):
line = p.fmt_subst(
line,
"config",
- yaml.round_trip_dump(
- element._Element__config,
- default_flow_style=False,
- allow_unicode=True,
- ),
+ yaml.round_trip_dump(element._Element__config, default_flow_style=False, allow_unicode=True,),
)
# Variables
if "%{vars" in format_:
variables = element._Element__variables.flat
line = p.fmt_subst(
- line,
- "vars",
- yaml.round_trip_dump(
- variables, default_flow_style=False, allow_unicode=True
- ),
+ line, "vars", yaml.round_trip_dump(variables, default_flow_style=False, allow_unicode=True),
)
# Environment
if "%{env" in format_:
environment = element._Element__environment
line = p.fmt_subst(
- line,
- "env",
- yaml.round_trip_dump(
- environment, default_flow_style=False, allow_unicode=True
- ),
+ line, "env", yaml.round_trip_dump(environment, default_flow_style=False, allow_unicode=True),
)
# Public
if "%{public" in format_:
environment = element._Element__public
line = p.fmt_subst(
- line,
- "public",
- yaml.round_trip_dump(
- environment, default_flow_style=False, allow_unicode=True
- ),
+ line, "public", yaml.round_trip_dump(environment, default_flow_style=False, allow_unicode=True),
)
# Workspaced
if "%{workspaced" in format_:
line = p.fmt_subst(
- line,
- "workspaced",
- "(workspaced)" if element._get_workspace() else "",
- fg="yellow",
+ line, "workspaced", "(workspaced)" if element._get_workspace() else "", fg="yellow",
)
# Workspace-dirs
@@ -463,39 +403,25 @@ class LogLine(Widget):
path = workspace.get_absolute_path()
if path.startswith("~/"):
path = os.path.join(os.getenv("HOME", "/root"), path[2:])
- line = p.fmt_subst(
- line, "workspace-dirs", "Workspace: {}".format(path)
- )
+ line = p.fmt_subst(line, "workspace-dirs", "Workspace: {}".format(path))
else:
line = p.fmt_subst(line, "workspace-dirs", "")
# Dependencies
if "%{deps" in format_:
deps = [e.name for e in element.dependencies(Scope.ALL, recurse=False)]
- line = p.fmt_subst(
- line, "deps", yaml.safe_dump(deps, default_style=None).rstrip("\n")
- )
+ line = p.fmt_subst(line, "deps", yaml.safe_dump(deps, default_style=None).rstrip("\n"))
# Build Dependencies
if "%{build-deps" in format_:
- build_deps = [
- e.name for e in element.dependencies(Scope.BUILD, recurse=False)
- ]
- line = p.fmt_subst(
- line,
- "build-deps",
- yaml.safe_dump(build_deps, default_style=False).rstrip("\n"),
- )
+ build_deps = [e.name for e in element.dependencies(Scope.BUILD, recurse=False)]
+ line = p.fmt_subst(line, "build-deps", yaml.safe_dump(build_deps, default_style=False).rstrip("\n"),)
# Runtime Dependencies
if "%{runtime-deps" in format_:
- runtime_deps = [
- e.name for e in element.dependencies(Scope.RUN, recurse=False)
- ]
+ runtime_deps = [e.name for e in element.dependencies(Scope.RUN, recurse=False)]
line = p.fmt_subst(
- line,
- "runtime-deps",
- yaml.safe_dump(runtime_deps, default_style=False).rstrip("\n"),
+ line, "runtime-deps", yaml.safe_dump(runtime_deps, default_style=False).rstrip("\n"),
)
report += line + "\n"
@@ -519,15 +445,11 @@ class LogLine(Widget):
starttime = datetime.datetime.now()
text = ""
- self._resolved_keys = {
- element: element._get_cache_key() for element in stream.session_elements
- }
+ self._resolved_keys = {element: element._get_cache_key() for element in stream.session_elements}
# Main invocation context
text += "\n"
- text += self.content_profile.fmt(
- "BuildStream Version {}\n".format(bst_version), bold=True
- )
+ text += self.content_profile.fmt("BuildStream Version {}\n".format(bst_version), bold=True)
values = OrderedDict()
values["Session Start"] = starttime.strftime("%A, %d-%m-%Y at %H:%M:%S")
values["Project"] = "{} ({})".format(project.name, project.directory)
@@ -538,11 +460,7 @@ class LogLine(Widget):
text += "\n"
text += self.content_profile.fmt("User Configuration\n", bold=True)
values = OrderedDict()
- values["Configuration File"] = (
- "Default Configuration"
- if not context.config_origin
- else context.config_origin
- )
+ values["Configuration File"] = "Default Configuration" if not context.config_origin else context.config_origin
values["Cache Directory"] = context.cachedir
values["Log Files"] = context.logdir
values["Source Mirrors"] = context.sourcedir
@@ -570,8 +488,7 @@ class LogLine(Widget):
)
if project.config.element_factory and project.config.source_factory:
text += self._format_plugins(
- project.config.element_factory.loaded_dependencies,
- project.config.source_factory.loaded_dependencies,
+ project.config.element_factory.loaded_dependencies, project.config.source_factory.loaded_dependencies,
)
# Pipeline state
@@ -606,9 +523,7 @@ class LogLine(Widget):
text = ""
assert self._resolved_keys is not None
- elements = sorted(
- e for (e, k) in self._resolved_keys.items() if k != e._get_cache_key()
- )
+ elements = sorted(e for (e, k) in self._resolved_keys.items() if k != e._get_cache_key())
if elements:
text += self.content_profile.fmt("Resolved key Summary\n", bold=True)
text += self.show_pipeline(elements, self.context.log_element_format)
@@ -622,9 +537,7 @@ class LogLine(Widget):
# Exclude the failure messages if the job didn't ultimately fail
# (e.g. succeeded on retry)
if element_name in group.failed_tasks:
- values[element_name] = "".join(
- self._render(v) for v in messages
- )
+ values[element_name] = "".join(self._render(v) for v in messages)
if values:
text += self.content_profile.fmt("Failure Summary\n", bold=True)
@@ -667,12 +580,7 @@ class LogLine(Widget):
+ skipped_align
)
- status_text += (
- self.content_profile.fmt("failed ")
- + self._err_profile.fmt(failed)
- + " "
- + failed_align
- )
+ status_text += self.content_profile.fmt("failed ") + self._err_profile.fmt(failed) + " " + failed_align
values["{} Queue".format(group.name)] = status_text
text += self._format_values(values, style_value=False)
@@ -701,9 +609,7 @@ class LogLine(Widget):
logfile_tokens = []
while format_string:
if format_string.startswith("%%"):
- logfile_tokens.append(
- FixedText(self.context, "%", content_profile, format_profile)
- )
+ logfile_tokens.append(FixedText(self.context, "%", content_profile, format_profile))
format_string = format_string[2:]
continue
m = re.search(r"^%\{([^\}]+)\}", format_string)
@@ -711,25 +617,17 @@ class LogLine(Widget):
variable = m.group(1)
format_string = format_string[m.end(0) :]
if variable not in self.logfile_variable_names:
- raise Exception(
- "'{0}' is not a valid log variable name.".format(variable)
- )
+ raise Exception("'{0}' is not a valid log variable name.".format(variable))
logfile_tokens.append(self.logfile_variable_names[variable])
else:
m = re.search("^[^%]+", format_string)
if m is not None:
- text = FixedText(
- self.context, m.group(0), content_profile, format_profile
- )
+ text = FixedText(self.context, m.group(0), content_profile, format_profile)
format_string = format_string[m.end(0) :]
logfile_tokens.append(text)
else:
# No idea what to do now
- raise Exception(
- "'{0}' could not be parsed into a valid logging format.".format(
- format_string
- )
- )
+ raise Exception("'{0}' could not be parsed into a valid logging format.".format(format_string))
return logfile_tokens
def _render(self, message):
@@ -754,11 +652,7 @@ class LogLine(Widget):
n_lines = len(lines)
abbrev = False
- if (
- message.message_type not in ERROR_MESSAGES
- and not frontend_message
- and n_lines > self._message_lines
- ):
+ if message.message_type not in ERROR_MESSAGES and not frontend_message and n_lines > self._message_lines:
lines = lines[0 : self._message_lines]
if self._message_lines > 0:
abbrev = True
@@ -775,10 +669,7 @@ class LogLine(Widget):
if abbrev:
text += self._indent + self.content_profile.fmt(
- "Message contains {} additional lines".format(
- n_lines - self._message_lines
- ),
- dim=True,
+ "Message contains {} additional lines".format(n_lines - self._message_lines), dim=True,
)
text += "\n"
@@ -793,18 +684,10 @@ class LogLine(Widget):
elif self._log_lines > 0:
text += (
self._indent
- + self._err_profile.fmt(
- "Printing the last {} lines from log file:".format(
- self._log_lines
- )
- )
- + "\n"
- )
- text += (
- self._indent
- + self._logfile_widget.render_abbrev(message, abbrev=False)
+ + self._err_profile.fmt("Printing the last {} lines from log file:".format(self._log_lines))
+ "\n"
)
+ text += self._indent + self._logfile_widget.render_abbrev(message, abbrev=False) + "\n"
text += self._indent + self._err_profile.fmt("=" * 70) + "\n"
log_content = self._read_last_lines(message.logfile)
@@ -891,9 +774,7 @@ class LogLine(Widget):
text += textwrap.indent(value, self._indent)
continue
- text += self.format_profile.fmt(
- " {}: {}".format(key, " " * (max_key_len - len(key)))
- )
+ text += self.format_profile.fmt(" {}: {}".format(key, " " * (max_key_len - len(key))))
if style_value:
text += self.content_profile.fmt(str(value))
else:
@@ -928,13 +809,9 @@ class LogLine(Widget):
text += textwrap.indent(value, self._indent)
continue
- text += self.format_profile.fmt(
- " {}:{}".format(key, " " * (max_key_len - len(key)))
- )
+ text += self.format_profile.fmt(" {}:{}".format(key, " " * (max_key_len - len(key))))
- value_list = "\n\t" + "\n\t".join(
- (self._get_filestats(v, list_long=long_) for v in value)
- )
+ value_list = "\n\t" + "\n\t".join((self._get_filestats(v, list_long=long_) for v in value))
if value == []:
message = "\n\tThis element has no associated artifacts"
if style_value:
diff --git a/src/buildstream/_fuse/fuse.py b/src/buildstream/_fuse/fuse.py
index 9bedb2d4b..62b2de871 100644
--- a/src/buildstream/_fuse/fuse.py
+++ b/src/buildstream/_fuse/fuse.py
@@ -73,9 +73,7 @@ _machine = machine()
if _system == "Darwin":
_libiconv = CDLL(find_library("iconv"), RTLD_GLOBAL) # libfuse dependency
- _libfuse_path = (
- find_library("fuse4x") or find_library("osxfuse") or find_library("fuse")
- )
+ _libfuse_path = find_library("fuse4x") or find_library("osxfuse") or find_library("fuse")
else:
_libfuse_path = find_library("fuse")
@@ -98,12 +96,8 @@ if _system in ("Darwin", "Darwin-MacFuse", "FreeBSD"):
c_off_t = c_int64
c_pid_t = c_int32
c_uid_t = c_uint32
- setxattr_t = CFUNCTYPE(
- c_int, c_char_p, c_char_p, POINTER(c_byte), c_size_t, c_int, c_uint32
- )
- getxattr_t = CFUNCTYPE(
- c_int, c_char_p, c_char_p, POINTER(c_byte), c_size_t, c_uint32
- )
+ setxattr_t = CFUNCTYPE(c_int, c_char_p, c_char_p, POINTER(c_byte), c_size_t, c_int, c_uint32)
+ getxattr_t = CFUNCTYPE(c_int, c_char_p, c_char_p, POINTER(c_byte), c_size_t, c_uint32)
if _system == "Darwin":
c_stat._fields_ = [
("st_dev", c_dev_t),
@@ -369,28 +363,8 @@ class fuse_operations(Structure):
("truncate", CFUNCTYPE(c_int, c_char_p, c_off_t)),
("utime", c_voidp), # Deprecated, use utimens
("open", CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info))),
- (
- "read",
- CFUNCTYPE(
- c_int,
- c_char_p,
- POINTER(c_byte),
- c_size_t,
- c_off_t,
- POINTER(fuse_file_info),
- ),
- ),
- (
- "write",
- CFUNCTYPE(
- c_int,
- c_char_p,
- POINTER(c_byte),
- c_size_t,
- c_off_t,
- POINTER(fuse_file_info),
- ),
- ),
+ ("read", CFUNCTYPE(c_int, c_char_p, POINTER(c_byte), c_size_t, c_off_t, POINTER(fuse_file_info),),),
+ ("write", CFUNCTYPE(c_int, c_char_p, POINTER(c_byte), c_size_t, c_off_t, POINTER(fuse_file_info),),),
("statfs", CFUNCTYPE(c_int, c_char_p, POINTER(c_statvfs))),
("flush", CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info))),
("release", CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info))),
@@ -418,10 +392,7 @@ class fuse_operations(Structure):
("access", CFUNCTYPE(c_int, c_char_p, c_int)),
("create", CFUNCTYPE(c_int, c_char_p, c_mode_t, POINTER(fuse_file_info))),
("ftruncate", CFUNCTYPE(c_int, c_char_p, c_off_t, POINTER(fuse_file_info))),
- (
- "fgetattr",
- CFUNCTYPE(c_int, c_char_p, POINTER(c_stat), POINTER(fuse_file_info)),
- ),
+ ("fgetattr", CFUNCTYPE(c_int, c_char_p, POINTER(c_stat), POINTER(fuse_file_info)),),
("lock", CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info), c_int, c_voidp)),
("utimens", CFUNCTYPE(c_int, c_char_p, POINTER(c_utimbuf))),
("bmap", CFUNCTYPE(c_int, c_char_p, c_size_t, POINTER(c_ulonglong))),
@@ -475,9 +446,7 @@ class FUSE(object):
("nothreads", "-s"),
)
- def __init__(
- self, operations, mountpoint, raw_fi=False, encoding="utf-8", **kwargs
- ):
+ def __init__(self, operations, mountpoint, raw_fi=False, encoding="utf-8", **kwargs):
"""
Setting raw_fi to True will cause FUSE to pass the fuse_file_info
@@ -526,9 +495,7 @@ class FUSE(object):
except ValueError:
old_handler = SIG_DFL
- err = _libfuse.fuse_main_real(
- len(args), argv, pointer(fuse_ops), sizeof(fuse_ops), None
- )
+ err = _libfuse.fuse_main_real(len(args), argv, pointer(fuse_ops), sizeof(fuse_ops), None)
try:
signal(SIGINT, old_handler)
@@ -572,9 +539,7 @@ class FUSE(object):
return self.fgetattr(path, buf, None)
def readlink(self, path, buf, bufsize):
- ret = self.operations("readlink", path.decode(self.encoding)).encode(
- self.encoding
- )
+ ret = self.operations("readlink", path.decode(self.encoding)).encode(self.encoding)
# copies a string into the given buffer
# (null terminated and truncated if necessary)
@@ -597,21 +562,15 @@ class FUSE(object):
def symlink(self, source, target):
"creates a symlink `target -> source` (e.g. ln -s source target)"
- return self.operations(
- "symlink", target.decode(self.encoding), source.decode(self.encoding)
- )
+ return self.operations("symlink", target.decode(self.encoding), source.decode(self.encoding))
def rename(self, old, new):
- return self.operations(
- "rename", old.decode(self.encoding), new.decode(self.encoding)
- )
+ return self.operations("rename", old.decode(self.encoding), new.decode(self.encoding))
def link(self, source, target):
"creates a hard link `target -> source` (e.g. ln source target)"
- return self.operations(
- "link", target.decode(self.encoding), source.decode(self.encoding)
- )
+ return self.operations("link", target.decode(self.encoding), source.decode(self.encoding))
def chmod(self, path, mode):
return self.operations("chmod", path.decode(self.encoding), mode)
@@ -643,17 +602,13 @@ class FUSE(object):
else:
fh = fip.contents.fh
- ret = self.operations(
- "read", self._decode_optional_path(path), size, offset, fh
- )
+ ret = self.operations("read", self._decode_optional_path(path), size, offset, fh)
if not ret:
return 0
retsize = len(ret)
- assert (
- retsize <= size
- ), "actual amount read {:d} greater than expected {:d}".format(retsize, size)
+ assert retsize <= size, "actual amount read {:d} greater than expected {:d}".format(retsize, size)
data = create_string_buffer(ret, retsize)
memmove(buf, data, retsize)
@@ -667,9 +622,7 @@ class FUSE(object):
else:
fh = fip.contents.fh
- return self.operations(
- "write", self._decode_optional_path(path), data, offset, fh
- )
+ return self.operations("write", self._decode_optional_path(path), data, offset, fh)
def statfs(self, path, buf):
stv = buf.contents
@@ -706,18 +659,11 @@ class FUSE(object):
def setxattr(self, path, name, value, size, options, *args):
return self.operations(
- "setxattr",
- path.decode(self.encoding),
- name.decode(self.encoding),
- string_at(value, size),
- options,
- *args
+ "setxattr", path.decode(self.encoding), name.decode(self.encoding), string_at(value, size), options, *args
)
def getxattr(self, path, name, value, size, *args):
- ret = self.operations(
- "getxattr", path.decode(self.encoding), name.decode(self.encoding), *args
- )
+ ret = self.operations("getxattr", path.decode(self.encoding), name.decode(self.encoding), *args)
retsize = len(ret)
# allow size queries
@@ -754,9 +700,7 @@ class FUSE(object):
return retsize
def removexattr(self, path, name):
- return self.operations(
- "removexattr", path.decode(self.encoding), name.decode(self.encoding)
- )
+ return self.operations("removexattr", path.decode(self.encoding), name.decode(self.encoding))
def opendir(self, path, fip):
# Ignore raw_fi
@@ -766,9 +710,7 @@ class FUSE(object):
def readdir(self, path, buf, filler, offset, fip):
# Ignore raw_fi
- for item in self.operations(
- "readdir", self._decode_optional_path(path), fip.contents.fh
- ):
+ for item in self.operations("readdir", self._decode_optional_path(path), fip.contents.fh):
if isinstance(item, basestring):
name, st, offset = item, None, 0
@@ -787,15 +729,11 @@ class FUSE(object):
def releasedir(self, path, fip):
# Ignore raw_fi
- return self.operations(
- "releasedir", self._decode_optional_path(path), fip.contents.fh
- )
+ return self.operations("releasedir", self._decode_optional_path(path), fip.contents.fh)
def fsyncdir(self, path, datasync, fip):
# Ignore raw_fi
- return self.operations(
- "fsyncdir", self._decode_optional_path(path), datasync, fip.contents.fh
- )
+ return self.operations("fsyncdir", self._decode_optional_path(path), datasync, fip.contents.fh)
def init(self, conn):
return self.operations("init", "/")
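
A minimal sketch, assuming nothing beyond what the fuse.py hunks above show: C-level callbacks decode byte paths and forward to a single operations(name, *args) entry point, and read() copies the result into a caller-supplied ctypes buffer without exceeding the requested size. The class and function names here are illustrative, not BuildStream's vendored fuse.py.

```python
from ctypes import create_string_buffer, memmove


class Operations:
    encoding = "utf-8"

    def __call__(self, op, *args):
        # Dispatch by name, e.g. operations("read", path, size, offset, fh)
        return getattr(self, op)(*args)

    def read(self, path, size, offset, fh):
        return b"hello from fuse"[offset:offset + size]


def read_callback(operations, raw_path, buf, size, offset, fh):
    # Mirrors the reformatted FUSE.read(): decode the path, dispatch, then
    # copy into the caller-supplied buffer, never exceeding the requested size.
    ret = operations("read", raw_path.decode(operations.encoding), size, offset, fh)
    if not ret:
        return 0
    retsize = len(ret)
    assert retsize <= size, "actual amount read {:d} greater than expected {:d}".format(retsize, size)
    memmove(buf, create_string_buffer(ret, retsize), retsize)
    return retsize
```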
diff --git a/src/buildstream/_fuse/mount.py b/src/buildstream/_fuse/mount.py
index 4df2ed603..d586ea2d5 100644
--- a/src/buildstream/_fuse/mount.py
+++ b/src/buildstream/_fuse/mount.py
@@ -89,9 +89,7 @@ class Mount:
################################################
def __init__(self, fuse_mount_options=None):
- self._fuse_mount_options = (
- {} if fuse_mount_options is None else fuse_mount_options
- )
+ self._fuse_mount_options = {} if fuse_mount_options is None else fuse_mount_options
# _mount():
#
@@ -110,18 +108,14 @@ class Mount:
# Ensure the child process does not inherit our signal handlers, if the
# child wants to handle a signal then it will first set its own
# handler, and then unblock it.
- with _signals.blocked(
- [signal.SIGTERM, signal.SIGTSTP, signal.SIGINT], ignore=False
- ):
+ with _signals.blocked([signal.SIGTERM, signal.SIGTSTP, signal.SIGINT], ignore=False):
self.__process.start()
while not os.path.ismount(mountpoint):
if not self.__process.is_alive():
self.__logfile.seek(0)
stderr = self.__logfile.read()
- raise FuseMountError(
- "Unable to mount {}: {}".format(mountpoint, stderr.decode().strip())
- )
+ raise FuseMountError("Unable to mount {}: {}".format(mountpoint, stderr.decode().strip()))
time.sleep(1 / 100)
@@ -185,11 +179,7 @@ class Mount:
# Returns:
# (Operations): A FUSE Operations implementation
def create_operations(self):
- raise ImplError(
- "Mount subclass '{}' did not implement create_operations()".format(
- type(self).__name__
- )
- )
+ raise ImplError("Mount subclass '{}' did not implement create_operations()".format(type(self).__name__))
################################################
# Child Process #
@@ -211,9 +201,7 @@ class Mount:
# Ask the subclass to give us an Operations object
#
- self.__operations = (
- self.create_operations()
- ) # pylint: disable=assignment-from-no-return
+ self.__operations = self.create_operations() # pylint: disable=assignment-from-no-return
# Run fuse in foreground in this child process, internally libfuse
# will handle SIGTERM and gracefully exit its own little main loop.
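
A hedged sketch of the mount wait loop visible in the mount.py hunks above: after starting the FUSE child process, poll until the mountpoint appears and surface the child's captured stderr if it exits first. FuseMountError is the exception named in the diff; the helper function and its signature are assumptions for illustration.

```python
import os
import time


class FuseMountError(Exception):
    pass


def wait_for_mount(process, mountpoint, logfile, interval=1 / 100):
    # Poll until the kernel reports the path as a mountpoint; if the child
    # process dies first, report whatever it wrote to its log file.
    while not os.path.ismount(mountpoint):
        if not process.is_alive():
            logfile.seek(0)
            stderr = logfile.read()
            raise FuseMountError("Unable to mount {}: {}".format(mountpoint, stderr.decode().strip()))
        time.sleep(interval)
```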
diff --git a/src/buildstream/_gitsourcebase.py b/src/buildstream/_gitsourcebase.py
index 11f1d6572..1fcfe335e 100644
--- a/src/buildstream/_gitsourcebase.py
+++ b/src/buildstream/_gitsourcebase.py
@@ -62,9 +62,7 @@ class _GitMirror(SourceFetcher):
self.ref = ref
self.tags = tags
self.primary = primary
- self.mirror = os.path.join(
- source.get_mirror_directory(), utils.url_directory_name(url)
- )
+ self.mirror = os.path.join(source.get_mirror_directory(), utils.url_directory_name(url))
self.mark_download_url(url)
# Ensures that the mirror exists
@@ -81,9 +79,7 @@ class _GitMirror(SourceFetcher):
# system configured tmpdir is not on the same partition.
#
with self.source.tempdir() as tmpdir:
- url = self.source.translate_url(
- self.url, alias_override=alias_override, primary=self.primary
- )
+ url = self.source.translate_url(self.url, alias_override=alias_override, primary=self.primary)
self.source.call(
[self.source.host_git, "clone", "--mirror", "-n", url, tmpdir],
fail="Failed to clone git repository {}".format(url),
@@ -95,9 +91,7 @@ class _GitMirror(SourceFetcher):
except DirectoryExistsError:
# Another process was quicker to download this repository.
# Let's discard our own
- self.source.status(
- "{}: Discarding duplicate clone of {}".format(self.source, url)
- )
+ self.source.status("{}: Discarding duplicate clone of {}".format(self.source, url))
except OSError as e:
raise SourceError(
"{}: Failed to move cloned git repository {} from '{}' to '{}': {}".format(
@@ -106,9 +100,7 @@ class _GitMirror(SourceFetcher):
) from e
def _fetch(self, alias_override=None):
- url = self.source.translate_url(
- self.url, alias_override=alias_override, primary=self.primary
- )
+ url = self.source.translate_url(self.url, alias_override=alias_override, primary=self.primary)
if alias_override:
remote_name = utils.url_directory_name(alias_override)
@@ -142,13 +134,9 @@ class _GitMirror(SourceFetcher):
def fetch(self, alias_override=None): # pylint: disable=arguments-differ
# Resolve the URL for the message
- resolved_url = self.source.translate_url(
- self.url, alias_override=alias_override, primary=self.primary
- )
+ resolved_url = self.source.translate_url(self.url, alias_override=alias_override, primary=self.primary)
- with self.source.timed_activity(
- "Fetching from {}".format(resolved_url), silent_nested=True
- ):
+ with self.source.timed_activity("Fetching from {}".format(resolved_url), silent_nested=True):
self.ensure(alias_override)
if not self.has_ref():
self._fetch(alias_override)
@@ -163,25 +151,19 @@ class _GitMirror(SourceFetcher):
return False
# Check if the ref is really there
- rc = self.source.call(
- [self.source.host_git, "cat-file", "-t", self.ref], cwd=self.mirror
- )
+ rc = self.source.call([self.source.host_git, "cat-file", "-t", self.ref], cwd=self.mirror)
return rc == 0
def assert_ref(self):
if not self.has_ref():
raise SourceError(
- "{}: expected ref '{}' was not found in git repository: '{}'".format(
- self.source, self.ref, self.url
- )
+ "{}: expected ref '{}' was not found in git repository: '{}'".format(self.source, self.ref, self.url)
)
def latest_commit_with_tags(self, tracking, track_tags=False):
_, output = self.source.check_output(
[self.source.host_git, "rev-parse", tracking],
- fail="Unable to find commit for specified branch name '{}'".format(
- tracking
- ),
+ fail="Unable to find commit for specified branch name '{}'".format(tracking),
cwd=self.mirror,
)
ref = output.rstrip("\n")
@@ -190,15 +172,7 @@ class _GitMirror(SourceFetcher):
# Prefix the ref with the closest tag, if available,
# to make the ref human readable
exit_code, output = self.source.check_output(
- [
- self.source.host_git,
- "describe",
- "--tags",
- "--abbrev=40",
- "--long",
- ref,
- ],
- cwd=self.mirror,
+ [self.source.host_git, "describe", "--tags", "--abbrev=40", "--long", ref,], cwd=self.mirror,
)
if exit_code == 0:
ref = output.rstrip("\n")
@@ -214,8 +188,7 @@ class _GitMirror(SourceFetcher):
["--tags", "--first-parent"],
]:
exit_code, output = self.source.check_output(
- [self.source.host_git, "describe", "--abbrev=0", ref, *options],
- cwd=self.mirror,
+ [self.source.host_git, "describe", "--abbrev=0", ref, *options], cwd=self.mirror,
)
if exit_code == 0:
tag = output.strip()
@@ -224,9 +197,7 @@ class _GitMirror(SourceFetcher):
fail="Unable to resolve tag '{}'".format(tag),
cwd=self.mirror,
)
- exit_code = self.source.call(
- [self.source.host_git, "cat-file", "tag", tag], cwd=self.mirror
- )
+ exit_code = self.source.call([self.source.host_git, "cat-file", "tag", tag], cwd=self.mirror)
annotated = exit_code == 0
tags.add((tag, commit_ref.strip(), annotated))
@@ -240,17 +211,8 @@ class _GitMirror(SourceFetcher):
# case we're just checking out a specific commit and then removing the .git/
# directory.
self.source.call(
- [
- self.source.host_git,
- "clone",
- "--no-checkout",
- "--shared",
- self.mirror,
- fullpath,
- ],
- fail="Failed to create git mirror {} in directory: {}".format(
- self.mirror, fullpath
- ),
+ [self.source.host_git, "clone", "--no-checkout", "--shared", self.mirror, fullpath,],
+ fail="Failed to create git mirror {} in directory: {}".format(self.mirror, fullpath),
fail_temporarily=True,
)
@@ -271,9 +233,7 @@ class _GitMirror(SourceFetcher):
self.source.call(
[self.source.host_git, "clone", "--no-checkout", self.mirror, fullpath],
- fail="Failed to clone git mirror {} in directory: {}".format(
- self.mirror, fullpath
- ),
+ fail="Failed to clone git mirror {} in directory: {}".format(self.mirror, fullpath),
fail_temporarily=True,
)
@@ -292,20 +252,14 @@ class _GitMirror(SourceFetcher):
# List the submodules (path/url tuples) present at the given ref of this repo
def submodule_list(self):
modules = "{}:{}".format(self.ref, GIT_MODULES)
- exit_code, output = self.source.check_output(
- [self.source.host_git, "show", modules], cwd=self.mirror
- )
+ exit_code, output = self.source.check_output([self.source.host_git, "show", modules], cwd=self.mirror)
# If git show reports error code 128 here, we take it to mean there is
# no .gitmodules file to display for the given revision.
if exit_code == 128:
return
elif exit_code != 0:
- raise SourceError(
- "{plugin}: Failed to show gitmodules at ref {ref}".format(
- plugin=self, ref=self.ref
- )
- )
+ raise SourceError("{plugin}: Failed to show gitmodules at ref {ref}".format(plugin=self, ref=self.ref))
content = "\n".join([l.strip() for l in output.splitlines()])
@@ -331,9 +285,7 @@ class _GitMirror(SourceFetcher):
# object that corresponds to the submodule
_, output = self.source.check_output(
[self.source.host_git, "ls-tree", ref, submodule],
- fail="ls-tree failed for commit {} and submodule: {}".format(
- ref, submodule
- ),
+ fail="ls-tree failed for commit {} and submodule: {}".format(ref, submodule),
cwd=self.mirror,
)
@@ -345,26 +297,20 @@ class _GitMirror(SourceFetcher):
# fail if the commit hash is invalid
if len(submodule_commit) != 40:
raise SourceError(
- "{}: Error reading commit information for submodule '{}'".format(
- self.source, submodule
- )
+ "{}: Error reading commit information for submodule '{}'".format(self.source, submodule)
)
return submodule_commit
else:
detail = (
- "The submodule '{}' is defined either in the BuildStream source\n".format(
- submodule
- )
+ "The submodule '{}' is defined either in the BuildStream source\n".format(submodule)
+ "definition, or in a .gitmodules file. But the submodule was never added to the\n"
+ "underlying git repository with `git submodule add`."
)
self.source.warn(
- "{}: Ignoring inconsistent submodule '{}'".format(
- self.source, submodule
- ),
+ "{}: Ignoring inconsistent submodule '{}'".format(self.source, submodule),
detail=detail,
warning_token=WARN_INCONSISTENT_SUBMODULE,
)
@@ -398,9 +344,7 @@ class _GitMirror(SourceFetcher):
fail_temporarily=True,
cwd=self.mirror,
)
- self.source.warn(
- "refs {}..{}: {}".format(commit_ref, self.ref, out.splitlines())
- )
+ self.source.warn("refs {}..{}: {}".format(commit_ref, self.ref, out.splitlines()))
for line in out.splitlines():
rev = line.lstrip("-")
if line[0] == "-":
@@ -427,14 +371,7 @@ class _GitMirror(SourceFetcher):
)
commit_file.seek(0, 0)
self.source.call(
- [
- self.source.host_git,
- "hash-object",
- "-w",
- "-t",
- "commit",
- "--stdin",
- ],
+ [self.source.host_git, "hash-object", "-w", "-t", "commit", "--stdin",],
stdin=commit_file,
fail="Failed to add commit object {}".format(rev),
cwd=fullpath,
@@ -447,20 +384,11 @@ class _GitMirror(SourceFetcher):
for tag, commit_ref, annotated in self.tags:
if annotated:
with TemporaryFile(dir=tmpdir) as tag_file:
- tag_data = "object {}\ntype commit\ntag {}\n".format(
- commit_ref, tag
- )
+ tag_data = "object {}\ntype commit\ntag {}\n".format(commit_ref, tag)
tag_file.write(tag_data.encode("ascii"))
tag_file.seek(0, 0)
_, tag_ref = self.source.check_output(
- [
- self.source.host_git,
- "hash-object",
- "-w",
- "-t",
- "tag",
- "--stdin",
- ],
+ [self.source.host_git, "hash-object", "-w", "-t", "tag", "--stdin",],
stdin=tag_file,
fail="Failed to add tag object {}".format(tag),
cwd=fullpath,
@@ -518,9 +446,7 @@ class _GitSourceBase(Source):
self.track_tags = node.get_bool("track-tags", default=False)
self.original_url = node.get_str("url")
- self.mirror = self.BST_MIRROR_CLASS(
- self, "", self.original_url, ref, tags=tags, primary=True
- )
+ self.mirror = self.BST_MIRROR_CLASS(self, "", self.original_url, ref, tags=tags, primary=True)
self.tracking = node.get_str("track", None)
self.ref_format = node.get_enum("ref-format", _RefFormat, _RefFormat.SHA1)
@@ -529,8 +455,7 @@ class _GitSourceBase(Source):
# If it is missing both then we will be unable to track or build.
if self.mirror.ref is None and self.tracking is None:
raise SourceError(
- "{}: Git sources require a ref and/or track".format(self),
- reason="missing-track-and-ref",
+ "{}: Git sources require a ref and/or track".format(self), reason="missing-track-and-ref",
)
self.checkout_submodules = node.get_bool("checkout-submodules", default=True)
@@ -566,9 +491,7 @@ class _GitSourceBase(Source):
# from another location, it should not affect the cache key.
key = [self.original_url, self.mirror.ref]
if self.mirror.tags:
- tags = {
- tag: (commit, annotated) for tag, commit, annotated in self.mirror.tags
- }
+ tags = {tag: (commit, annotated) for tag, commit, annotated in self.mirror.tags}
key.append({"tags": tags})
# Only modify the cache key with checkout_submodules if it's something
@@ -582,9 +505,7 @@ class _GitSourceBase(Source):
key.append(self.submodule_overrides)
if self.submodule_checkout_overrides:
- key.append(
- {"submodule_checkout_overrides": self.submodule_checkout_overrides}
- )
+ key.append({"submodule_checkout_overrides": self.submodule_checkout_overrides})
return key
@@ -631,22 +552,16 @@ class _GitSourceBase(Source):
if not self.tracking:
# Is there a better way to check if a ref is given.
if self.mirror.ref is None:
- detail = (
- "Without a tracking branch ref can not be updated. Please "
- + "provide a ref or a track."
- )
+ detail = "Without a tracking branch ref can not be updated. Please " + "provide a ref or a track."
raise SourceError(
- "{}: No track or ref".format(self),
- detail=detail,
- reason="track-attempt-no-track",
+ "{}: No track or ref".format(self), detail=detail, reason="track-attempt-no-track",
)
return None
# Resolve the URL for the message
resolved_url = self.translate_url(self.mirror.url)
with self.timed_activity(
- "Tracking {} from {}".format(self.tracking, resolved_url),
- silent_nested=True,
+ "Tracking {} from {}".format(self.tracking, resolved_url), silent_nested=True,
):
self.mirror.ensure()
self.mirror._fetch()
@@ -660,9 +575,7 @@ class _GitSourceBase(Source):
# XXX: may wish to refactor this as some code dupe with stage()
self._refresh_submodules()
- with self.timed_activity(
- 'Setting up workspace "{}"'.format(directory), silent_nested=True
- ):
+ with self.timed_activity('Setting up workspace "{}"'.format(directory), silent_nested=True):
self.mirror.init_workspace(directory)
for mirror in self.submodules:
mirror.init_workspace(directory)
@@ -678,9 +591,7 @@ class _GitSourceBase(Source):
# Stage the main repo in the specified directory
#
- with self.timed_activity(
- "Staging {}".format(self.mirror.url), silent_nested=True
- ):
+ with self.timed_activity("Staging {}".format(self.mirror.url), silent_nested=True):
self.mirror.stage(directory)
for mirror in self.submodules:
mirror.stage(directory)
@@ -719,8 +630,7 @@ class _GitSourceBase(Source):
"{}: Invalid submodules specified".format(self),
warning_token=WARN_INVALID_SUBMODULE,
detail="The following submodules are specified in the source "
- "description but do not exist according to the repository\n\n"
- + "\n".join(detail),
+ "description but do not exist according to the repository\n\n" + "\n".join(detail),
)
# Warn about submodules which exist but have not been explicitly configured
@@ -741,28 +651,14 @@ class _GitSourceBase(Source):
ref_in_track = False
if self.tracking:
_, branch = self.check_output(
- [
- self.host_git,
- "branch",
- "--list",
- self.tracking,
- "--contains",
- self.mirror.ref,
- ],
+ [self.host_git, "branch", "--list", self.tracking, "--contains", self.mirror.ref,],
cwd=self.mirror.mirror,
)
if branch:
ref_in_track = True
else:
_, tag = self.check_output(
- [
- self.host_git,
- "tag",
- "--list",
- self.tracking,
- "--contains",
- self.mirror.ref,
- ],
+ [self.host_git, "tag", "--list", self.tracking, "--contains", self.mirror.ref,],
cwd=self.mirror.mirror,
)
if tag:
@@ -772,13 +668,9 @@ class _GitSourceBase(Source):
detail = (
"The ref provided for the element does not exist locally "
+ "in the provided track branch / tag '{}'.\n".format(self.tracking)
- + "You may wish to track the element to update the ref from '{}' ".format(
- self.tracking
- )
+ + "You may wish to track the element to update the ref from '{}' ".format(self.tracking)
+ "with `bst source track`,\n"
- + "or examine the upstream at '{}' for the specific ref.".format(
- self.mirror.url
- )
+ + "or examine the upstream at '{}' for the specific ref.".format(self.mirror.url)
)
self.warn(
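
An illustrative sketch of the cache-key assembly reformatted in _gitsourcebase.py above: the key is a plain list built from the original URL and ref, optionally extended with a tag mapping and submodule overrides, so that mirror locations and other incidental settings do not perturb it. The standalone function is hypothetical; the field layout follows the diff.

```python
def git_source_unique_key(original_url, ref, tags=(), submodule_overrides=None):
    key = [original_url, ref]
    if tags:
        # tags is an iterable of (tag, commit, annotated) tuples
        key.append({"tags": {tag: (commit, annotated) for tag, commit, annotated in tags}})
    if submodule_overrides:
        key.append(submodule_overrides)
    return key
```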
diff --git a/src/buildstream/_includes.py b/src/buildstream/_includes.py
index 860b1328f..f8737c1d1 100644
--- a/src/buildstream/_includes.py
+++ b/src/buildstream/_includes.py
@@ -32,14 +32,10 @@ class Includes:
if current_loader is None:
current_loader = self._loader
- includes_node = node.get_node(
- "(@)", allowed_types=[ScalarNode, SequenceNode], allow_none=True
- )
+ includes_node = node.get_node("(@)", allowed_types=[ScalarNode, SequenceNode], allow_none=True)
if includes_node:
- if (
- type(includes_node) is ScalarNode
- ): # pylint: disable=unidiomatic-typecheck
+ if type(includes_node) is ScalarNode: # pylint: disable=unidiomatic-typecheck
includes = [includes_node.as_str()]
else:
includes = includes_node.as_str_list()
@@ -50,9 +46,7 @@ class Includes:
if only_local and ":" in include:
continue
try:
- include_node, file_path, sub_loader = self._include_file(
- include, current_loader
- )
+ include_node, file_path, sub_loader = self._include_file(include, current_loader)
except LoadError as e:
include_provenance = includes_node.get_provenance()
if e.reason == LoadErrorReason.MISSING_FILE:
@@ -64,9 +58,7 @@ class Includes:
message = "{}: Include block references a directory instead of a file: '{}'.".format(
include_provenance, include
)
- raise LoadError(
- message, LoadErrorReason.LOADING_DIRECTORY
- ) from e
+ raise LoadError(message, LoadErrorReason.LOADING_DIRECTORY) from e
# Otherwise, we don't know the reason, so just raise
raise
@@ -74,9 +66,7 @@ class Includes:
if file_path in included:
include_provenance = includes_node.get_provenance()
raise LoadError(
- "{}: trying to recursively include {}".format(
- include_provenance, file_path
- ),
+ "{}: trying to recursively include {}".format(include_provenance, file_path),
LoadErrorReason.RECURSIVE_INCLUDE,
)
# Because the included node will be modified, we need
@@ -87,10 +77,7 @@ class Includes:
try:
included.add(file_path)
self.process(
- include_node,
- included=included,
- current_loader=sub_loader,
- only_local=only_local,
+ include_node, included=included, current_loader=sub_loader, only_local=only_local,
)
finally:
included.remove(file_path)
@@ -99,10 +86,7 @@ class Includes:
for value in node.values():
self._process_value(
- value,
- included=included,
- current_loader=current_loader,
- only_local=only_local,
+ value, included=included, current_loader=current_loader, only_local=only_local,
)
# _include_file()
@@ -126,12 +110,7 @@ class Includes:
file_path = os.path.join(directory, include)
key = (current_loader, file_path)
if key not in self._loaded:
- self._loaded[key] = _yaml.load(
- file_path,
- shortname=shortname,
- project=project,
- copy_tree=self._copy_tree,
- )
+ self._loaded[key] = _yaml.load(file_path, shortname=shortname, project=project, copy_tree=self._copy_tree,)
return self._loaded[key], file_path, current_loader
# _process_value()
@@ -143,23 +122,15 @@ class Includes:
# included (set): Fail for recursion if trying to load any files in this set
# current_loader (Loader): Use alternative loader (for junction files)
# only_local (bool): Whether to ignore junction files
- def _process_value(
- self, value, *, included=set(), current_loader=None, only_local=False
- ):
+ def _process_value(self, value, *, included=set(), current_loader=None, only_local=False):
value_type = type(value)
if value_type is MappingNode:
self.process(
- value,
- included=included,
- current_loader=current_loader,
- only_local=only_local,
+ value, included=included, current_loader=current_loader, only_local=only_local,
)
elif value_type is SequenceNode:
for v in value:
self._process_value(
- v,
- included=included,
- current_loader=current_loader,
- only_local=only_local,
+ v, included=included, current_loader=current_loader, only_local=only_local,
)
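
A rough sketch of the include expansion guarded in _includes.py above: a "(@)" key lists files to splice in, and an `included` set detects recursive includes. Plain dicts and a load_file callback stand in for BuildStream's MappingNode types and loaders, so this only demonstrates the recursion guard, not the composition step.

```python
def process_includes(node, load_file, included=None):
    included = set() if included is None else included
    includes = node.pop("(@)", [])
    if isinstance(includes, str):
        includes = [includes]
    for file_path in includes:
        if file_path in included:
            raise RuntimeError("trying to recursively include {}".format(file_path))
        include_node = load_file(file_path)
        included.add(file_path)
        try:
            # Recurse into the included file with the guard set still held
            process_includes(include_node, load_file, included=included)
        finally:
            included.remove(file_path)
        # A real implementation would now compose include_node into node.
```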
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index d703bd711..729b3c5e8 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -74,9 +74,7 @@ class Loader:
self._context = context
self._options = project.options # Project options (OptionPool)
self._basedir = basedir # Base project directory
- self._first_pass_options = (
- project.first_pass_config.options
- ) # Project options (OptionPool)
+ self._first_pass_options = project.first_pass_config.options # Project options (OptionPool)
self._parent = parent # The parent loader
self._fetch_subprojects = fetch_subprojects
@@ -101,9 +99,7 @@ class Loader:
# Raises: LoadError
#
# Returns: The toplevel LoadElement
- def load(
- self, targets, task, rewritable=False, ticker=None, ignore_workspaces=False
- ):
+ def load(self, targets, task, rewritable=False, ticker=None, ignore_workspaces=False):
for filename in targets:
if os.path.isabs(filename):
@@ -111,9 +107,7 @@ class Loader:
# Expect that the caller gives us the right thing at least ?
raise LoadError(
"Target '{}' was not specified as a relative "
- "path to the base project directory: {}".format(
- filename, self._basedir
- ),
+ "path to the base project directory: {}".format(filename, self._basedir),
LoadErrorReason.INVALID_DATA,
)
@@ -155,11 +149,7 @@ class Loader:
# Finally, wrap what we have into LoadElements and return the target
#
- ret.append(
- loader._collect_element(
- element, task, ignore_workspaces=ignore_workspaces
- )
- )
+ ret.append(loader._collect_element(element, task, ignore_workspaces=ignore_workspaces))
self._clean_caches()
@@ -241,9 +231,7 @@ class Loader:
# Load the data and process any conditional statements therein
fullpath = os.path.join(self._basedir, filename)
try:
- node = _yaml.load(
- fullpath, shortname=filename, copy_tree=rewritable, project=self.project
- )
+ node = _yaml.load(fullpath, shortname=filename, copy_tree=rewritable, project=self.project)
except LoadError as e:
if e.reason == LoadErrorReason.MISSING_FILE:
@@ -252,9 +240,7 @@ class Loader:
filename, self.project.junction.name
)
else:
- message = "Could not find element '{}' in elements directory '{}'".format(
- filename, self._basedir
- )
+ message = "Could not find element '{}' in elements directory '{}'".format(filename, self._basedir)
if provenance:
message = "{}: {}".format(provenance, message)
@@ -265,14 +251,10 @@ class Loader:
detail = None
elements_dir = os.path.relpath(self._basedir, self.project.directory)
element_relpath = os.path.relpath(filename, elements_dir)
- if filename.startswith(elements_dir) and os.path.exists(
- os.path.join(self._basedir, element_relpath)
- ):
+ if filename.startswith(elements_dir) and os.path.exists(os.path.join(self._basedir, element_relpath)):
detail = "Did you mean '{}'?".format(element_relpath)
- raise LoadError(
- message, LoadErrorReason.MISSING_FILE, detail=detail
- ) from e
+ raise LoadError(message, LoadErrorReason.MISSING_FILE, detail=detail) from e
if e.reason == LoadErrorReason.LOADING_DIRECTORY:
# If a <directory>.bst file exists in the element path,
@@ -284,9 +266,7 @@ class Loader:
if os.path.exists(os.path.join(self._basedir, filename + ".bst")):
element_name = filename + ".bst"
detail = "Did you mean '{}'?\n".format(element_name)
- raise LoadError(
- message, LoadErrorReason.LOADING_DIRECTORY, detail=detail
- ) from e
+ raise LoadError(message, LoadErrorReason.LOADING_DIRECTORY, detail=detail) from e
# Otherwise, we don't know the reason, so just raise
raise
@@ -355,14 +335,9 @@ class Loader:
if dep.junction:
self._load_file(dep.junction, rewritable, ticker, dep.provenance)
loader = self._get_loader(
- dep.junction,
- rewritable=rewritable,
- ticker=ticker,
- provenance=dep.provenance,
- )
- dep_element = loader._load_file(
- dep.name, rewritable, ticker, dep.provenance
+ dep.junction, rewritable=rewritable, ticker=ticker, provenance=dep.provenance,
)
+ dep_element = loader._load_file(dep.name, rewritable, ticker, dep.provenance)
else:
dep_element = self._elements.get(dep.name)
@@ -370,19 +345,14 @@ class Loader:
# The loader does not have this available so we need to
# either recursively cause it to be loaded, or else we
# need to push this onto the loader queue in this loader
- dep_element = self._load_file_no_deps(
- dep.name, rewritable, dep.provenance
- )
+ dep_element = self._load_file_no_deps(dep.name, rewritable, dep.provenance)
dep_deps = extract_depends_from_node(dep_element.node)
loader_queue.append((dep_element, list(reversed(dep_deps)), []))
# Pylint is not very happy about Cython and can't understand 'node' is a 'MappingNode'
- if (
- dep_element.node.get_str(Symbol.KIND) == "junction"
- ): # pylint: disable=no-member
+ if dep_element.node.get_str(Symbol.KIND) == "junction": # pylint: disable=no-member
raise LoadError(
- "{}: Cannot depend on junction".format(dep.provenance),
- LoadErrorReason.INVALID_DATA,
+ "{}: Cannot depend on junction".format(dep.provenance), LoadErrorReason.INVALID_DATA,
)
# All is well, push the dependency onto the LoadElement
@@ -429,16 +399,12 @@ class Loader:
# Create `chain`, the loop of element dependencies from this
# element back to itself, by trimming everything before this
# element from the sequence under consideration.
- chain = [
- element.full_name
- for element in sequence[sequence.index(element) :]
- ]
+ chain = [element.full_name for element in sequence[sequence.index(element) :]]
chain.append(element.full_name)
raise LoadError(
- (
- "Circular dependency detected at element: {}\n"
- + "Dependency chain: {}"
- ).format(element.full_name, " -> ".join(chain)),
+ ("Circular dependency detected at element: {}\n" + "Dependency chain: {}").format(
+ element.full_name, " -> ".join(chain)
+ ),
LoadErrorReason.CIRCULAR_DEPENDENCY,
)
if element not in validated:
@@ -488,9 +454,7 @@ class Loader:
if workspace and not ignore_workspaces:
workspace_node = {"kind": "workspace"}
workspace_node["path"] = workspace.get_absolute_path()
- workspace_node["ref"] = str(
- workspace.to_dict().get("last_successful", "ignored")
- )
+ workspace_node["ref"] = str(workspace.to_dict().get("last_successful", "ignored"))
node[Symbol.SOURCES] = [workspace_node]
skip_workspace = False
@@ -507,9 +471,7 @@ class Loader:
directory = source.get_str(Symbol.DIRECTORY, default=None)
if directory:
del source[Symbol.DIRECTORY]
- meta_source = MetaSource(
- element.name, index, element_kind, kind, source, directory
- )
+ meta_source = MetaSource(element.name, index, element_kind, kind, source, directory)
meta_sources.append(meta_source)
meta_element = MetaElement(
@@ -548,11 +510,7 @@ class Loader:
#
def _collect_element(self, top_element, task, ignore_workspaces=False):
element_queue = [top_element]
- meta_element_queue = [
- self._collect_element_no_deps(
- top_element, task, ignore_workspaces=ignore_workspaces
- )
- ]
+ meta_element_queue = [self._collect_element_no_deps(top_element, task, ignore_workspaces=ignore_workspaces)]
while element_queue:
element = element_queue.pop()
@@ -569,9 +527,7 @@ class Loader:
name = dep.element.name
if name not in loader._meta_elements:
- meta_dep = loader._collect_element_no_deps(
- dep.element, task, ignore_workspaces=ignore_workspaces
- )
+ meta_dep = loader._collect_element_no_deps(dep.element, task, ignore_workspaces=ignore_workspaces)
element_queue.append(dep.element)
meta_element_queue.append(meta_dep)
else:
@@ -598,9 +554,7 @@ class Loader:
# Raises: LoadError
#
# Returns: A Loader or None if specified junction does not exist
- def _get_loader(
- self, filename, *, rewritable=False, ticker=None, level=0, provenance=None
- ):
+ def _get_loader(self, filename, *, rewritable=False, ticker=None, level=0, provenance=None):
provenance_str = ""
if provenance is not None:
@@ -626,11 +580,7 @@ class Loader:
# junctions in the parent take precedence over junctions defined
# in subprojects
loader = self._parent._get_loader(
- filename,
- rewritable=rewritable,
- ticker=ticker,
- level=level + 1,
- provenance=provenance,
+ filename, rewritable=rewritable, ticker=ticker, level=level + 1, provenance=provenance,
)
if loader:
self._loaders[filename] = loader
@@ -662,14 +612,10 @@ class Loader:
#
# Any task counting *inside* the junction will be handled by
# its loader.
- meta_element = self._collect_element_no_deps(
- self._elements[filename], _NO_PROGRESS
- )
+ meta_element = self._collect_element_no_deps(self._elements[filename], _NO_PROGRESS)
if meta_element.kind != "junction":
raise LoadError(
- "{}{}: Expected junction but element kind is {}".format(
- provenance_str, filename, meta_element.kind
- ),
+ "{}{}: Expected junction but element kind is {}".format(provenance_str, filename, meta_element.kind),
LoadErrorReason.INVALID_DATA,
)
@@ -688,8 +634,7 @@ class Loader:
# but since we haven't loaded those yet that's impossible.
if self._elements[filename].dependencies:
raise LoadError(
- "Dependencies are forbidden for 'junction' elements",
- LoadErrorReason.INVALID_JUNCTION,
+ "Dependencies are forbidden for 'junction' elements", LoadErrorReason.INVALID_JUNCTION,
)
element = Element._new_from_meta(meta_element)
@@ -699,28 +644,17 @@ class Loader:
# find loader for that project.
if element.target:
subproject_loader = self._get_loader(
- element.target_junction,
- rewritable=rewritable,
- ticker=ticker,
- level=level,
- provenance=provenance,
+ element.target_junction, rewritable=rewritable, ticker=ticker, level=level, provenance=provenance,
)
loader = subproject_loader._get_loader(
- element.target_element,
- rewritable=rewritable,
- ticker=ticker,
- level=level,
- provenance=provenance,
+ element.target_element, rewritable=rewritable, ticker=ticker, level=level, provenance=provenance,
)
self._loaders[filename] = loader
return loader
# Handle the case where a subproject needs to be fetched
#
- if (
- element._get_consistency() >= Consistency.RESOLVED
- and not element._source_cached()
- ):
+ if element._get_consistency() >= Consistency.RESOLVED and not element._source_cached():
if ticker:
ticker(filename, "Fetching subproject")
self._fetch_subprojects([element])
@@ -728,13 +662,9 @@ class Loader:
# Handle the case where a subproject has no ref
#
elif element._get_consistency() == Consistency.INCONSISTENT:
- detail = "Try tracking the junction element with `bst source track {}`".format(
- filename
- )
+ detail = "Try tracking the junction element with `bst source track {}`".format(filename)
raise LoadError(
- "{}Subproject has no ref for junction: {}".format(
- provenance_str, filename
- ),
+ "{}Subproject has no ref for junction: {}".format(provenance_str, filename),
LoadErrorReason.SUBPROJECT_INCONSISTENT,
detail=detail,
)
@@ -747,11 +677,7 @@ class Loader:
# Stage sources
element._set_required()
basedir = os.path.join(
- self.project.directory,
- ".bst",
- "staged-junctions",
- filename,
- element._get_cache_key(),
+ self.project.directory, ".bst", "staged-junctions", filename, element._get_cache_key(),
)
if not os.path.exists(basedir):
os.makedirs(basedir, exist_ok=True)
@@ -773,17 +699,12 @@ class Loader:
except LoadError as e:
if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
message = (
- provenance_str
- + "Could not find the project.conf file in the project "
+ provenance_str + "Could not find the project.conf file in the project "
"referred to by junction element '{}'.".format(element.name)
)
if element.path:
- message += " Was expecting it at path '{}' in the junction's source.".format(
- element.path
- )
- raise LoadError(
- message=message, reason=LoadErrorReason.INVALID_JUNCTION
- ) from e
+ message += " Was expecting it at path '{}' in the junction's source.".format(element.path)
+ raise LoadError(message=message, reason=LoadErrorReason.INVALID_JUNCTION) from e
# Otherwise, we don't know the reason, so just raise
raise
@@ -817,9 +738,7 @@ class Loader:
return None, junction_path[-1], self
else:
self._load_file(junction_path[-2], rewritable, ticker)
- loader = self._get_loader(
- junction_path[-2], rewritable=rewritable, ticker=ticker
- )
+ loader = self._get_loader(junction_path[-2], rewritable=rewritable, ticker=ticker)
return junction_path[-2], junction_path[-1], loader
# Print a warning message, checks warning_token against project configuration
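
A sketch of the circular-dependency report built in loader.py above: when an element reappears in the traversal sequence, the chain from its first occurrence back to itself is formatted into the error. Plain name strings stand in for LoadElement objects, and the error type is simplified.

```python
def assert_no_cycle(sequence, element):
    # `sequence` is the dependency path walked so far; a repeat means a cycle.
    if element not in sequence:
        return
    chain = [name for name in sequence[sequence.index(element):]]
    chain.append(element)
    raise RuntimeError(
        ("Circular dependency detected at element: {}\n" + "Dependency chain: {}").format(
            element, " -> ".join(chain)
        )
    )
```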
diff --git a/src/buildstream/_loader/metasource.py b/src/buildstream/_loader/metasource.py
index bb83a6bc8..5466d3aa5 100644
--- a/src/buildstream/_loader/metasource.py
+++ b/src/buildstream/_loader/metasource.py
@@ -32,9 +32,7 @@ class MetaSource:
# config: The configuration data for the source
# first_pass: This source will be used with first project pass configuration (used for junctions).
#
- def __init__(
- self, element_name, element_index, element_kind, kind, config, directory
- ):
+ def __init__(self, element_name, element_index, element_kind, kind, config, directory):
self.element_name = element_name
self.element_index = element_index
self.element_kind = element_kind
diff --git a/src/buildstream/_message.py b/src/buildstream/_message.py
index 79d71441c..d18590885 100644
--- a/src/buildstream/_message.py
+++ b/src/buildstream/_message.py
@@ -69,19 +69,13 @@ class Message:
):
self.message_type = message_type # Message type
self.message = message # The message string
- self.element_name = (
- element_name # The instance element name of the issuing plugin
- )
+ self.element_name = element_name # The instance element name of the issuing plugin
self.element_key = element_key # The display key of the issuing plugin element
self.detail = detail # An additional detail string
- self.action_name = (
- action_name # Name of the task queue (fetch, refresh, build, etc)
- )
+ self.action_name = action_name # Name of the task queue (fetch, refresh, build, etc)
self.elapsed = elapsed # The elapsed time, in timed messages
self.logfile = logfile # The log file path where commands took place
- self.sandbox = (
- sandbox # Whether the error that caused this message used a sandbox
- )
+ self.sandbox = sandbox # Whether the error that caused this message used a sandbox
self.pid = os.getpid() # The process pid
self.scheduler = scheduler # Whether this is a scheduler level message
self.creation_time = datetime.datetime.now()
diff --git a/src/buildstream/_messenger.py b/src/buildstream/_messenger.py
index 687d64ebf..bf27f5620 100644
--- a/src/buildstream/_messenger.py
+++ b/src/buildstream/_messenger.py
@@ -159,18 +159,11 @@ class Messenger:
# silent_nested (bool): If True, all but _message.unconditional_messages are silenced
#
@contextmanager
- def timed_activity(
- self, activity_name, *, element_name=None, detail=None, silent_nested=False
- ):
+ def timed_activity(self, activity_name, *, element_name=None, detail=None, silent_nested=False):
with self._timed_suspendable() as timedata:
try:
# Push activity depth for status messages
- message = Message(
- MessageType.START,
- activity_name,
- detail=detail,
- element_name=element_name,
- )
+ message = Message(MessageType.START, activity_name, detail=detail, element_name=element_name,)
self.message(message)
with self.silence(actually_silence=silent_nested):
yield
@@ -179,22 +172,12 @@ class Messenger:
# Note the failure in status messages and reraise, the scheduler
# expects an error when there is an error.
elapsed = datetime.datetime.now() - timedata.start_time
- message = Message(
- MessageType.FAIL,
- activity_name,
- elapsed=elapsed,
- element_name=element_name,
- )
+ message = Message(MessageType.FAIL, activity_name, elapsed=elapsed, element_name=element_name,)
self.message(message)
raise
elapsed = datetime.datetime.now() - timedata.start_time
- message = Message(
- MessageType.SUCCESS,
- activity_name,
- elapsed=elapsed,
- element_name=element_name,
- )
+ message = Message(MessageType.SUCCESS, activity_name, elapsed=elapsed, element_name=element_name,)
self.message(message)
# simple_task()
@@ -211,14 +194,10 @@ class Messenger:
# Task: A Task object that represents this activity, principally used to report progress
#
@contextmanager
- def simple_task(
- self, activity_name, *, element_name=None, full_name=None, silent_nested=False
- ):
+ def simple_task(self, activity_name, *, element_name=None, full_name=None, silent_nested=False):
# Bypass use of State when none exists (e.g. tests)
if not self._state:
- with self.timed_activity(
- activity_name, element_name=element_name, silent_nested=silent_nested
- ):
+ with self.timed_activity(activity_name, element_name=element_name, silent_nested=silent_nested):
yield
return
@@ -227,9 +206,7 @@ class Messenger:
with self._timed_suspendable() as timedata:
try:
- message = Message(
- MessageType.START, activity_name, element_name=element_name
- )
+ message = Message(MessageType.START, activity_name, element_name=element_name)
self.message(message)
task = self._state.add_task(activity_name, full_name)
@@ -243,12 +220,7 @@ class Messenger:
except BstError:
elapsed = datetime.datetime.now() - timedata.start_time
- message = Message(
- MessageType.FAIL,
- activity_name,
- elapsed=elapsed,
- element_name=element_name,
- )
+ message = Message(MessageType.FAIL, activity_name, elapsed=elapsed, element_name=element_name,)
self.message(message)
raise
finally:
@@ -262,17 +234,11 @@ class Messenger:
if task.current_progress is not None and elapsed > _DISPLAY_LIMIT:
if task.maximum_progress is not None:
- detail = "{} of {} subtasks processed".format(
- task.current_progress, task.maximum_progress
- )
+ detail = "{} of {} subtasks processed".format(task.current_progress, task.maximum_progress)
else:
detail = "{} subtasks processed".format(task.current_progress)
message = Message(
- MessageType.SUCCESS,
- activity_name,
- elapsed=elapsed,
- detail=detail,
- element_name=element_name,
+ MessageType.SUCCESS, activity_name, elapsed=elapsed, detail=detail, element_name=element_name,
)
self.message(message)
@@ -308,9 +274,7 @@ class Messenger:
# Create the fully qualified logfile in the log directory,
# appending the pid and .log extension at the end.
- self._log_filename = os.path.join(
- logdir, "{}.{}.log".format(filename, os.getpid())
- )
+ self._log_filename = os.path.join(logdir, "{}.{}.log".format(filename, os.getpid()))
# Ensure the directory exists first
directory = os.path.dirname(self._log_filename)
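
A hedged sketch of the timed_activity pattern shown in the _messenger.py hunks above: a context manager emits START, then SUCCESS with the elapsed time, or FAIL and re-raises on error. The emit callback is an assumption standing in for Messenger.message() and the Message/MessageType machinery.

```python
import datetime
from contextlib import contextmanager


@contextmanager
def timed_activity(emit, activity_name):
    start_time = datetime.datetime.now()
    emit("START", activity_name, None)
    try:
        yield
    except Exception:
        # Note the failure with its elapsed time and re-raise for the caller
        emit("FAIL", activity_name, datetime.datetime.now() - start_time)
        raise
    emit("SUCCESS", activity_name, datetime.datetime.now() - start_time)
```

For example, `with timed_activity(lambda *a: print(*a), "Fetching"): ...` prints a START line, then a SUCCESS or FAIL line with the elapsed time.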
diff --git a/src/buildstream/_options/option.py b/src/buildstream/_options/option.py
index f039ca28a..71d2f12f3 100644
--- a/src/buildstream/_options/option.py
+++ b/src/buildstream/_options/option.py
@@ -67,9 +67,7 @@ class Option:
# Assert valid symbol name for variable name
if self.variable is not None:
- _assert_symbol_name(
- self.variable, "variable name", ref_node=node.get_node("variable")
- )
+ _assert_symbol_name(self.variable, "variable name", ref_node=node.get_node("variable"))
# load_value()
#
diff --git a/src/buildstream/_options/optionarch.py b/src/buildstream/_options/optionarch.py
index ed7656ea3..2d663f0ef 100644
--- a/src/buildstream/_options/optionarch.py
+++ b/src/buildstream/_options/optionarch.py
@@ -54,16 +54,12 @@ class OptionArch(OptionEnum):
# Do not terminate the loop early to ensure we validate
# all values in the list.
except PlatformError as e:
- provenance = (
- node.get_sequence("values").scalar_at(index).get_provenance()
- )
+ provenance = node.get_sequence("values").scalar_at(index).get_provenance()
prefix = ""
if provenance:
prefix = "{}: ".format(provenance)
raise LoadError(
- "{}Invalid value for {} option '{}': {}".format(
- prefix, self.OPTION_TYPE, self.name, e
- ),
+ "{}Invalid value for {} option '{}': {}".format(prefix, self.OPTION_TYPE, self.name, e),
LoadErrorReason.INVALID_DATA,
)
diff --git a/src/buildstream/_options/optionbool.py b/src/buildstream/_options/optionbool.py
index d8201de51..c0c1271e9 100644
--- a/src/buildstream/_options/optionbool.py
+++ b/src/buildstream/_options/optionbool.py
@@ -48,8 +48,7 @@ class OptionBool(Option):
self.value = False
else:
raise LoadError(
- "Invalid value for boolean option {}: {}".format(self.name, value),
- LoadErrorReason.INVALID_DATA,
+ "Invalid value for boolean option {}: {}".format(self.name, value), LoadErrorReason.INVALID_DATA,
)
def get_value(self):
diff --git a/src/buildstream/_options/optionenum.py b/src/buildstream/_options/optionenum.py
index 80d0fa156..d30f45696 100644
--- a/src/buildstream/_options/optionenum.py
+++ b/src/buildstream/_options/optionenum.py
@@ -81,9 +81,7 @@ class OptionEnum(Option):
else:
prefix = ""
raise LoadError(
- "{}Invalid value for {} option '{}': {}\n".format(
- prefix, self.OPTION_TYPE, self.name, value
- )
+ "{}Invalid value for {} option '{}': {}\n".format(prefix, self.OPTION_TYPE, self.name, value)
+ "Valid values: {}".format(", ".join(self.values)),
LoadErrorReason.INVALID_DATA,
)
diff --git a/src/buildstream/_options/optionflags.py b/src/buildstream/_options/optionflags.py
index 5977930d4..82ede5649 100644
--- a/src/buildstream/_options/optionflags.py
+++ b/src/buildstream/_options/optionflags.py
@@ -90,9 +90,7 @@ class OptionFlags(Option):
else:
prefix = ""
raise LoadError(
- "{}Invalid value for flags option '{}': {}\n".format(
- prefix, self.name, value
- )
+ "{}Invalid value for flags option '{}': {}\n".format(prefix, self.name, value)
+ "Valid values: {}".format(", ".join(self.values)),
LoadErrorReason.INVALID_DATA,
)
diff --git a/src/buildstream/_options/optionpool.py b/src/buildstream/_options/optionpool.py
index 3b58a5904..aa1c62a4f 100644
--- a/src/buildstream/_options/optionpool.py
+++ b/src/buildstream/_options/optionpool.py
@@ -88,10 +88,7 @@ class OptionPool:
# Assert that the option name is a valid symbol
_assert_symbol_name(
- option_name,
- "option name",
- ref_node=option_definition,
- allow_dashes=False,
+ option_name, "option name", ref_node=option_definition, allow_dashes=False,
)
opt_type_name = option_definition.get_enum("type", OptionTypes)
@@ -115,8 +112,7 @@ class OptionPool:
except KeyError as e:
p = option_value.get_provenance()
raise LoadError(
- "{}: Unknown option '{}' specified".format(p, option_name),
- LoadErrorReason.INVALID_DATA,
+ "{}: Unknown option '{}' specified".format(p, option_name), LoadErrorReason.INVALID_DATA,
) from e
option.load_value(node, transform=transform)
@@ -136,9 +132,7 @@ class OptionPool:
except KeyError as e:
if not ignore_unknown:
raise LoadError(
- "Unknown option '{}' specified on the command line".format(
- option_name
- ),
+ "Unknown option '{}' specified on the command line".format(option_name),
LoadErrorReason.INVALID_DATA,
) from e
else:
@@ -237,9 +231,7 @@ class OptionPool:
# Variables must be resolved at this point.
#
try:
- template_string = "{{% if {} %}} True {{% else %}} False {{% endif %}}".format(
- expression
- )
+ template_string = "{{% if {} %}} True {{% else %}} False {{% endif %}}".format(expression)
template = self._environment.from_string(template_string)
context = template.new_context(self._variables, shared=True)
result = template.root_render_func(context)
@@ -252,13 +244,11 @@ class OptionPool:
return False
else: # pragma: nocover
raise LoadError(
- "Failed to evaluate expression: {}".format(expression),
- LoadErrorReason.EXPRESSION_FAILED,
+ "Failed to evaluate expression: {}".format(expression), LoadErrorReason.EXPRESSION_FAILED,
)
except jinja2.exceptions.TemplateError as e:
raise LoadError(
- "Failed to evaluate expression ({}): {}".format(expression, e),
- LoadErrorReason.EXPRESSION_FAILED,
+ "Failed to evaluate expression ({}): {}".format(expression, e), LoadErrorReason.EXPRESSION_FAILED,
)
# Recursion assistent for lists, in case there
@@ -286,9 +276,7 @@ class OptionPool:
# it being overwritten by a later assertion which might also trigger.
if assertion is not None:
p = node.get_scalar("(!)").get_provenance()
- raise LoadError(
- "{}: {}".format(p, assertion.strip()), LoadErrorReason.USER_ASSERTION
- )
+ raise LoadError("{}: {}".format(p, assertion.strip()), LoadErrorReason.USER_ASSERTION)
if conditions is not None:
del node["(?)"]
@@ -298,9 +286,7 @@ class OptionPool:
if len(tuples) > 1:
provenance = condition.get_provenance()
raise LoadError(
- "{}: Conditional statement has more than one key".format(
- provenance
- ),
+ "{}: Conditional statement has more than one key".format(provenance),
LoadErrorReason.INVALID_DATA,
)
@@ -312,14 +298,10 @@ class OptionPool:
provenance = condition.get_provenance()
raise LoadError("{}: {}".format(provenance, e), e.reason) from e
- if (
- type(value) is not MappingNode
- ): # pylint: disable=unidiomatic-typecheck
+ if type(value) is not MappingNode: # pylint: disable=unidiomatic-typecheck
provenance = condition.get_provenance()
raise LoadError(
- "{}: Only values of type 'dict' can be composed.".format(
- provenance
- ),
+ "{}: Only values of type 'dict' can be composed.".format(provenance),
LoadErrorReason.ILLEGAL_COMPOSITE,
)
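
A sketch of the option expression evaluation reformatted in _optionpool.py above: the expression is wrapped in a tiny Jinja2 template rendering " True " or " False ", and the stripped output is mapped back to a bool. The error type here is simplified; BuildStream raises LoadError with an EXPRESSION_FAILED reason instead.

```python
import jinja2


def evaluate_condition(expression, variables):
    environment = jinja2.Environment(undefined=jinja2.StrictUndefined)
    template_string = "{{% if {} %}} True {{% else %}} False {{% endif %}}".format(expression)
    try:
        result = environment.from_string(template_string).render(variables).strip()
    except jinja2.exceptions.TemplateError as e:
        raise RuntimeError("Failed to evaluate expression ({}): {}".format(expression, e))
    if result == "True":
        return True
    if result == "False":
        return False
    raise RuntimeError("Failed to evaluate expression: {}".format(expression))
```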
diff --git a/src/buildstream/_pipeline.py b/src/buildstream/_pipeline.py
index ace93acef..9dac30bf7 100644
--- a/src/buildstream/_pipeline.py
+++ b/src/buildstream/_pipeline.py
@@ -103,18 +103,12 @@ class Pipeline:
# First concatenate all the lists for the loader's sake
targets = list(itertools.chain(*target_groups))
- with PROFILER.profile(
- Topics.LOAD_PIPELINE, "_".join(t.replace(os.sep, "-") for t in targets)
- ):
- elements = self._project.load_elements(
- targets, rewritable=rewritable, ignore_workspaces=ignore_workspaces
- )
+ with PROFILER.profile(Topics.LOAD_PIPELINE, "_".join(t.replace(os.sep, "-") for t in targets)):
+ elements = self._project.load_elements(targets, rewritable=rewritable, ignore_workspaces=ignore_workspaces)
# Now create element groups to match the input target groups
elt_iter = iter(elements)
- element_groups = [
- [next(elt_iter) for i in range(len(group))] for group in target_groups
- ]
+ element_groups = [[next(elt_iter) for i in range(len(group))] for group in target_groups]
return tuple(element_groups)
@@ -141,9 +135,7 @@ class Pipeline:
# targets (list of Element): The list of toplevel element targets
#
def resolve_elements(self, targets):
- with self._context.messenger.simple_task(
- "Resolving cached state", silent_nested=True
- ) as task:
+ with self._context.messenger.simple_task("Resolving cached state", silent_nested=True) as task:
# We need to go through the project to access the loader
if task:
task.set_maximum_progress(self._project.loader.loaded)
@@ -174,9 +166,7 @@ class Pipeline:
# targets (list [Element]): The list of element targets
#
def check_remotes(self, targets):
- with self._context.messenger.simple_task(
- "Querying remotes for cached status", silent_nested=True
- ) as task:
+ with self._context.messenger.simple_task("Querying remotes for cached status", silent_nested=True) as task:
task.set_maximum_progress(len(targets))
for element in targets:
@@ -219,9 +209,7 @@ class Pipeline:
def plan(self, elements):
# Keep locally cached elements in the plan if remote artifact cache is used
# to allow pulling artifact with strict cache key, if available.
- plan_cached = (
- not self._context.get_strict() and self._artifacts.has_fetch_remotes()
- )
+ plan_cached = not self._context.get_strict() and self._artifacts.has_fetch_remotes()
return _Planner().plan(elements, plan_cached)
@@ -250,8 +238,7 @@ class Pipeline:
new_elm = t._get_source_element()
if new_elm != t and not silent:
self._message(
- MessageType.INFO,
- "Element '{}' redirected to '{}'".format(t.name, new_elm.name),
+ MessageType.INFO, "Element '{}' redirected to '{}'".format(t.name, new_elm.name),
)
if new_elm not in elements:
elements.append(new_elm)
@@ -307,11 +294,7 @@ class Pipeline:
# Build a list of 'intersection' elements, i.e. the set of
# elements that lie on the border closest to excepted elements
# between excepted and target elements.
- intersection = list(
- itertools.chain.from_iterable(
- find_intersection(element) for element in except_targets
- )
- )
+ intersection = list(itertools.chain.from_iterable(find_intersection(element) for element in except_targets))
# Now use this set of elements to traverse the targeted
# elements, except 'intersection' elements and their unique
@@ -432,30 +415,21 @@ class Pipeline:
if inconsistent:
detail = "Exact versions are missing for the following elements:\n\n"
for element in inconsistent:
- detail += " Element: {} is inconsistent\n".format(
- element._get_full_name()
- )
+ detail += " Element: {} is inconsistent\n".format(element._get_full_name())
for source in element.sources():
if source._get_consistency() == Consistency.INCONSISTENT:
detail += " {} is missing ref\n".format(source)
detail += "\n"
detail += "Try tracking these elements first with `bst source track`\n"
- raise PipelineError(
- "Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline"
- )
+ raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline")
if inconsistent_workspaced:
- detail = (
- "Some workspaces exist but are not closed\n"
- + "Try closing them with `bst workspace close`\n\n"
- )
+ detail = "Some workspaces exist but are not closed\n" + "Try closing them with `bst workspace close`\n\n"
for element in inconsistent_workspaced:
detail += " " + element._get_full_name() + "\n"
raise PipelineError(
- "Inconsistent pipeline",
- detail=detail,
- reason="inconsistent-pipeline-workspaced",
+ "Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline-workspaced",
)
# assert_sources_cached()
@@ -469,18 +443,13 @@ class Pipeline:
uncached = []
with self._context.messenger.timed_activity("Checking sources"):
for element in elements:
- if (
- element._get_consistency() < Consistency.CACHED
- and not element._source_cached()
- ):
+ if element._get_consistency() < Consistency.CACHED and not element._source_cached():
uncached.append(element)
if uncached:
detail = "Sources are not cached for the following elements:\n\n"
for element in uncached:
- detail += " Following sources for element: {} are not cached:\n".format(
- element._get_full_name()
- )
+ detail += " Following sources for element: {} are not cached:\n".format(element._get_full_name())
for source in element.sources():
if source._get_consistency() < Consistency.CACHED:
detail += " {}\n".format(source)
@@ -490,9 +459,7 @@ class Pipeline:
+ "or run this command with `--fetch` option\n"
)
- raise PipelineError(
- "Uncached sources", detail=detail, reason="uncached-sources"
- )
+ raise PipelineError("Uncached sources", detail=detail, reason="uncached-sources")
#############################################################
# Private Methods #
@@ -541,9 +508,7 @@ class Pipeline:
+ "in a project which does not use project.refs ref-storage."
)
- raise PipelineError(
- "Untrackable sources", detail=detail, reason="untrackable-sources"
- )
+ raise PipelineError("Untrackable sources", detail=detail, reason="untrackable-sources")
# _message()
#
@@ -601,8 +566,4 @@ class _Planner:
for index, item in enumerate(depth_sorted):
item[0]._set_depth(index)
- return [
- item[0]
- for item in depth_sorted
- if plan_cached or not item[0]._cached_success()
- ]
+ return [item[0] for item in depth_sorted if plan_cached or not item[0]._cached_success()]
diff --git a/src/buildstream/_platform/darwin.py b/src/buildstream/_platform/darwin.py
index adc858842..06491e8b4 100644
--- a/src/buildstream/_platform/darwin.py
+++ b/src/buildstream/_platform/darwin.py
@@ -60,8 +60,7 @@ class Darwin(Platform):
@staticmethod
def _create_dummy_sandbox(*args, **kwargs):
kwargs["dummy_reason"] = (
- "OSXFUSE is not supported and there are no supported sandbox "
- + "technologies for MacOS at this time"
+ "OSXFUSE is not supported and there are no supported sandbox " + "technologies for MacOS at this time"
)
return SandboxDummy(*args, **kwargs)
diff --git a/src/buildstream/_platform/platform.py b/src/buildstream/_platform/platform.py
index ebac66843..1fddbe82c 100644
--- a/src/buildstream/_platform/platform.py
+++ b/src/buildstream/_platform/platform.py
@@ -114,9 +114,7 @@ class Platform:
elif backend == "win32":
from .win32 import Win32 as PlatformImpl # pylint: disable=cyclic-import
elif backend == "fallback":
- from .fallback import (
- Fallback as PlatformImpl,
- ) # pylint: disable=cyclic-import
+ from .fallback import Fallback as PlatformImpl # pylint: disable=cyclic-import
else:
raise PlatformError("No such platform: '{}'".format(backend))
@@ -212,17 +210,11 @@ class Platform:
# (Sandbox) A sandbox
#
def create_sandbox(self, *args, **kwargs):
- raise ImplError(
- "Platform {platform} does not implement create_sandbox()".format(
- platform=type(self).__name__
- )
- )
+ raise ImplError("Platform {platform} does not implement create_sandbox()".format(platform=type(self).__name__))
def check_sandbox_config(self, config):
raise ImplError(
- "Platform {platform} does not implement check_sandbox_config()".format(
- platform=type(self).__name__
- )
+ "Platform {platform} does not implement check_sandbox_config()".format(platform=type(self).__name__)
)
def maximize_open_file_limit(self):
@@ -243,7 +235,5 @@ class Platform:
def _setup_dummy_sandbox(self):
raise ImplError(
- "Platform {platform} does not implement _setup_dummy_sandbox()".format(
- platform=type(self).__name__
- )
+ "Platform {platform} does not implement _setup_dummy_sandbox()".format(platform=type(self).__name__)
)
diff --git a/src/buildstream/_platform/win32.py b/src/buildstream/_platform/win32.py
index a6aaf1662..a2529d8f6 100644
--- a/src/buildstream/_platform/win32.py
+++ b/src/buildstream/_platform/win32.py
@@ -49,9 +49,7 @@ class Win32(Platform):
@staticmethod
def _create_dummy_sandbox(*args, **kwargs):
- kwargs[
- "dummy_reason"
- ] = "There are no supported sandbox technologies for Win32 at this time."
+ kwargs["dummy_reason"] = "There are no supported sandbox technologies for Win32 at this time."
return SandboxDummy(*args, **kwargs)
def _setup_dummy_sandbox(self):
diff --git a/src/buildstream/_plugincontext.py b/src/buildstream/_plugincontext.py
index f542f6fd4..95ac192dc 100644
--- a/src/buildstream/_plugincontext.py
+++ b/src/buildstream/_plugincontext.py
@@ -42,15 +42,7 @@ from . import utils
# Pipelines.
#
class PluginContext:
- def __init__(
- self,
- plugin_base,
- base_type,
- site_plugin_path,
- *,
- plugin_origins=None,
- format_versions={}
- ):
+ def __init__(self, plugin_base, base_type, site_plugin_path, *, plugin_origins=None, format_versions={}):
# For pickling across processes, make sure this context has a unique
# identifier, which we prepend to the identifier of each PluginSource.
@@ -140,9 +132,7 @@ class PluginContext:
def _get_local_plugin_source(self, path):
if ("local", path) not in self._alternate_sources:
# key by a tuple to avoid collision
- source = self._plugin_base.make_plugin_source(
- searchpath=[path], identifier=self._identifier + path,
- )
+ source = self._plugin_base.make_plugin_source(searchpath=[path], identifier=self._identifier + path,)
# Ensure that sources never get garbage collected,
# as they'll take the plugins with them.
self._alternate_sources[("local", path)] = source
@@ -157,22 +147,12 @@ class PluginContext:
# key by a tuple to avoid collision
try:
- package = pkg_resources.get_entry_info(
- package_name, "buildstream.plugins", kind
- )
+ package = pkg_resources.get_entry_info(package_name, "buildstream.plugins", kind)
except pkg_resources.DistributionNotFound as e:
- raise PluginError(
- "Failed to load {} plugin '{}': {}".format(
- self._base_type.__name__, kind, e
- )
- ) from e
+ raise PluginError("Failed to load {} plugin '{}': {}".format(self._base_type.__name__, kind, e)) from e
if package is None:
- raise PluginError(
- "Pip package {} does not contain a plugin named '{}'".format(
- package_name, kind
- )
- )
+ raise PluginError("Pip package {} does not contain a plugin named '{}'".format(package_name, kind))
location = package.dist.get_resource_filename(
pkg_resources._manager, package.module_name.replace(".", os.sep) + ".py"
@@ -182,16 +162,14 @@ class PluginContext:
# may need to extract the file.
try:
defaults = package.dist.get_resource_filename(
- pkg_resources._manager,
- package.module_name.replace(".", os.sep) + ".yaml",
+ pkg_resources._manager, package.module_name.replace(".", os.sep) + ".yaml",
)
except KeyError:
# The plugin didn't have an accompanying YAML file
defaults = None
source = self._plugin_base.make_plugin_source(
- searchpath=[os.path.dirname(location)],
- identifier=self._identifier + os.path.dirname(location),
+ searchpath=[os.path.dirname(location)], identifier=self._identifier + os.path.dirname(location),
)
self._alternate_sources[("pip", package_name)] = source
@@ -221,9 +199,7 @@ class PluginContext:
else:
raise PluginError(
"Failed to load plugin '{}': "
- "Unexpected plugin origin '{}'".format(
- kind, origin.get_str("origin")
- )
+ "Unexpected plugin origin '{}'".format(kind, origin.get_str("origin"))
)
loaded_dependency = True
break
@@ -231,11 +207,7 @@ class PluginContext:
# Fall back to getting the source from site
if not source:
if kind not in self._site_source.list_plugins():
- raise PluginError(
- "No {} type registered for kind '{}'".format(
- self._base_type.__name__, kind
- )
- )
+ raise PluginError("No {} type registered for kind '{}'".format(self._base_type.__name__, kind))
source = self._site_source
@@ -257,25 +229,17 @@ class PluginContext:
defaults = os.path.join(plugin_dir, plugin_conf_name)
except ImportError as e:
- raise PluginError(
- "Failed to load {} plugin '{}': {}".format(
- self._base_type.__name__, kind, e
- )
- ) from e
+ raise PluginError("Failed to load {} plugin '{}': {}".format(self._base_type.__name__, kind, e)) from e
try:
plugin_type = plugin.setup()
except AttributeError as e:
raise PluginError(
- "{} plugin '{}' did not provide a setup() function".format(
- self._base_type.__name__, kind
- )
+ "{} plugin '{}' did not provide a setup() function".format(self._base_type.__name__, kind)
) from e
except TypeError as e:
raise PluginError(
- "setup symbol in {} plugin '{}' is not a function".format(
- self._base_type.__name__, kind
- )
+ "setup symbol in {} plugin '{}' is not a function".format(self._base_type.__name__, kind)
) from e
self._assert_plugin(kind, plugin_type)
@@ -286,18 +250,13 @@ class PluginContext:
if kind in self._types:
raise PluginError(
"Tried to register {} plugin for existing kind '{}' "
- "(already registered {})".format(
- self._base_type.__name__, kind, self._types[kind].__name__
- )
+ "(already registered {})".format(self._base_type.__name__, kind, self._types[kind].__name__)
)
try:
if not issubclass(plugin_type, self._base_type):
raise PluginError(
"{} plugin '{}' returned type '{}', which is not a subclass of {}".format(
- self._base_type.__name__,
- kind,
- plugin_type.__name__,
- self._base_type.__name__,
+ self._base_type.__name__, kind, plugin_type.__name__, self._base_type.__name__,
)
)
except TypeError as e:
diff --git a/src/buildstream/_profile.py b/src/buildstream/_profile.py
index b8a9537a8..854c26e10 100644
--- a/src/buildstream/_profile.py
+++ b/src/buildstream/_profile.py
@@ -62,9 +62,7 @@ class _Profile:
filename_template = os.path.join(
os.getcwd(),
"profile-{}-{}".format(
- datetime.datetime.fromtimestamp(self.start_time).strftime(
- "%Y%m%dT%H%M%S"
- ),
+ datetime.datetime.fromtimestamp(self.start_time).strftime("%Y%m%dT%H%M%S"),
self.key.replace("/", "-").replace(".", "-"),
),
)
@@ -100,9 +98,7 @@ class _Profile:
)
with open(self.log_filename, "a") as fp:
- stats = pstats.Stats(
- self.profiler, *self._additional_pstats_files, stream=fp
- )
+ stats = pstats.Stats(self.profiler, *self._additional_pstats_files, stream=fp)
# Create the log file
fp.write(heading)
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index bc361d288..812d96d5a 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -118,10 +118,7 @@ class Project:
self._context = context # The invocation Context, a private member
if search_for_project:
- (
- self.directory,
- self._invoked_from_workspace_element,
- ) = self._find_project_dir(directory)
+ (self.directory, self._invoked_from_workspace_element,) = self._find_project_dir(directory)
else:
self.directory = directory
self._invoked_from_workspace_element = None
@@ -270,16 +267,14 @@ class Project:
if full_path.is_symlink():
provenance = node.get_provenance()
raise LoadError(
- "{}: Specified path '{}' must not point to "
- "symbolic links ".format(provenance, path_str),
+ "{}: Specified path '{}' must not point to " "symbolic links ".format(provenance, path_str),
LoadErrorReason.PROJ_PATH_INVALID_KIND,
)
if path.parts and path.parts[0] == "..":
provenance = node.get_provenance()
raise LoadError(
- "{}: Specified path '{}' first component must "
- "not be '..'".format(provenance, path_str),
+ "{}: Specified path '{}' first component must " "not be '..'".format(provenance, path_str),
LoadErrorReason.PROJ_PATH_INVALID,
)
@@ -287,14 +282,11 @@ class Project:
if sys.version_info[0] == 3 and sys.version_info[1] < 6:
full_resolved_path = full_path.resolve()
else:
- full_resolved_path = full_path.resolve(
- strict=True
- ) # pylint: disable=unexpected-keyword-arg
+ full_resolved_path = full_path.resolve(strict=True) # pylint: disable=unexpected-keyword-arg
except FileNotFoundError:
provenance = node.get_provenance()
raise LoadError(
- "{}: Specified path '{}' does not exist".format(provenance, path_str),
- LoadErrorReason.MISSING_FILE,
+ "{}: Specified path '{}' does not exist".format(provenance, path_str), LoadErrorReason.MISSING_FILE,
)
is_inside = self._absolute_directory_path in full_resolved_path.parents or (
@@ -313,37 +305,28 @@ class Project:
provenance = node.get_provenance()
raise LoadError(
"{}: Absolute path: '{}' invalid.\n"
- "Please specify a path relative to the project's root.".format(
- provenance, path
- ),
+ "Please specify a path relative to the project's root.".format(provenance, path),
LoadErrorReason.PROJ_PATH_INVALID,
)
- if full_resolved_path.is_socket() or (
- full_resolved_path.is_fifo() or full_resolved_path.is_block_device()
- ):
+ if full_resolved_path.is_socket() or (full_resolved_path.is_fifo() or full_resolved_path.is_block_device()):
provenance = node.get_provenance()
raise LoadError(
- "{}: Specified path '{}' points to an unsupported "
- "file kind".format(provenance, path_str),
+ "{}: Specified path '{}' points to an unsupported " "file kind".format(provenance, path_str),
LoadErrorReason.PROJ_PATH_INVALID_KIND,
)
if check_is_file and not full_resolved_path.is_file():
provenance = node.get_provenance()
raise LoadError(
- "{}: Specified path '{}' is not a regular file".format(
- provenance, path_str
- ),
+ "{}: Specified path '{}' is not a regular file".format(provenance, path_str),
LoadErrorReason.PROJ_PATH_INVALID_KIND,
)
if check_is_dir and not full_resolved_path.is_dir():
provenance = node.get_provenance()
raise LoadError(
- "{}: Specified path '{}' is not a directory".format(
- provenance, path_str
- ),
+ "{}: Specified path '{}' is not a directory".format(provenance, path_str),
LoadErrorReason.PROJ_PATH_INVALID_KIND,
)
@@ -391,9 +374,7 @@ class Project:
#
def create_element(self, meta, *, first_pass=False):
if first_pass:
- return self.first_pass_config.element_factory.create(
- self._context, self, meta
- )
+ return self.first_pass_config.element_factory.create(self._context, self, meta)
else:
return self.config.element_factory.create(self._context, self, meta)
@@ -423,9 +404,7 @@ class Project:
#
def create_source(self, meta, *, first_pass=False):
if first_pass:
- return self.first_pass_config.source_factory.create(
- self._context, self, meta
- )
+ return self.first_pass_config.source_factory.create(self._context, self, meta)
else:
return self.config.source_factory.create(self._context, self, meta)
@@ -461,9 +440,7 @@ class Project:
else:
config = self.config
- if (
- not alias or alias not in config._aliases
- ): # pylint: disable=unsupported-membership-test
+ if not alias or alias not in config._aliases: # pylint: disable=unsupported-membership-test
return [None]
mirror_list = []
@@ -490,15 +467,9 @@ class Project:
# (list): A list of loaded Element
#
def load_elements(self, targets, *, rewritable=False, ignore_workspaces=False):
- with self._context.messenger.simple_task(
- "Loading elements", silent_nested=True
- ) as task:
+ with self._context.messenger.simple_task("Loading elements", silent_nested=True) as task:
meta_elements = self.loader.load(
- targets,
- task,
- rewritable=rewritable,
- ticker=None,
- ignore_workspaces=ignore_workspaces,
+ targets, task, rewritable=rewritable, ticker=None, ignore_workspaces=ignore_workspaces,
)
with self._context.messenger.simple_task("Resolving elements") as task:
@@ -512,20 +483,11 @@ class Project:
# been discovered in the resolve() phase.
redundant_refs = Element._get_redundant_source_refs()
if redundant_refs:
- detail = (
- "The following inline specified source references will be ignored:\n\n"
- )
- lines = [
- "{}:{}".format(source._get_provenance(), ref)
- for source, ref in redundant_refs
- ]
+ detail = "The following inline specified source references will be ignored:\n\n"
+ lines = ["{}:{}".format(source._get_provenance(), ref) for source, ref in redundant_refs]
detail += "\n".join(lines)
self._context.messenger.message(
- Message(
- MessageType.WARN,
- "Ignoring redundant source references",
- detail=detail,
- )
+ Message(MessageType.WARN, "Ignoring redundant source references", detail=detail,)
)
return elements
@@ -551,9 +513,7 @@ class Project:
#
artifacts = []
for ref in targets:
- artifacts.append(
- ArtifactElement._new_from_artifact_ref(ref, self._context, task)
- )
+ artifacts.append(ArtifactElement._new_from_artifact_ref(ref, self._context, task))
ArtifactElement._clear_artifact_refs_cache()
@@ -667,9 +627,7 @@ class Project:
major, minor = utils.get_bst_version()
raise LoadError(
"Project requested format version {}, but BuildStream {}.{} only supports format version {} or above."
- "Use latest 1.x release".format(
- format_version, major, minor, BST_FORMAT_VERSION_MIN
- ),
+ "Use latest 1.x release".format(format_version, major, minor, BST_FORMAT_VERSION_MIN),
LoadErrorReason.UNSUPPORTED_PROJECT,
)
@@ -690,15 +648,10 @@ class Project:
self.name = self._project_conf.get_str("name")
# Validate that project name is a valid symbol name
- _assert_symbol_name(
- self.name, "project name", ref_node=pre_config_node.get_node("name")
- )
+ _assert_symbol_name(self.name, "project name", ref_node=pre_config_node.get_node("name"))
self.element_path = os.path.join(
- self.directory,
- self.get_path_from_node(
- pre_config_node.get_scalar("element-path"), check_is_dir=True
- ),
+ self.directory, self.get_path_from_node(pre_config_node.get_scalar("element-path"), check_is_dir=True),
)
self.config.options = OptionPool(self.element_path)
@@ -709,16 +662,9 @@ class Project:
self._default_targets = defaults.get_str_list("targets")
# Fatal warnings
- self._fatal_warnings = pre_config_node.get_str_list(
- "fatal-warnings", default=[]
- )
+ self._fatal_warnings = pre_config_node.get_str_list("fatal-warnings", default=[])
- self.loader = Loader(
- self._context,
- self,
- parent=parent_loader,
- fetch_subprojects=fetch_subprojects,
- )
+ self.loader = Loader(self._context, self, parent=parent_loader, fetch_subprojects=fetch_subprojects,)
self._project_includes = Includes(self.loader, copy_tree=False)
@@ -738,9 +684,7 @@ class Project:
]:
p = ref_storage_node.get_provenance()
raise LoadError(
- "{}: Invalid value '{}' specified for ref-storage".format(
- p, self.ref_storage
- ),
+ "{}: Invalid value '{}' specified for ref-storage".format(p, self.ref_storage),
LoadErrorReason.INVALID_DATA,
)
@@ -767,32 +711,24 @@ class Project:
#
# Load artifacts pull/push configuration for this project
- self.artifact_cache_specs = ArtifactCache.specs_from_config_node(
- config, self.directory
- )
+ self.artifact_cache_specs = ArtifactCache.specs_from_config_node(config, self.directory)
# If there is a junction Element which specifies that we want to remotely cache
# its elements, append the junction's remotes to the artifact cache specs list
if self.junction:
parent = self.junction._get_project()
if self.junction.cache_junction_elements:
- self.artifact_cache_specs = (
- parent.artifact_cache_specs + self.artifact_cache_specs
- )
+ self.artifact_cache_specs = parent.artifact_cache_specs + self.artifact_cache_specs
if self.junction.ignore_junction_remotes:
self.artifact_cache_specs = []
# Load source caches with pull/push config
- self.source_cache_specs = SourceCache.specs_from_config_node(
- config, self.directory
- )
+ self.source_cache_specs = SourceCache.specs_from_config_node(config, self.directory)
# Load remote-execution configuration for this project
project_specs = SandboxRemote.specs_from_config_node(config, self.directory)
- override_specs = SandboxRemote.specs_from_config_node(
- self._context.get_overrides(self.name), self.directory
- )
+ override_specs = SandboxRemote.specs_from_config_node(self._context.get_overrides(self.name), self.directory)
if override_specs is not None:
self.remote_execution_specs = override_specs
@@ -824,9 +760,7 @@ class Project:
)
)
- if (
- CoreWarnings.OVERLAPS not in self._fatal_warnings
- ) and fail_on_overlap.as_bool():
+ if (CoreWarnings.OVERLAPS not in self._fatal_warnings) and fail_on_overlap.as_bool():
self._fatal_warnings.append(CoreWarnings.OVERLAPS)
# Load project.refs if it exists; this may be ignored.
@@ -889,18 +823,14 @@ class Project:
output.options.load(options_node)
if self.junction:
# load before user configuration
- output.options.load_yaml_values(
- self.junction.options, transform=self.junction.node_subst_vars
- )
+ output.options.load_yaml_values(self.junction.options, transform=self.junction.node_subst_vars)
# Collect option values specified in the user configuration
overrides = self._context.get_overrides(self.name)
override_options = overrides.get_mapping("options", default={})
output.options.load_yaml_values(override_options)
if self._cli_options:
- output.options.load_cli_values(
- self._cli_options, ignore_unknown=ignore_unknown
- )
+ output.options.load_cli_values(self._cli_options, ignore_unknown=ignore_unknown)
# We're done modifying options, now we can use them for substitutions
output.options.resolve()
@@ -938,9 +868,7 @@ class Project:
output.options.export_variables(output.base_variables)
# Override default_mirror if not set by command-line
- output.default_mirror = self._default_mirror or overrides.get_str(
- "default-mirror", default=None
- )
+ output.default_mirror = self._default_mirror or overrides.get_str("default-mirror", default=None)
mirrors = config.get_sequence("mirrors", default=[])
for mirror in mirrors:
@@ -949,9 +877,7 @@ class Project:
mirror_name = mirror.get_str("name")
alias_mappings = {}
for alias_mapping, uris in mirror.get_mapping("aliases").items():
- assert (
- type(uris) is SequenceNode
- ) # pylint: disable=unidiomatic-typecheck
+ assert type(uris) is SequenceNode # pylint: disable=unidiomatic-typecheck
alias_mappings[alias_mapping] = uris.as_str_list()
output.mirrors[mirror_name] = alias_mappings
if not output.default_mirror:
@@ -978,9 +904,7 @@ class Project:
def _find_project_dir(self, directory):
workspace_element = None
config_filenames = [_PROJECT_CONF_FILE, WORKSPACE_PROJECT_FILE]
- found_directory, filename = utils._search_upward_for_files(
- directory, config_filenames
- )
+ found_directory, filename = utils._search_upward_for_files(directory, config_filenames)
if filename == _PROJECT_CONF_FILE:
project_directory = found_directory
elif filename == WORKSPACE_PROJECT_FILE:
@@ -1022,8 +946,7 @@ class Project:
for key in source_versions.keys():
if key in source_format_versions:
raise LoadError(
- "Duplicate listing of source '{}'".format(key),
- LoadErrorReason.INVALID_YAML,
+ "Duplicate listing of source '{}'".format(key), LoadErrorReason.INVALID_YAML,
)
source_format_versions[key] = source_versions.get_int(key)
@@ -1032,8 +955,7 @@ class Project:
for key in element_versions.keys():
if key in element_format_versions:
raise LoadError(
- "Duplicate listing of element '{}'".format(key),
- LoadErrorReason.INVALID_YAML,
+ "Duplicate listing of element '{}'".format(key), LoadErrorReason.INVALID_YAML,
)
element_format_versions[key] = element_versions.get_int(key)
@@ -1047,14 +969,10 @@ class Project:
pluginbase = PluginBase(package="buildstream.plugins")
output.element_factory = ElementFactory(
- pluginbase,
- plugin_origins=plugin_element_origins,
- format_versions=element_format_versions,
+ pluginbase, plugin_origins=plugin_element_origins, format_versions=element_format_versions,
)
output.source_factory = SourceFactory(
- pluginbase,
- plugin_origins=plugin_source_origins,
- format_versions=source_format_versions,
+ pluginbase, plugin_origins=plugin_source_origins, format_versions=source_format_versions,
)
# _store_origin()
@@ -1074,9 +992,7 @@ class Project:
expected_groups = ["sources", "elements"]
if plugin_group not in expected_groups:
raise LoadError(
- "Unexpected plugin group: {}, expecting {}".format(
- plugin_group, expected_groups
- ),
+ "Unexpected plugin group: {}, expecting {}".format(plugin_group, expected_groups),
LoadErrorReason.INVALID_DATA,
)
if plugin_group in origin.keys():
@@ -1089,9 +1005,7 @@ class Project:
del origin_node[group]
if origin_node.get_enum("origin", PluginOrigins) == PluginOrigins.LOCAL:
- path = self.get_path_from_node(
- origin.get_scalar("path"), check_is_dir=True
- )
+ path = self.get_path_from_node(origin.get_scalar("path"), check_is_dir=True)
# paths are passed in relative to the project, but must be absolute
origin_node["path"] = os.path.join(self.directory, path)
destination.append(origin_node)
diff --git a/src/buildstream/_projectrefs.py b/src/buildstream/_projectrefs.py
index 4b25192e4..aca7c6712 100644
--- a/src/buildstream/_projectrefs.py
+++ b/src/buildstream/_projectrefs.py
@@ -62,9 +62,7 @@ class ProjectRefs:
#
def load(self, options):
try:
- self._toplevel_node = _yaml.load(
- self._fullpath, shortname=self._base_name, copy_tree=True
- )
+ self._toplevel_node = _yaml.load(self._fullpath, shortname=self._base_name, copy_tree=True)
provenance = self._toplevel_node.get_provenance()
self._toplevel_save = provenance._toplevel
@@ -114,9 +112,7 @@ class ProjectRefs:
# If we couldn't find the original, create a new one.
#
if node is None:
- node = self._lookup(
- self._toplevel_save, project, element, source_index, ensure=True
- )
+ node = self._lookup(self._toplevel_save, project, element, source_index, ensure=True)
return node
diff --git a/src/buildstream/_protos/buildstream/v2/artifact_pb2.py b/src/buildstream/_protos/buildstream/v2/artifact_pb2.py
index 5418aac18..334915dd3 100644
--- a/src/buildstream/_protos/buildstream/v2/artifact_pb2.py
+++ b/src/buildstream/_protos/buildstream/v2/artifact_pb2.py
@@ -18,9 +18,7 @@ _sym_db = _symbol_database.Default()
from buildstream._protos.build.bazel.remote.execution.v2 import (
remote_execution_pb2 as build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2,
)
-from buildstream._protos.google.api import (
- annotations_pb2 as google_dot_api_dot_annotations__pb2,
-)
+from buildstream._protos.google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
@@ -557,27 +555,19 @@ _UPDATEARTIFACTREQUEST = _descriptor.Descriptor(
_ARTIFACT_DEPENDENCY.containing_type = _ARTIFACT
_ARTIFACT_LOGFILE.fields_by_name[
"digest"
-].message_type = (
- build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
-)
+].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
_ARTIFACT_LOGFILE.containing_type = _ARTIFACT
_ARTIFACT.fields_by_name[
"files"
-].message_type = (
- build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
-)
+].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
_ARTIFACT.fields_by_name["build_deps"].message_type = _ARTIFACT_DEPENDENCY
_ARTIFACT.fields_by_name[
"public_data"
-].message_type = (
- build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
-)
+].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
_ARTIFACT.fields_by_name["logs"].message_type = _ARTIFACT_LOGFILE
_ARTIFACT.fields_by_name[
"buildtree"
-].message_type = (
- build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
-)
+].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
_UPDATEARTIFACTREQUEST.fields_by_name["artifact"].message_type = _ARTIFACT
DESCRIPTOR.message_types_by_name["Artifact"] = _ARTIFACT
DESCRIPTOR.message_types_by_name["GetArtifactRequest"] = _GETARTIFACTREQUEST
diff --git a/src/buildstream/_protos/buildstream/v2/artifact_pb2_grpc.py b/src/buildstream/_protos/buildstream/v2/artifact_pb2_grpc.py
index 694780e25..9e6dd4359 100644
--- a/src/buildstream/_protos/buildstream/v2/artifact_pb2_grpc.py
+++ b/src/buildstream/_protos/buildstream/v2/artifact_pb2_grpc.py
@@ -1,9 +1,7 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
-from buildstream._protos.buildstream.v2 import (
- artifact_pb2 as buildstream_dot_v2_dot_artifact__pb2,
-)
+from buildstream._protos.buildstream.v2 import artifact_pb2 as buildstream_dot_v2_dot_artifact__pb2
class ArtifactServiceStub(object):
@@ -66,7 +64,5 @@ def add_ArtifactServiceServicer_to_server(servicer, server):
response_serializer=buildstream_dot_v2_dot_artifact__pb2.Artifact.SerializeToString,
),
}
- generic_handler = grpc.method_handlers_generic_handler(
- "buildstream.v2.ArtifactService", rpc_method_handlers
- )
+ generic_handler = grpc.method_handlers_generic_handler("buildstream.v2.ArtifactService", rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
diff --git a/src/buildstream/_protos/buildstream/v2/buildstream_pb2.py b/src/buildstream/_protos/buildstream/v2/buildstream_pb2.py
index 50621e7bd..c26497cb5 100644
--- a/src/buildstream/_protos/buildstream/v2/buildstream_pb2.py
+++ b/src/buildstream/_protos/buildstream/v2/buildstream_pb2.py
@@ -18,9 +18,7 @@ _sym_db = _symbol_database.Default()
from buildstream._protos.build.bazel.remote.execution.v2 import (
remote_execution_pb2 as build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2,
)
-from buildstream._protos.google.api import (
- annotations_pb2 as google_dot_api_dot_annotations__pb2,
-)
+from buildstream._protos.google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
@@ -482,20 +480,12 @@ _SERVERCAPABILITIES = _descriptor.Descriptor(
_GETREFERENCERESPONSE.fields_by_name[
"digest"
-].message_type = (
- build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
-)
+].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
_UPDATEREFERENCEREQUEST.fields_by_name[
"digest"
-].message_type = (
- build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
-)
-_SERVERCAPABILITIES.fields_by_name[
- "artifact_capabilities"
-].message_type = _ARTIFACTCAPABILITIES
-_SERVERCAPABILITIES.fields_by_name[
- "source_capabilities"
-].message_type = _SOURCECAPABILITIES
+].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
+_SERVERCAPABILITIES.fields_by_name["artifact_capabilities"].message_type = _ARTIFACTCAPABILITIES
+_SERVERCAPABILITIES.fields_by_name["source_capabilities"].message_type = _SOURCECAPABILITIES
DESCRIPTOR.message_types_by_name["GetReferenceRequest"] = _GETREFERENCEREQUEST
DESCRIPTOR.message_types_by_name["GetReferenceResponse"] = _GETREFERENCERESPONSE
DESCRIPTOR.message_types_by_name["UpdateReferenceRequest"] = _UPDATEREFERENCEREQUEST
@@ -635,9 +625,7 @@ _REFERENCESTORAGE = _descriptor.ServiceDescriptor(
containing_service=None,
input_type=_GETREFERENCEREQUEST,
output_type=_GETREFERENCERESPONSE,
- serialized_options=_b(
- "\202\323\344\223\002/\022-/v2/{instance_name=**}/buildstream/refs/{key}"
- ),
+ serialized_options=_b("\202\323\344\223\002/\022-/v2/{instance_name=**}/buildstream/refs/{key}"),
),
_descriptor.MethodDescriptor(
name="UpdateReference",
@@ -657,9 +645,7 @@ _REFERENCESTORAGE = _descriptor.ServiceDescriptor(
containing_service=None,
input_type=_STATUSREQUEST,
output_type=_STATUSRESPONSE,
- serialized_options=_b(
- "\202\323\344\223\0020\032./v2/{instance_name=**}/buildstream/refs:status"
- ),
+ serialized_options=_b("\202\323\344\223\0020\032./v2/{instance_name=**}/buildstream/refs:status"),
),
],
)
@@ -684,9 +670,7 @@ _CAPABILITIES = _descriptor.ServiceDescriptor(
containing_service=None,
input_type=_GETCAPABILITIESREQUEST,
output_type=_SERVERCAPABILITIES,
- serialized_options=_b(
- "\202\323\344\223\002%\022#/v2/{instance_name=**}/capabilities"
- ),
+ serialized_options=_b("\202\323\344\223\002%\022#/v2/{instance_name=**}/capabilities"),
),
],
)
diff --git a/src/buildstream/_protos/buildstream/v2/buildstream_pb2_grpc.py b/src/buildstream/_protos/buildstream/v2/buildstream_pb2_grpc.py
index 7c4ca932b..ca890fa53 100644
--- a/src/buildstream/_protos/buildstream/v2/buildstream_pb2_grpc.py
+++ b/src/buildstream/_protos/buildstream/v2/buildstream_pb2_grpc.py
@@ -1,9 +1,7 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
-from buildstream._protos.buildstream.v2 import (
- buildstream_pb2 as buildstream_dot_v2_dot_buildstream__pb2,
-)
+from buildstream._protos.buildstream.v2 import buildstream_pb2 as buildstream_dot_v2_dot_buildstream__pb2
class ReferenceStorageStub(object):
@@ -86,9 +84,7 @@ def add_ReferenceStorageServicer_to_server(servicer, server):
response_serializer=buildstream_dot_v2_dot_buildstream__pb2.StatusResponse.SerializeToString,
),
}
- generic_handler = grpc.method_handlers_generic_handler(
- "buildstream.v2.ReferenceStorage", rpc_method_handlers
- )
+ generic_handler = grpc.method_handlers_generic_handler("buildstream.v2.ReferenceStorage", rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
@@ -129,7 +125,5 @@ def add_CapabilitiesServicer_to_server(servicer, server):
response_serializer=buildstream_dot_v2_dot_buildstream__pb2.ServerCapabilities.SerializeToString,
),
}
- generic_handler = grpc.method_handlers_generic_handler(
- "buildstream.v2.Capabilities", rpc_method_handlers
- )
+ generic_handler = grpc.method_handlers_generic_handler("buildstream.v2.Capabilities", rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
diff --git a/src/buildstream/_protos/buildstream/v2/source_pb2.py b/src/buildstream/_protos/buildstream/v2/source_pb2.py
index af1e456b9..04ac7356d 100644
--- a/src/buildstream/_protos/buildstream/v2/source_pb2.py
+++ b/src/buildstream/_protos/buildstream/v2/source_pb2.py
@@ -18,9 +18,7 @@ _sym_db = _symbol_database.Default()
from buildstream._protos.build.bazel.remote.execution.v2 import (
remote_execution_pb2 as build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2,
)
-from buildstream._protos.google.api import (
- annotations_pb2 as google_dot_api_dot_annotations__pb2,
-)
+from buildstream._protos.google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
@@ -228,9 +226,7 @@ _UPDATESOURCEREQUEST = _descriptor.Descriptor(
_SOURCE.fields_by_name[
"files"
-].message_type = (
- build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
-)
+].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
_UPDATESOURCEREQUEST.fields_by_name["source"].message_type = _SOURCE
DESCRIPTOR.message_types_by_name["Source"] = _SOURCE
DESCRIPTOR.message_types_by_name["GetSourceRequest"] = _GETSOURCEREQUEST
diff --git a/src/buildstream/_protos/buildstream/v2/source_pb2_grpc.py b/src/buildstream/_protos/buildstream/v2/source_pb2_grpc.py
index 11958366b..4c3a84075 100644
--- a/src/buildstream/_protos/buildstream/v2/source_pb2_grpc.py
+++ b/src/buildstream/_protos/buildstream/v2/source_pb2_grpc.py
@@ -1,9 +1,7 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
-from buildstream._protos.buildstream.v2 import (
- source_pb2 as buildstream_dot_v2_dot_source__pb2,
-)
+from buildstream._protos.buildstream.v2 import source_pb2 as buildstream_dot_v2_dot_source__pb2
class SourceServiceStub(object):
@@ -67,7 +65,5 @@ def add_SourceServiceServicer_to_server(servicer, server):
response_serializer=buildstream_dot_v2_dot_source__pb2.Source.SerializeToString,
),
}
- generic_handler = grpc.method_handlers_generic_handler(
- "buildstream.v2.SourceService", rpc_method_handlers
- )
+ generic_handler = grpc.method_handlers_generic_handler("buildstream.v2.SourceService", rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
diff --git a/src/buildstream/_protos/google/api/annotations_pb2.py b/src/buildstream/_protos/google/api/annotations_pb2.py
index b68e2147c..ac997d36e 100644
--- a/src/buildstream/_protos/google/api/annotations_pb2.py
+++ b/src/buildstream/_protos/google/api/annotations_pb2.py
@@ -29,10 +29,7 @@ DESCRIPTOR = _descriptor.FileDescriptor(
serialized_pb=_b(
'\n\x1cgoogle/api/annotations.proto\x12\ngoogle.api\x1a\x15google/api/http.proto\x1a google/protobuf/descriptor.proto:E\n\x04http\x12\x1e.google.protobuf.MethodOptions\x18\xb0\xca\xbc" \x01(\x0b\x32\x14.google.api.HttpRuleBn\n\x0e\x63om.google.apiB\x10\x41nnotationsProtoP\x01ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\xa2\x02\x04GAPIb\x06proto3'
),
- dependencies=[
- google_dot_api_dot_http__pb2.DESCRIPTOR,
- google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,
- ],
+ dependencies=[google_dot_api_dot_http__pb2.DESCRIPTOR, google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,],
)
diff --git a/src/buildstream/_protos/google/api/http_pb2.py b/src/buildstream/_protos/google/api/http_pb2.py
index b85402af0..5c3dbdf80 100644
--- a/src/buildstream/_protos/google/api/http_pb2.py
+++ b/src/buildstream/_protos/google/api/http_pb2.py
@@ -264,11 +264,7 @@ _HTTPRULE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
- name="pattern",
- full_name="google.api.HttpRule.pattern",
- index=0,
- containing_type=None,
- fields=[],
+ name="pattern", full_name="google.api.HttpRule.pattern", index=0, containing_type=None, fields=[],
),
],
serialized_start=124,
@@ -342,15 +338,11 @@ _HTTPRULE.fields_by_name["put"].containing_oneof = _HTTPRULE.oneofs_by_name["pat
_HTTPRULE.oneofs_by_name["pattern"].fields.append(_HTTPRULE.fields_by_name["post"])
_HTTPRULE.fields_by_name["post"].containing_oneof = _HTTPRULE.oneofs_by_name["pattern"]
_HTTPRULE.oneofs_by_name["pattern"].fields.append(_HTTPRULE.fields_by_name["delete"])
-_HTTPRULE.fields_by_name["delete"].containing_oneof = _HTTPRULE.oneofs_by_name[
- "pattern"
-]
+_HTTPRULE.fields_by_name["delete"].containing_oneof = _HTTPRULE.oneofs_by_name["pattern"]
_HTTPRULE.oneofs_by_name["pattern"].fields.append(_HTTPRULE.fields_by_name["patch"])
_HTTPRULE.fields_by_name["patch"].containing_oneof = _HTTPRULE.oneofs_by_name["pattern"]
_HTTPRULE.oneofs_by_name["pattern"].fields.append(_HTTPRULE.fields_by_name["custom"])
-_HTTPRULE.fields_by_name["custom"].containing_oneof = _HTTPRULE.oneofs_by_name[
- "pattern"
-]
+_HTTPRULE.fields_by_name["custom"].containing_oneof = _HTTPRULE.oneofs_by_name["pattern"]
DESCRIPTOR.message_types_by_name["Http"] = _HTTP
DESCRIPTOR.message_types_by_name["HttpRule"] = _HTTPRULE
DESCRIPTOR.message_types_by_name["CustomHttpPattern"] = _CUSTOMHTTPPATTERN
diff --git a/src/buildstream/_protos/google/bytestream/bytestream_pb2.py b/src/buildstream/_protos/google/bytestream/bytestream_pb2.py
index 4a0badd27..e472b9ffb 100644
--- a/src/buildstream/_protos/google/bytestream/bytestream_pb2.py
+++ b/src/buildstream/_protos/google/bytestream/bytestream_pb2.py
@@ -15,9 +15,7 @@ from google.protobuf import symbol_database as _symbol_database
_sym_db = _symbol_database.Default()
-from buildstream._protos.google.api import (
- annotations_pb2 as google_dot_api_dot_annotations__pb2,
-)
+from buildstream._protos.google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
@@ -31,10 +29,7 @@ DESCRIPTOR = _descriptor.FileDescriptor(
serialized_pb=_b(
'\n"google/bytestream/bytestream.proto\x12\x11google.bytestream\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/wrappers.proto"M\n\x0bReadRequest\x12\x15\n\rresource_name\x18\x01 \x01(\t\x12\x13\n\x0bread_offset\x18\x02 \x01(\x03\x12\x12\n\nread_limit\x18\x03 \x01(\x03"\x1c\n\x0cReadResponse\x12\x0c\n\x04\x64\x61ta\x18\n \x01(\x0c"_\n\x0cWriteRequest\x12\x15\n\rresource_name\x18\x01 \x01(\t\x12\x14\n\x0cwrite_offset\x18\x02 \x01(\x03\x12\x14\n\x0c\x66inish_write\x18\x03 \x01(\x08\x12\x0c\n\x04\x64\x61ta\x18\n \x01(\x0c"\'\n\rWriteResponse\x12\x16\n\x0e\x63ommitted_size\x18\x01 \x01(\x03"0\n\x17QueryWriteStatusRequest\x12\x15\n\rresource_name\x18\x01 \x01(\t"D\n\x18QueryWriteStatusResponse\x12\x16\n\x0e\x63ommitted_size\x18\x01 \x01(\x03\x12\x10\n\x08\x63omplete\x18\x02 \x01(\x08\x32\x92\x02\n\nByteStream\x12I\n\x04Read\x12\x1e.google.bytestream.ReadRequest\x1a\x1f.google.bytestream.ReadResponse0\x01\x12L\n\x05Write\x12\x1f.google.bytestream.WriteRequest\x1a .google.bytestream.WriteResponse(\x01\x12k\n\x10QueryWriteStatus\x12*.google.bytestream.QueryWriteStatusRequest\x1a+.google.bytestream.QueryWriteStatusResponseBe\n\x15\x63om.google.bytestreamB\x0f\x42yteStreamProtoZ;google.golang.org/genproto/googleapis/bytestream;bytestreamb\x06proto3'
),
- dependencies=[
- google_dot_api_dot_annotations__pb2.DESCRIPTOR,
- google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,
- ],
+ dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,],
)
diff --git a/src/buildstream/_protos/google/bytestream/bytestream_pb2_grpc.py b/src/buildstream/_protos/google/bytestream/bytestream_pb2_grpc.py
index a7b5ac589..66ed25016 100644
--- a/src/buildstream/_protos/google/bytestream/bytestream_pb2_grpc.py
+++ b/src/buildstream/_protos/google/bytestream/bytestream_pb2_grpc.py
@@ -1,9 +1,7 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
-from buildstream._protos.google.bytestream import (
- bytestream_pb2 as google_dot_bytestream_dot_bytestream__pb2,
-)
+from buildstream._protos.google.bytestream import bytestream_pb2 as google_dot_bytestream_dot_bytestream__pb2
class ByteStreamStub(object):
@@ -157,7 +155,5 @@ def add_ByteStreamServicer_to_server(servicer, server):
response_serializer=google_dot_bytestream_dot_bytestream__pb2.QueryWriteStatusResponse.SerializeToString,
),
}
- generic_handler = grpc.method_handlers_generic_handler(
- "google.bytestream.ByteStream", rpc_method_handlers
- )
+ generic_handler = grpc.method_handlers_generic_handler("google.bytestream.ByteStream", rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
diff --git a/src/buildstream/_protos/google/longrunning/operations_pb2.py b/src/buildstream/_protos/google/longrunning/operations_pb2.py
index 0b30b7c11..ea2a6f674 100644
--- a/src/buildstream/_protos/google/longrunning/operations_pb2.py
+++ b/src/buildstream/_protos/google/longrunning/operations_pb2.py
@@ -15,9 +15,7 @@ from google.protobuf import symbol_database as _symbol_database
_sym_db = _symbol_database.Default()
-from buildstream._protos.google.api import (
- annotations_pb2 as google_dot_api_dot_annotations__pb2,
-)
+from buildstream._protos.google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from buildstream._protos.google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2
@@ -149,11 +147,7 @@ _OPERATION = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
- name="result",
- full_name="google.longrunning.Operation.result",
- index=0,
- containing_type=None,
- fields=[],
+ name="result", full_name="google.longrunning.Operation.result", index=0, containing_type=None, fields=[],
),
],
serialized_start=171,
@@ -427,21 +421,13 @@ _DELETEOPERATIONREQUEST = _descriptor.Descriptor(
serialized_end=652,
)
-_OPERATION.fields_by_name[
- "metadata"
-].message_type = google_dot_protobuf_dot_any__pb2._ANY
+_OPERATION.fields_by_name["metadata"].message_type = google_dot_protobuf_dot_any__pb2._ANY
_OPERATION.fields_by_name["error"].message_type = google_dot_rpc_dot_status__pb2._STATUS
-_OPERATION.fields_by_name[
- "response"
-].message_type = google_dot_protobuf_dot_any__pb2._ANY
+_OPERATION.fields_by_name["response"].message_type = google_dot_protobuf_dot_any__pb2._ANY
_OPERATION.oneofs_by_name["result"].fields.append(_OPERATION.fields_by_name["error"])
-_OPERATION.fields_by_name["error"].containing_oneof = _OPERATION.oneofs_by_name[
- "result"
-]
+_OPERATION.fields_by_name["error"].containing_oneof = _OPERATION.oneofs_by_name["result"]
_OPERATION.oneofs_by_name["result"].fields.append(_OPERATION.fields_by_name["response"])
-_OPERATION.fields_by_name["response"].containing_oneof = _OPERATION.oneofs_by_name[
- "result"
-]
+_OPERATION.fields_by_name["response"].containing_oneof = _OPERATION.oneofs_by_name["result"]
_LISTOPERATIONSRESPONSE.fields_by_name["operations"].message_type = _OPERATION
DESCRIPTOR.message_types_by_name["Operation"] = _OPERATION
DESCRIPTOR.message_types_by_name["GetOperationRequest"] = _GETOPERATIONREQUEST
@@ -536,9 +522,7 @@ _OPERATIONS = _descriptor.ServiceDescriptor(
containing_service=None,
input_type=_LISTOPERATIONSREQUEST,
output_type=_LISTOPERATIONSRESPONSE,
- serialized_options=_b(
- "\202\323\344\223\002\027\022\025/v1/{name=operations}"
- ),
+ serialized_options=_b("\202\323\344\223\002\027\022\025/v1/{name=operations}"),
),
_descriptor.MethodDescriptor(
name="GetOperation",
@@ -547,9 +531,7 @@ _OPERATIONS = _descriptor.ServiceDescriptor(
containing_service=None,
input_type=_GETOPERATIONREQUEST,
output_type=_OPERATION,
- serialized_options=_b(
- "\202\323\344\223\002\032\022\030/v1/{name=operations/**}"
- ),
+ serialized_options=_b("\202\323\344\223\002\032\022\030/v1/{name=operations/**}"),
),
_descriptor.MethodDescriptor(
name="DeleteOperation",
@@ -558,9 +540,7 @@ _OPERATIONS = _descriptor.ServiceDescriptor(
containing_service=None,
input_type=_DELETEOPERATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
- serialized_options=_b(
- "\202\323\344\223\002\032*\030/v1/{name=operations/**}"
- ),
+ serialized_options=_b("\202\323\344\223\002\032*\030/v1/{name=operations/**}"),
),
_descriptor.MethodDescriptor(
name="CancelOperation",
@@ -569,9 +549,7 @@ _OPERATIONS = _descriptor.ServiceDescriptor(
containing_service=None,
input_type=_CANCELOPERATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
- serialized_options=_b(
- '\202\323\344\223\002$"\037/v1/{name=operations/**}:cancel:\001*'
- ),
+ serialized_options=_b('\202\323\344\223\002$"\037/v1/{name=operations/**}:cancel:\001*'),
),
],
)
diff --git a/src/buildstream/_protos/google/longrunning/operations_pb2_grpc.py b/src/buildstream/_protos/google/longrunning/operations_pb2_grpc.py
index 24240730a..66c755056 100644
--- a/src/buildstream/_protos/google/longrunning/operations_pb2_grpc.py
+++ b/src/buildstream/_protos/google/longrunning/operations_pb2_grpc.py
@@ -1,9 +1,7 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
-from buildstream._protos.google.longrunning import (
- operations_pb2 as google_dot_longrunning_dot_operations__pb2,
-)
+from buildstream._protos.google.longrunning import operations_pb2 as google_dot_longrunning_dot_operations__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
@@ -129,7 +127,5 @@ def add_OperationsServicer_to_server(servicer, server):
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
}
- generic_handler = grpc.method_handlers_generic_handler(
- "google.longrunning.Operations", rpc_method_handlers
- )
+ generic_handler = grpc.method_handlers_generic_handler("google.longrunning.Operations", rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
diff --git a/src/buildstream/_protos/google/rpc/code_pb2.py b/src/buildstream/_protos/google/rpc/code_pb2.py
index 120fb0812..d6688c1e8 100644
--- a/src/buildstream/_protos/google/rpc/code_pb2.py
+++ b/src/buildstream/_protos/google/rpc/code_pb2.py
@@ -34,85 +34,37 @@ _CODE = _descriptor.EnumDescriptor(
filename=None,
file=DESCRIPTOR,
values=[
+ _descriptor.EnumValueDescriptor(name="OK", index=0, number=0, serialized_options=None, type=None),
+ _descriptor.EnumValueDescriptor(name="CANCELLED", index=1, number=1, serialized_options=None, type=None),
+ _descriptor.EnumValueDescriptor(name="UNKNOWN", index=2, number=2, serialized_options=None, type=None),
_descriptor.EnumValueDescriptor(
- name="OK", index=0, number=0, serialized_options=None, type=None
+ name="INVALID_ARGUMENT", index=3, number=3, serialized_options=None, type=None,
),
_descriptor.EnumValueDescriptor(
- name="CANCELLED", index=1, number=1, serialized_options=None, type=None
+ name="DEADLINE_EXCEEDED", index=4, number=4, serialized_options=None, type=None,
),
+ _descriptor.EnumValueDescriptor(name="NOT_FOUND", index=5, number=5, serialized_options=None, type=None),
+ _descriptor.EnumValueDescriptor(name="ALREADY_EXISTS", index=6, number=6, serialized_options=None, type=None),
_descriptor.EnumValueDescriptor(
- name="UNKNOWN", index=2, number=2, serialized_options=None, type=None
+ name="PERMISSION_DENIED", index=7, number=7, serialized_options=None, type=None,
),
_descriptor.EnumValueDescriptor(
- name="INVALID_ARGUMENT",
- index=3,
- number=3,
- serialized_options=None,
- type=None,
+ name="UNAUTHENTICATED", index=8, number=16, serialized_options=None, type=None,
),
_descriptor.EnumValueDescriptor(
- name="DEADLINE_EXCEEDED",
- index=4,
- number=4,
- serialized_options=None,
- type=None,
+ name="RESOURCE_EXHAUSTED", index=9, number=8, serialized_options=None, type=None,
),
_descriptor.EnumValueDescriptor(
- name="NOT_FOUND", index=5, number=5, serialized_options=None, type=None
+ name="FAILED_PRECONDITION", index=10, number=9, serialized_options=None, type=None,
),
+ _descriptor.EnumValueDescriptor(name="ABORTED", index=11, number=10, serialized_options=None, type=None),
+ _descriptor.EnumValueDescriptor(name="OUT_OF_RANGE", index=12, number=11, serialized_options=None, type=None),
_descriptor.EnumValueDescriptor(
- name="ALREADY_EXISTS", index=6, number=6, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="PERMISSION_DENIED",
- index=7,
- number=7,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="UNAUTHENTICATED",
- index=8,
- number=16,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="RESOURCE_EXHAUSTED",
- index=9,
- number=8,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="FAILED_PRECONDITION",
- index=10,
- number=9,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="ABORTED", index=11, number=10, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="OUT_OF_RANGE", index=12, number=11, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="UNIMPLEMENTED",
- index=13,
- number=12,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="INTERNAL", index=14, number=13, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="UNAVAILABLE", index=15, number=14, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="DATA_LOSS", index=16, number=15, serialized_options=None, type=None
+ name="UNIMPLEMENTED", index=13, number=12, serialized_options=None, type=None,
),
+ _descriptor.EnumValueDescriptor(name="INTERNAL", index=14, number=13, serialized_options=None, type=None),
+ _descriptor.EnumValueDescriptor(name="UNAVAILABLE", index=15, number=14, serialized_options=None, type=None),
+ _descriptor.EnumValueDescriptor(name="DATA_LOSS", index=16, number=15, serialized_options=None, type=None),
],
containing_type=None,
serialized_options=None,
diff --git a/src/buildstream/_remote.py b/src/buildstream/_remote.py
index 671adb95c..473d5c2a3 100644
--- a/src/buildstream/_remote.py
+++ b/src/buildstream/_remote.py
@@ -42,11 +42,7 @@ class RemoteType(FastEnum):
#
# Defines the basic structure of a remote specification.
#
-class RemoteSpec(
- namedtuple(
- "RemoteSpec", "url push server_cert client_key client_cert instance_name type"
- )
-):
+class RemoteSpec(namedtuple("RemoteSpec", "url push server_cert client_key client_cert instance_name type")):
# new_from_config_node
#
@@ -65,23 +61,14 @@ class RemoteSpec(
@classmethod
def new_from_config_node(cls, spec_node, basedir=None):
spec_node.validate_keys(
- [
- "url",
- "push",
- "server-cert",
- "client-key",
- "client-cert",
- "instance-name",
- "type",
- ]
+ ["url", "push", "server-cert", "client-key", "client-cert", "instance-name", "type",]
)
url = spec_node.get_str("url")
if not url:
provenance = spec_node.get_node("url").get_provenance()
raise LoadError(
- "{}: empty artifact cache URL".format(provenance),
- LoadErrorReason.INVALID_DATA,
+ "{}: empty artifact cache URL".format(provenance), LoadErrorReason.INVALID_DATA,
)
push = spec_node.get_bool("push", default=False)
@@ -94,33 +81,25 @@ class RemoteSpec(
return cert
cert_keys = ("server-cert", "client-key", "client-cert")
- server_cert, client_key, client_cert = tuple(
- parse_cert(key) for key in cert_keys
- )
+ server_cert, client_key, client_cert = tuple(parse_cert(key) for key in cert_keys)
if client_key and not client_cert:
provenance = spec_node.get_node("client-key").get_provenance()
raise LoadError(
- "{}: 'client-key' was specified without 'client-cert'".format(
- provenance
- ),
+ "{}: 'client-key' was specified without 'client-cert'".format(provenance),
LoadErrorReason.INVALID_DATA,
)
if client_cert and not client_key:
provenance = spec_node.get_node("client-cert").get_provenance()
raise LoadError(
- "{}: 'client-cert' was specified without 'client-key'".format(
- provenance
- ),
+ "{}: 'client-cert' was specified without 'client-key'".format(provenance),
LoadErrorReason.INVALID_DATA,
)
type_ = spec_node.get_enum("type", RemoteType, default=RemoteType.ALL)
- return cls(
- url, push, server_cert, client_key, client_cert, instance_name, type_
- )
+ return cls(url, push, server_cert, client_key, client_cert, instance_name, type_)
# FIXME: This can be made much nicer in python 3.7 through the use of
@@ -194,13 +173,9 @@ class BaseRemote:
self.client_key = client_key
self.client_cert = client_cert
credentials = grpc.ssl_channel_credentials(
- root_certificates=self.server_cert,
- private_key=self.client_key,
- certificate_chain=self.client_cert,
- )
- self.channel = grpc.secure_channel(
- "{}:{}".format(url.hostname, port), credentials
+ root_certificates=self.server_cert, private_key=self.client_key, certificate_chain=self.client_cert,
)
+ self.channel = grpc.secure_channel("{}:{}".format(url.hostname, port), credentials)
else:
raise RemoteError("Unsupported URL: {}".format(self.spec.url))
diff --git a/src/buildstream/_scheduler/jobs/elementjob.py b/src/buildstream/_scheduler/jobs/elementjob.py
index 2a9f935b5..f3136104f 100644
--- a/src/buildstream/_scheduler/jobs/elementjob.py
+++ b/src/buildstream/_scheduler/jobs/elementjob.py
@@ -81,9 +81,7 @@ class ElementJob(Job):
self._complete_cb(self, self._element, status, self._result)
def create_child_job(self, *args, **kwargs):
- return ChildElementJob(
- *args, element=self._element, action_cb=self._action_cb, **kwargs
- )
+ return ChildElementJob(*args, element=self._element, action_cb=self._action_cb, **kwargs)
class ChildElementJob(ChildJob):
@@ -98,13 +96,9 @@ class ChildElementJob(ChildJob):
#
# This should probably be omitted for non-build tasks but it's harmless here
elt_env = self._element.get_environment()
- env_dump = yaml.round_trip_dump(
- elt_env, default_flow_style=False, allow_unicode=True
- )
+ env_dump = yaml.round_trip_dump(elt_env, default_flow_style=False, allow_unicode=True)
self.message(
- MessageType.LOG,
- "Build environment for element {}".format(self._element.name),
- detail=env_dump,
+ MessageType.LOG, "Build environment for element {}".format(self._element.name), detail=env_dump,
)
# Run the action
diff --git a/src/buildstream/_scheduler/jobs/job.py b/src/buildstream/_scheduler/jobs/job.py
index 3a5694a71..8baf8fe1b 100644
--- a/src/buildstream/_scheduler/jobs/job.py
+++ b/src/buildstream/_scheduler/jobs/job.py
@@ -146,13 +146,9 @@ class Job:
self._terminated = False # Whether this job has been explicitly terminated
self._logfile = logfile
- self._message_element_name = (
- None # The plugin instance element name for messaging
- )
+ self._message_element_name = None # The plugin instance element name for messaging
self._message_element_key = None # The element key for messaging
- self._element = (
- None # The Element() passed to the Job() constructor, if applicable
- )
+ self._element = None # The Element() passed to the Job() constructor, if applicable
# set_name()
#
@@ -182,15 +178,9 @@ class Job:
self._message_element_key,
)
- if (
- self._scheduler.context.platform.does_multiprocessing_start_require_pickling()
- ):
- pickled = pickle_child_job(
- child_job, self._scheduler.context.get_projects(),
- )
- self._process = Process(
- target=do_pickled_child_job, args=[pickled, self._queue],
- )
+ if self._scheduler.context.platform.does_multiprocessing_start_require_pickling():
+ pickled = pickle_child_job(child_job, self._scheduler.context.get_projects(),)
+ self._process = Process(target=do_pickled_child_job, args=[pickled, self._queue],)
else:
self._process = Process(target=child_job.child_action, args=[self._queue],)
@@ -198,9 +188,7 @@ class Job:
# the child process does not inherit the parent's state, but the main
# process will be notified of any signal after we launch the child.
#
- with _signals.blocked(
- [signal.SIGINT, signal.SIGTSTP, signal.SIGTERM], ignore=False
- ):
+ with _signals.blocked([signal.SIGINT, signal.SIGTSTP, signal.SIGTERM], ignore=False):
self._process.start()
# Wait for the child task to complete.
@@ -282,8 +270,7 @@ class Job:
def kill(self):
# Force kill
self.message(
- MessageType.WARN,
- "{} did not terminate gracefully, killing".format(self.action_name),
+ MessageType.WARN, "{} did not terminate gracefully, killing".format(self.action_name),
)
utils._kill_process_tree(self._process.pid)
@@ -358,22 +345,14 @@ class Job:
# kwargs: Remaining Message() constructor arguments, note that you can
# override 'element_name' and 'element_key' this way.
#
- def message(
- self, message_type, message, element_name=None, element_key=None, **kwargs
- ):
+ def message(self, message_type, message, element_name=None, element_key=None, **kwargs):
kwargs["scheduler"] = True
# If default name & key values not provided, set as given job attributes
if element_name is None:
element_name = self._message_element_name
if element_key is None:
element_key = self._message_element_key
- message = Message(
- message_type,
- message,
- element_name=element_name,
- element_key=element_key,
- **kwargs
- )
+ message = Message(message_type, message, element_name=element_name, element_key=element_key, **kwargs)
self._scheduler.notify_messenger(message)
# get_element()
@@ -405,11 +384,7 @@ class Job:
# lists, dicts, numbers, but not Element instances).
#
def handle_message(self, message):
- raise ImplError(
- "Job '{kind}' does not implement handle_message()".format(
- kind=type(self).__name__
- )
- )
+ raise ImplError("Job '{kind}' does not implement handle_message()".format(kind=type(self).__name__))
# parent_complete()
#
@@ -421,11 +396,7 @@ class Job:
# result (any): The result returned by child_process().
#
def parent_complete(self, status, result):
- raise ImplError(
- "Job '{kind}' does not implement parent_complete()".format(
- kind=type(self).__name__
- )
- )
+ raise ImplError("Job '{kind}' does not implement parent_complete()".format(kind=type(self).__name__))
# create_child_job()
#
@@ -443,11 +414,7 @@ class Job:
# (ChildJob): An instance of a subclass of ChildJob.
#
def create_child_job(self, *args, **kwargs):
- raise ImplError(
- "Job '{kind}' does not implement create_child_job()".format(
- kind=type(self).__name__
- )
- )
+ raise ImplError("Job '{kind}' does not implement create_child_job()".format(kind=type(self).__name__))
#######################################################
# Local Private Methods #
@@ -480,9 +447,7 @@ class Job:
# An unexpected return code was returned; fail permanently and report
self.message(
MessageType.ERROR,
- "Internal job process unexpectedly died with exit code {}".format(
- returncode
- ),
+ "Internal job process unexpectedly died with exit code {}".format(returncode),
logfile=self._logfile,
)
returncode = _ReturnCode.PERM_FAIL
@@ -490,11 +455,7 @@ class Job:
# We don't want to retry if we got OK or a permanent fail.
retry_flag = returncode == _ReturnCode.FAIL
- if (
- retry_flag
- and (self._tries <= self._max_retries)
- and not self._scheduler.terminated
- ):
+ if retry_flag and (self._tries <= self._max_retries) and not self._scheduler.terminated:
self.start()
return
@@ -548,9 +509,7 @@ class Job:
elif envelope.message_type is _MessageType.SUBCLASS_CUSTOM_MESSAGE:
self.handle_message(envelope.message)
else:
- assert False, "Unhandled message type '{}': {}".format(
- envelope.message_type, envelope.message
- )
+ assert False, "Unhandled message type '{}': {}".format(envelope.message_type, envelope.message)
# _parent_process_queue()
#
@@ -587,9 +546,7 @@ class Job:
# http://bugs.python.org/issue3831
#
if not self._listening:
- self._scheduler.loop.add_reader(
- self._queue._reader.fileno(), self._parent_recv
- )
+ self._scheduler.loop.add_reader(self._queue._reader.fileno(), self._parent_recv)
self._listening = True
# _parent_stop_listening()
@@ -627,15 +584,7 @@ class Job:
#
class ChildJob:
def __init__(
- self,
- action_name,
- messenger,
- logdir,
- logfile,
- max_retries,
- tries,
- message_element_name,
- message_element_key,
+ self, action_name, messenger, logdir, logfile, max_retries, tries, message_element_name, message_element_key,
):
self.action_name = action_name
@@ -666,9 +615,7 @@ class ChildJob:
# for front end display if not already set or explicitly
# overridden here.
#
- def message(
- self, message_type, message, element_name=None, element_key=None, **kwargs
- ):
+ def message(self, message_type, message, element_name=None, element_key=None, **kwargs):
kwargs["scheduler"] = True
# If default name & key values not provided, set as given job attributes
if element_name is None:
@@ -676,13 +623,7 @@ class ChildJob:
if element_key is None:
element_key = self._message_element_key
self._messenger.message(
- Message(
- message_type,
- message,
- element_name=element_name,
- element_key=element_key,
- **kwargs
- )
+ Message(message_type, message, element_name=element_name, element_key=element_key, **kwargs)
)
# send_message()
@@ -720,11 +661,7 @@ class ChildJob:
# the result of the Job.
#
def child_process(self):
- raise ImplError(
- "ChildJob '{kind}' does not implement child_process()".format(
- kind=type(self).__name__
- )
- )
+ raise ImplError("ChildJob '{kind}' does not implement child_process()".format(kind=type(self).__name__))
# child_process_data()
#
@@ -782,22 +719,18 @@ class ChildJob:
# Time, log and run the action function
#
- with _signals.suspendable(
- stop_time, resume_time
- ), self._messenger.recorded_messages(self._logfile, self._logdir) as filename:
+ with _signals.suspendable(stop_time, resume_time), self._messenger.recorded_messages(
+ self._logfile, self._logdir
+ ) as filename:
self.message(MessageType.START, self.action_name, logfile=filename)
try:
# Try the task action
- result = (
- self.child_process()
- ) # pylint: disable=assignment-from-no-return
+ result = self.child_process() # pylint: disable=assignment-from-no-return
except SkipJob as e:
elapsed = datetime.datetime.now() - starttime
- self.message(
- MessageType.SKIPPED, str(e), elapsed=elapsed, logfile=filename
- )
+ self.message(MessageType.SKIPPED, str(e), elapsed=elapsed, logfile=filename)
# Alert parent of skip by return code
self._child_shutdown(_ReturnCode.SKIPPED)
@@ -829,9 +762,7 @@ class ChildJob:
# Set return code based on whether or not the error was temporary.
#
- self._child_shutdown(
- _ReturnCode.FAIL if retry_flag else _ReturnCode.PERM_FAIL
- )
+ self._child_shutdown(_ReturnCode.FAIL if retry_flag else _ReturnCode.PERM_FAIL)
except Exception: # pylint: disable=broad-except
@@ -840,16 +771,10 @@ class ChildJob:
# and print it to the log file.
#
elapsed = datetime.datetime.now() - starttime
- detail = "An unhandled exception occured:\n\n{}".format(
- traceback.format_exc()
- )
+ detail = "An unhandled exception occured:\n\n{}".format(traceback.format_exc())
self.message(
- MessageType.BUG,
- self.action_name,
- elapsed=elapsed,
- detail=detail,
- logfile=filename,
+ MessageType.BUG, self.action_name, elapsed=elapsed, detail=detail, logfile=filename,
)
# Unhandled exceptions should permanently fail
self._child_shutdown(_ReturnCode.PERM_FAIL)
@@ -861,10 +786,7 @@ class ChildJob:
elapsed = datetime.datetime.now() - starttime
self.message(
- MessageType.SUCCESS,
- self.action_name,
- elapsed=elapsed,
- logfile=filename,
+ MessageType.SUCCESS, self.action_name, elapsed=elapsed, logfile=filename,
)
# Shutdown needs to stay outside of the above context manager,
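As a standalone sketch of the retry decision reflowed in the job.py hunks above (illustrative only; the return-code values here are placeholders, not BuildStream's real _ReturnCode enum): a FAIL result counts as temporary and is retried while the try count stays within max_retries and the scheduler has not been terminated.
from enum import IntEnum

class _ReturnCode(IntEnum):
    OK = 0
    FAIL = 1       # temporary failure, eligible for retry
    PERM_FAIL = 2  # permanent failure, never retried

def should_retry(returncode, tries, max_retries, terminated):
    # Mirrors the parent-side check: only temporary failures are retried.
    retry_flag = returncode == _ReturnCode.FAIL
    return retry_flag and tries <= max_retries and not terminated

print(should_retry(_ReturnCode.FAIL, tries=2, max_retries=3, terminated=False))       # True
print(should_retry(_ReturnCode.PERM_FAIL, tries=1, max_retries=3, terminated=False))  # False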
diff --git a/src/buildstream/_scheduler/jobs/jobpickler.py b/src/buildstream/_scheduler/jobs/jobpickler.py
index 0b482d080..1d47f67db 100644
--- a/src/buildstream/_scheduler/jobs/jobpickler.py
+++ b/src/buildstream/_scheduler/jobs/jobpickler.py
@@ -23,9 +23,7 @@ import io
import pickle
from ..._protos.buildstream.v2.artifact_pb2 import Artifact as ArtifactProto
-from ..._protos.build.bazel.remote.execution.v2.remote_execution_pb2 import (
- Digest as DigestProto,
-)
+from ..._protos.build.bazel.remote.execution.v2.remote_execution_pb2 import Digest as DigestProto
# BuildStream toplevel imports
from ..._loader import Loader
@@ -143,10 +141,7 @@ def _pickle_child_job_data(child_job_data, projects):
]
plugin_class_to_factory = {
- cls: factory
- for factory in factory_list
- if factory is not None
- for cls, _ in factory.all_loaded_plugins()
+ cls: factory for factory in factory_list if factory is not None for cls, _ in factory.all_loaded_plugins()
}
pickled_data = io.BytesIO()
diff --git a/src/buildstream/_scheduler/queues/queue.py b/src/buildstream/_scheduler/queues/queue.py
index 62ebcc003..d812a48d6 100644
--- a/src/buildstream/_scheduler/queues/queue.py
+++ b/src/buildstream/_scheduler/queues/queue.py
@@ -76,23 +76,16 @@ class Queue:
self._done_queue = deque() # Processed / Skipped elements
self._max_retries = 0
- self._required_element_check = (
- False # Whether we should check that elements are required before enqueuing
- )
+ self._required_element_check = False # Whether we should check that elements are required before enqueuing
# Assert the subclass has setup class data
assert self.action_name is not None
assert self.complete_name is not None
- if (
- ResourceType.UPLOAD in self.resources
- or ResourceType.DOWNLOAD in self.resources
- ):
+ if ResourceType.UPLOAD in self.resources or ResourceType.DOWNLOAD in self.resources:
self._max_retries = scheduler.context.sched_network_retries
- self._task_group = self._scheduler._state.add_task_group(
- self.action_name, self.complete_name
- )
+ self._task_group = self._scheduler._state.add_task_group(self.action_name, self.complete_name)
# destroy()
#
@@ -169,11 +162,7 @@ class Queue:
# element (Element): The element waiting to be pushed into the queue
#
def register_pending_element(self, element):
- raise ImplError(
- "Queue type: {} does not implement register_pending_element()".format(
- self.action_name
- )
- )
+ raise ImplError("Queue type: {} does not implement register_pending_element()".format(self.action_name))
#####################################################
# Scheduler / Pipeline facing APIs #
@@ -293,10 +282,7 @@ class Queue:
workspaces.save_config()
except BstError as e:
self._message(
- element,
- MessageType.ERROR,
- "Error saving workspaces",
- detail=str(e),
+ element, MessageType.ERROR, "Error saving workspaces", detail=str(e),
)
except Exception: # pylint: disable=broad-except
self._message(
@@ -334,9 +320,7 @@ class Queue:
# Report error and mark as failed
#
- self._message(
- element, MessageType.ERROR, "Post processing error", detail=str(e)
- )
+ self._message(element, MessageType.ERROR, "Post processing error", detail=str(e))
self._task_group.add_failed_task(element._get_full_name())
# Treat this as a task error as it's related to a task
@@ -351,10 +335,7 @@ class Queue:
# Report unhandled exceptions and mark as failed
#
self._message(
- element,
- MessageType.BUG,
- "Unhandled exception in post processing",
- detail=traceback.format_exc(),
+ element, MessageType.BUG, "Unhandled exception in post processing", detail=traceback.format_exc(),
)
self._task_group.add_failed_task(element._get_full_name())
else:
@@ -372,9 +353,7 @@ class Queue:
# Convenience wrapper for Queue implementations to send
# a message for the element they are processing
def _message(self, element, message_type, brief, **kwargs):
- message = Message(
- message_type, brief, element_name=element._get_full_name(), **kwargs
- )
+ message = Message(message_type, brief, element_name=element._get_full_name(), **kwargs)
self._scheduler.notify_messenger(message)
def _element_log_path(self, element):
diff --git a/src/buildstream/_scheduler/resources.py b/src/buildstream/_scheduler/resources.py
index 946a7f0b1..e76158779 100644
--- a/src/buildstream/_scheduler/resources.py
+++ b/src/buildstream/_scheduler/resources.py
@@ -90,10 +90,7 @@ class Resources:
# available. If we don't have enough, the job cannot be
# scheduled.
for resource in resources:
- if (
- self._max_resources[resource] > 0
- and self._used_resources[resource] >= self._max_resources[resource]
- ):
+ if self._max_resources[resource] > 0 and self._used_resources[resource] >= self._max_resources[resource]:
return False
# Now we register the fact that our job is using the resources
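A compact sketch of the reservation check collapsed in the resources.py hunk above (an assumed helper, not the real Resources class; the check appears to treat a configured maximum of 0 as unlimited):
def try_reserve(resources, used, maximums):
    # Refuse if any requested resource is already at its (non-zero) maximum.
    for resource in resources:
        if maximums[resource] > 0 and used[resource] >= maximums[resource]:
            return False
    # Otherwise record that the job now holds one unit of each resource.
    for resource in resources:
        used[resource] += 1
    return True

used = {"build": 3, "download": 0}
print(try_reserve(["build"], used, {"build": 4, "download": 8}))  # True, build now at 4
print(try_reserve(["build"], used, {"build": 4, "download": 8}))  # False, build is at its maximum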
diff --git a/src/buildstream/_scheduler/scheduler.py b/src/buildstream/_scheduler/scheduler.py
index 0555b1103..6268ec169 100644
--- a/src/buildstream/_scheduler/scheduler.py
+++ b/src/buildstream/_scheduler/scheduler.py
@@ -122,16 +122,12 @@ class Scheduler:
#
self.queues = None # Exposed for the frontend to print summaries
self.context = context # The Context object shared with Queues
- self.terminated = (
- False # Whether the scheduler was asked to terminate or has terminated
- )
+ self.terminated = False # Whether the scheduler was asked to terminate or has terminated
self.suspended = False # Whether the scheduler is currently suspended
# These are shared with the Job, but should probably be removed or made private in some way.
self.loop = None # Shared for Job access to observe the message queue
- self.internal_stops = (
- 0 # Amount of SIGSTP signals we've introduced, this is shared with job.py
- )
+ self.internal_stops = 0 # Number of SIGSTP signals we've introduced; this is shared with job.py
#
# Private members
@@ -147,9 +143,7 @@ class Scheduler:
self._notification_queue = notification_queue
self._notifier = notifier
- self.resources = Resources(
- context.sched_builders, context.sched_fetchers, context.sched_pushers
- )
+ self.resources = Resources(context.sched_builders, context.sched_fetchers, context.sched_pushers)
# run()
#
@@ -191,9 +185,7 @@ class Scheduler:
_watcher.add_child_handler(casd_process.pid, self._abort_on_casd_failure)
# Start the profiler
- with PROFILER.profile(
- Topics.SCHEDULER, "_".join(queue.action_name for queue in self.queues)
- ):
+ with PROFILER.profile(Topics.SCHEDULER, "_".join(queue.action_name for queue in self.queues)):
# Run the queues
self._sched()
self.loop.run_forever()
@@ -349,9 +341,7 @@ class Scheduler:
# returncode (int): the return code with which buildbox-casd exited
#
def _abort_on_casd_failure(self, pid, returncode):
- message = Message(
- MessageType.BUG, "buildbox-casd died while the pipeline was active."
- )
+ message = Message(MessageType.BUG, "buildbox-casd died while the pipeline was active.")
self._notify(Notification(NotificationType.MESSAGE, message=message))
self._casd_process.returncode = returncode
@@ -407,9 +397,7 @@ class Scheduler:
# to fetch tasks for elements which failed to pull, and
# thus need all the pulls to complete before ever starting
# a build
- ready.extend(
- chain.from_iterable(q.harvest_jobs() for q in reversed(self.queues))
- )
+ ready.extend(chain.from_iterable(q.harvest_jobs() for q in reversed(self.queues)))
# harvest_jobs() may have decided to skip some jobs, making
# them eligible for promotion to the next queue as a side effect.
@@ -419,11 +407,7 @@ class Scheduler:
# Make sure fork is allowed before starting jobs
if not self.context.prepare_fork():
- message = Message(
- MessageType.BUG,
- "Fork is not allowed",
- detail="Background threads are active",
- )
+ message = Message(MessageType.BUG, "Fork is not allowed", detail="Background threads are active",)
self._notify(Notification(NotificationType.MESSAGE, message=message))
self.terminate_jobs()
return
@@ -484,9 +468,7 @@ class Scheduler:
# Notify that we're unsuspended
self._notify(Notification(NotificationType.SUSPENDED))
self._starttime += datetime.datetime.now() - self._suspendtime
- self._notify(
- Notification(NotificationType.SCHED_START_TIME, time=self._starttime)
- )
+ self._notify(Notification(NotificationType.SCHED_START_TIME, time=self._starttime))
self._suspendtime = None
# _interrupt_event():
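The suspend/resume arithmetic reformatted in the scheduler.py hunk above can be reproduced standalone; this sketch (assumed, not BuildStream code) shows how shifting the start time forward by the suspended interval keeps the reported elapsed time to the running portion only:
import datetime
import time

start = datetime.datetime.now()
time.sleep(0.2)                                 # running
suspendtime = datetime.datetime.now()
time.sleep(0.3)                                 # suspended
start += datetime.datetime.now() - suspendtime  # discount the suspension
elapsed = datetime.datetime.now() - start
print(round(elapsed.total_seconds(), 1))        # ~0.2, not ~0.5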
diff --git a/src/buildstream/_sourcecache.py b/src/buildstream/_sourcecache.py
index 66e1c1bb9..29d0bd5ef 100644
--- a/src/buildstream/_sourcecache.py
+++ b/src/buildstream/_sourcecache.py
@@ -190,9 +190,7 @@ class SourceCache(BaseCache):
vdir.import_files(self.export(previous_source))
if not source.BST_STAGE_VIRTUAL_DIRECTORY:
- with utils._tempdir(
- dir=self.context.tmpdir, prefix="staging-temp"
- ) as tmpdir:
+ with utils._tempdir(dir=self.context.tmpdir, prefix="staging-temp") as tmpdir:
if not vdir.is_empty():
vdir.export_files(tmpdir)
source._stage(tmpdir)
@@ -244,15 +242,11 @@ class SourceCache(BaseCache):
source_proto = self._pull_source(ref, remote)
if source_proto is None:
source.info(
- "Remote source service ({}) does not have source {} cached".format(
- remote, display_key
- )
+ "Remote source service ({}) does not have source {} cached".format(remote, display_key)
)
continue
except CASError as e:
- raise SourceCacheError(
- "Failed to pull source {}: {}".format(display_key, e)
- ) from e
+ raise SourceCacheError("Failed to pull source {}: {}".format(display_key, e)) from e
if not source_proto:
return False
@@ -260,40 +254,26 @@ class SourceCache(BaseCache):
for remote in storage_remotes:
try:
remote.init()
- source.status(
- "Pulling data for source {} <- {}".format(display_key, remote)
- )
+ source.status("Pulling data for source {} <- {}".format(display_key, remote))
# Fetch source blobs
self.cas._fetch_directory(remote, source_proto.files)
- required_blobs = self.cas.required_blobs_for_directory(
- source_proto.files
- )
+ required_blobs = self.cas.required_blobs_for_directory(source_proto.files)
missing_blobs = self.cas.local_missing_blobs(required_blobs)
missing_blobs = self.cas.fetch_blobs(remote, missing_blobs)
if missing_blobs:
- source.info(
- "Remote cas ({}) does not have source {} cached".format(
- remote, display_key
- )
- )
+ source.info("Remote cas ({}) does not have source {} cached".format(remote, display_key))
continue
source.info("Pulled source {} <- {}".format(display_key, remote))
return True
except BlobNotFound as e:
# Not all blobs are available on this remote
- source.info(
- "Remote cas ({}) does not have blob {} cached".format(
- remote, e.blob
- )
- )
+ source.info("Remote cas ({}) does not have blob {} cached".format(remote, e.blob))
continue
except CASError as e:
- raise SourceCacheError(
- "Failed to pull source {}: {}".format(display_key, e)
- ) from e
+ raise SourceCacheError("Failed to pull source {}: {}".format(display_key, e)) from e
return False
@@ -325,18 +305,14 @@ class SourceCache(BaseCache):
display_key = source._get_brief_display_key()
for remote in storage_remotes:
remote.init()
- source.status(
- "Pushing data for source {} -> {}".format(display_key, remote)
- )
+ source.status("Pushing data for source {} -> {}".format(display_key, remote))
source_proto = self._get_source(ref)
try:
self.cas._send_directory(remote, source_proto.files)
pushed_storage = True
except CASRemoteError:
- source.info(
- "Failed to push source files {} -> {}".format(display_key, remote)
- )
+ source.info("Failed to push source files {} -> {}".format(display_key, remote))
continue
for remote in index_remotes:
@@ -345,19 +321,11 @@ class SourceCache(BaseCache):
# check whether cache has files already
if self._pull_source(ref, remote) is not None:
- source.info(
- "Remote ({}) already has source {} cached".format(
- remote, display_key
- )
- )
+ source.info("Remote ({}) already has source {} cached".format(remote, display_key))
continue
if not self._push_source(ref, remote):
- source.info(
- "Failed to push source metadata {} -> {}".format(
- display_key, remote
- )
- )
+ source.info("Failed to push source metadata {} -> {}".format(display_key, remote))
continue
source.info("Pushed source {} -> {}".format(display_key, remote))
@@ -388,9 +356,7 @@ class SourceCache(BaseCache):
source_proto.ParseFromString(f.read())
return source_proto
except FileNotFoundError as e:
- raise SourceCacheError(
- "Attempted to access unavailable source: {}".format(e)
- ) from e
+ raise SourceCacheError("Attempted to access unavailable source: {}".format(e)) from e
def _source_path(self, ref):
return os.path.join(self.sourcerefdir, ref)
diff --git a/src/buildstream/_state.py b/src/buildstream/_state.py
index 07e1f8c9e..d85e348f2 100644
--- a/src/buildstream/_state.py
+++ b/src/buildstream/_state.py
@@ -244,9 +244,7 @@ class State:
# TaskGroup: The task group created
#
def add_task_group(self, name, complete_name=None):
- assert (
- name not in self.task_groups
- ), "Trying to add task group '{}' to '{}'".format(name, self.task_groups)
+ assert name not in self.task_groups, "Trying to add task group '{}' to '{}'".format(name, self.task_groups)
group = TaskGroup(name, self, complete_name)
self.task_groups[name] = group
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index 402473e33..aa14f12c9 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -74,13 +74,7 @@ from . import Scope, Consistency
#
class Stream:
def __init__(
- self,
- context,
- session_start,
- *,
- session_start_callback=None,
- interrupt_callback=None,
- ticker_callback=None
+ self, context, session_start, *, session_start_callback=None, interrupt_callback=None, ticker_callback=None
):
#
@@ -101,26 +95,18 @@ class Stream:
self._pipeline = None
self._state = State(session_start) # Owned by Stream, used by Core to set state
self._notification_queue = deque()
- self._starttime = (
- session_start # Synchronised with Scheduler's relative start time
- )
+ self._starttime = session_start # Synchronised with Scheduler's relative start time
context.messenger.set_state(self._state)
self._scheduler = Scheduler(
- context,
- session_start,
- self._state,
- self._notification_queue,
- self._scheduler_notification_handler,
+ context, session_start, self._state, self._notification_queue, self._scheduler_notification_handler,
)
self._first_non_track_queue = None
self._session_start_callback = session_start_callback
self._ticker_callback = ticker_callback
self._interrupt_callback = interrupt_callback
- self._notifier = (
- self._scheduler._stream_notification_handler
- ) # Assign the schedulers notification handler
+ self._notifier = self._scheduler._stream_notification_handler # Assign the scheduler's notification handler
self._scheduler_running = False
self._scheduler_terminated = False
self._scheduler_suspended = False
@@ -177,9 +163,7 @@ class Stream:
use_artifact_config=False,
load_refs=False
):
- with PROFILER.profile(
- Topics.LOAD_SELECTION, "_".join(t.replace(os.sep, "-") for t in targets)
- ):
+ with PROFILER.profile(Topics.LOAD_SELECTION, "_".join(t.replace(os.sep, "-") for t in targets)):
target_objects, _ = self._load(
targets,
(),
@@ -233,22 +217,15 @@ class Stream:
# in which case we just blindly trust the directory, using the element
# definitions to control the execution environment only.
if directory is None:
- missing_deps = [
- dep
- for dep in self._pipeline.dependencies([element], scope)
- if not dep._cached()
- ]
+ missing_deps = [dep for dep in self._pipeline.dependencies([element], scope) if not dep._cached()]
if missing_deps:
if not pull_dependencies:
raise StreamError(
"Elements need to be built or downloaded before staging a shell environment",
- detail="\n".join(
- list(map(lambda x: x._get_full_name(), missing_deps))
- ),
+ detail="\n".join(list(map(lambda x: x._get_full_name(), missing_deps))),
)
self._message(
- MessageType.INFO,
- "Attempting to fetch missing or incomplete artifacts",
+ MessageType.INFO, "Attempting to fetch missing or incomplete artifacts",
)
self._scheduler.clear_queues()
self._add_queue(PullQueue(self._scheduler))
@@ -264,8 +241,7 @@ class Stream:
# Attempt a pull queue for the given element if remote and context allow it
if require_buildtree:
self._message(
- MessageType.INFO,
- "Attempting to fetch missing artifact buildtree",
+ MessageType.INFO, "Attempting to fetch missing artifact buildtree",
)
self._scheduler.clear_queues()
self._add_queue(PullQueue(self._scheduler))
@@ -280,20 +256,12 @@ class Stream:
if usebuildtree == "always":
raise StreamError(message)
- self._message(
- MessageType.INFO, message + ", shell will be loaded without it"
- )
+ self._message(MessageType.INFO, message + ", shell will be loaded without it")
else:
buildtree = True
return element._shell(
- scope,
- directory,
- mounts=mounts,
- isolate=isolate,
- prompt=prompt,
- command=command,
- usebuildtree=buildtree,
+ scope, directory, mounts=mounts, isolate=isolate, prompt=prompt, command=command, usebuildtree=buildtree,
)
# build()
@@ -309,14 +277,7 @@ class Stream:
# If `remote` specified as None, then regular configuration will be used
# to determine where to push artifacts to.
#
- def build(
- self,
- targets,
- *,
- selection=PipelineSelection.PLAN,
- ignore_junction_targets=False,
- remote=None
- ):
+ def build(self, targets, *, selection=PipelineSelection.PLAN, ignore_junction_targets=False, remote=None):
use_config = True
if remote:
@@ -336,9 +297,7 @@ class Stream:
# Assert that the elements are consistent
self._pipeline.assert_consistent(elements)
- if all(
- project.remote_execution_specs for project in self._context.get_projects()
- ):
+ if all(project.remote_execution_specs for project in self._context.get_projects()):
# Remote execution is configured for all projects.
# Require artifact files only for target elements and their runtime dependencies.
self._context.set_artifact_files_optional()
@@ -434,14 +393,7 @@ class Stream:
# If no error is encountered while tracking, then the project files
# are rewritten inline.
#
- def track(
- self,
- targets,
- *,
- selection=PipelineSelection.REDIRECT,
- except_targets=None,
- cross_junctions=False
- ):
+ def track(self, targets, *, selection=PipelineSelection.REDIRECT, except_targets=None, cross_junctions=False):
# We pass no target to build. Only to track. Passing build targets
# would fully load project configuration which might not be
@@ -475,14 +427,7 @@ class Stream:
# If `remote` specified as None, then regular configuration will be used
# to determine where to pull artifacts from.
#
- def pull(
- self,
- targets,
- *,
- selection=PipelineSelection.NONE,
- ignore_junction_targets=False,
- remote=None
- ):
+ def pull(self, targets, *, selection=PipelineSelection.NONE, ignore_junction_targets=False, remote=None):
use_config = True
if remote:
@@ -524,14 +469,7 @@ class Stream:
# a pull queue will be created if user context and available remotes allow for
# attempting to fetch them.
#
- def push(
- self,
- targets,
- *,
- selection=PipelineSelection.NONE,
- ignore_junction_targets=False,
- remote=None
- ):
+ def push(self, targets, *, selection=PipelineSelection.NONE, ignore_junction_targets=False, remote=None):
use_config = True
if remote:
@@ -555,9 +493,7 @@ class Stream:
# Check if we require a pull queue, with given artifact state and context
require_buildtrees = self._buildtree_pull_required(elements)
if require_buildtrees:
- self._message(
- MessageType.INFO, "Attempting to fetch missing artifact buildtrees"
- )
+ self._message(MessageType.INFO, "Attempting to fetch missing artifact buildtrees")
self._add_queue(PullQueue(self._scheduler))
self._enqueue_plan(require_buildtrees)
@@ -589,10 +525,7 @@ class Stream:
# NOTE: Usually we check the _SchedulerErrorAction when a *job* has failed.
# However, we cannot create a PushQueue job unless we intentionally
# ready an uncached element in the PushQueue.
- if (
- self._context.sched_error_action == _SchedulerErrorAction.CONTINUE
- and uncached_elements
- ):
+ if self._context.sched_error_action == _SchedulerErrorAction.CONTINUE and uncached_elements:
names = [element.name for element in uncached_elements]
fail_str = (
"Error while pushing. The following elements were not pushed as they are "
@@ -635,9 +568,7 @@ class Stream:
tar=False
):
- elements, _ = self._load(
- (target,), (), selection=selection, use_artifact_config=True, load_refs=True
- )
+ elements, _ = self._load((target,), (), selection=selection, use_artifact_config=True, load_refs=True)
# self.targets contains a list of the loaded target objects
# if we specify --deps build, Stream._load() will return a list
@@ -649,9 +580,7 @@ class Stream:
uncached_elts = [elt for elt in elements if not elt._cached()]
if uncached_elts and pull:
- self._message(
- MessageType.INFO, "Attempting to fetch missing or incomplete artifact"
- )
+ self._message(MessageType.INFO, "Attempting to fetch missing or incomplete artifact")
self._scheduler.clear_queues()
self._add_queue(PullQueue(self._scheduler))
self._enqueue_plan(uncached_elts)
@@ -664,19 +593,13 @@ class Stream:
"none": Scope.NONE,
"all": Scope.ALL,
}
- with target._prepare_sandbox(
- scope=scope[selection], directory=None, integrate=integrate
- ) as sandbox:
+ with target._prepare_sandbox(scope=scope[selection], directory=None, integrate=integrate) as sandbox:
# Copy or move the sandbox to the target directory
virdir = sandbox.get_virtual_directory()
- self._export_artifact(
- tar, location, compression, target, hardlinks, virdir
- )
+ self._export_artifact(tar, location, compression, target, hardlinks, virdir)
except BstError as e:
raise StreamError(
- "Error while staging dependencies into a sandbox" ": '{}'".format(e),
- detail=e.detail,
- reason=e.reason,
+ "Error while staging dependencies into a sandbox" ": '{}'".format(e), detail=e.detail, reason=e.reason,
) from e
# _export_artifact()
@@ -729,9 +652,7 @@ class Stream:
#
def artifact_show(self, targets, *, selection=PipelineSelection.NONE):
# Obtain list of Element and/or ArtifactElement objects
- target_objects = self.load_selection(
- targets, selection=selection, use_artifact_config=True, load_refs=True
- )
+ target_objects = self.load_selection(targets, selection=selection, use_artifact_config=True, load_refs=True)
if self._artifacts.has_fetch_remotes():
self._pipeline.check_remotes(target_objects)
@@ -756,9 +677,7 @@ class Stream:
#
def artifact_log(self, targets):
# Return list of Element and/or ArtifactElement objects
- target_objects = self.load_selection(
- targets, selection=PipelineSelection.NONE, load_refs=True
- )
+ target_objects = self.load_selection(targets, selection=PipelineSelection.NONE, load_refs=True)
artifact_logs = {}
for obj in target_objects:
@@ -767,9 +686,7 @@ class Stream:
self._message(MessageType.WARN, "{} is not cached".format(ref))
continue
elif not obj._cached_logs():
- self._message(
- MessageType.WARN, "{} is cached without log files".format(ref)
- )
+ self._message(MessageType.WARN, "{} is cached without log files".format(ref))
continue
artifact_logs[obj.name] = obj.get_logs()
@@ -788,9 +705,7 @@ class Stream:
#
def artifact_list_contents(self, targets):
# Return list of Element and/or ArtifactElement objects
- target_objects = self.load_selection(
- targets, selection=PipelineSelection.NONE, load_refs=True
- )
+ target_objects = self.load_selection(targets, selection=PipelineSelection.NONE, load_refs=True)
elements_to_files = {}
for obj in target_objects:
@@ -814,9 +729,7 @@ class Stream:
#
def artifact_delete(self, targets, *, selection=PipelineSelection.NONE):
# Return list of Element and/or ArtifactElement objects
- target_objects = self.load_selection(
- targets, selection=selection, load_refs=True
- )
+ target_objects = self.load_selection(targets, selection=selection, load_refs=True)
# Some of the targets may refer to the same key, so first obtain a
# set of the refs to be removed.
@@ -869,9 +782,7 @@ class Stream:
self._check_location_writable(location, force=force, tar=tar)
- elements, _ = self._load(
- (target,), (), selection=deps, except_targets=except_targets
- )
+ elements, _ = self._load((target,), (), selection=deps, except_targets=except_targets)
# Assert all sources are cached in the source dir
self._fetch(elements)
@@ -879,14 +790,10 @@ class Stream:
# Stage all sources determined by scope
try:
- self._source_checkout(
- elements, location, force, deps, tar, compression, include_build_scripts
- )
+ self._source_checkout(elements, location, force, deps, tar, compression, include_build_scripts)
except BstError as e:
raise StreamError(
- "Error while writing sources" ": '{}'".format(e),
- detail=e.detail,
- reason=e.reason,
+ "Error while writing sources" ": '{}'".format(e), detail=e.detail, reason=e.reason,
) from e
self._message(MessageType.INFO, "Checked out sources to '{}'".format(location))
@@ -934,18 +841,12 @@ class Stream:
for target in elements:
if not list(target.sources()):
- build_depends = [
- x.name for x in target.dependencies(Scope.BUILD, recurse=False)
- ]
+ build_depends = [x.name for x in target.dependencies(Scope.BUILD, recurse=False)]
if not build_depends:
- raise StreamError(
- "The element {} has no sources".format(target.name)
- )
+ raise StreamError("The element {} has no sources".format(target.name))
detail = "Try opening a workspace on one of its dependencies instead:\n"
detail += " \n".join(build_depends)
- raise StreamError(
- "The element {} has no sources".format(target.name), detail=detail
- )
+ raise StreamError("The element {} has no sources".format(target.name), detail=detail)
# Check for workspace config
workspace = workspaces.get_workspace(target._get_full_name())
@@ -962,16 +863,10 @@ class Stream:
target.name, workspace.get_absolute_path()
)
)
- self.workspace_close(
- target._get_full_name(), remove_dir=not no_checkout
- )
+ self.workspace_close(target._get_full_name(), remove_dir=not no_checkout)
target_consistency = target._get_consistency()
- if (
- not no_checkout
- and target_consistency < Consistency.CACHED
- and target_consistency._source_cached()
- ):
+ if not no_checkout and target_consistency < Consistency.CACHED and target_consistency._source_cached():
raise StreamError(
"Could not stage uncached source. For {} ".format(target.name)
+ "Use `--track` to track and "
@@ -980,9 +875,7 @@ class Stream:
)
if not custom_dir:
- directory = os.path.abspath(
- os.path.join(self._context.workspacedir, target.name)
- )
+ directory = os.path.abspath(os.path.join(self._context.workspacedir, target.name))
if directory[-4:] == ".bst":
directory = directory[:-4]
expanded_directories.append(directory)
@@ -1006,17 +899,13 @@ class Stream:
if os.path.exists(directory):
if not os.path.isdir(directory):
raise StreamError(
- "For element '{}', Directory path is not a directory: {}".format(
- target.name, directory
- ),
+ "For element '{}', Directory path is not a directory: {}".format(target.name, directory),
reason="bad-directory",
)
if not (no_checkout or force) and os.listdir(directory):
raise StreamError(
- "For element '{}', Directory path is not empty: {}".format(
- target.name, directory
- ),
+ "For element '{}', Directory path is not empty: {}".format(target.name, directory),
reason="bad-directory",
)
if os.listdir(directory):
@@ -1028,8 +917,7 @@ class Stream:
targetGenerator = zip(elements, expanded_directories)
for target, directory in targetGenerator:
self._message(
- MessageType.INFO,
- "Creating workspace for element {}".format(target.name),
+ MessageType.INFO, "Creating workspace for element {}".format(target.name),
)
workspace = workspaces.get_workspace(target._get_full_name())
@@ -1040,22 +928,15 @@ class Stream:
try:
os.makedirs(directory, exist_ok=True)
except OSError as e:
- todo_elements = " ".join(
- [str(target.name) for target, directory_dict in targetGenerator]
- )
+ todo_elements = " ".join([str(target.name) for target, directory_dict in targetGenerator])
if todo_elements:
# This output should make creating the remaining workspaces as easy as possible.
- todo_elements = (
- "\nDid not try to create workspaces for " + todo_elements
- )
- raise StreamError(
- "Failed to create workspace directory: {}".format(e) + todo_elements
- ) from e
+ todo_elements = "\nDid not try to create workspaces for " + todo_elements
+ raise StreamError("Failed to create workspace directory: {}".format(e) + todo_elements) from e
workspaces.create_workspace(target, directory, checkout=not no_checkout)
self._message(
- MessageType.INFO,
- "Created a workspace for element: {}".format(target._get_full_name()),
+ MessageType.INFO, "Created a workspace for element: {}".format(target._get_full_name()),
)
# workspace_close
@@ -1078,11 +959,7 @@ class Stream:
try:
shutil.rmtree(workspace.get_absolute_path())
except OSError as e:
- raise StreamError(
- "Could not remove '{}': {}".format(
- workspace.get_absolute_path(), e
- )
- ) from e
+ raise StreamError("Could not remove '{}': {}".format(workspace.get_absolute_path(), e)) from e
# Delete the workspace and save the configuration
workspaces.delete_workspace(element_name)
@@ -1102,10 +979,7 @@ class Stream:
def workspace_reset(self, targets, *, soft, track_first):
elements, _ = self._load(
- targets,
- [],
- selection=PipelineSelection.REDIRECT,
- track_selection=PipelineSelection.REDIRECT,
+ targets, [], selection=PipelineSelection.REDIRECT, track_selection=PipelineSelection.REDIRECT,
)
nonexisting = []
@@ -1123,10 +997,7 @@ class Stream:
if soft:
workspace.prepared = False
self._message(
- MessageType.INFO,
- "Reset workspace state for {} at: {}".format(
- element.name, workspace_path
- ),
+ MessageType.INFO, "Reset workspace state for {} at: {}".format(element.name, workspace_path),
)
continue
@@ -1218,10 +1089,7 @@ class Stream:
output_elements.add(e)
if load_elements:
loaded_elements, _ = self._load(
- load_elements,
- (),
- selection=PipelineSelection.REDIRECT,
- track_selection=PipelineSelection.REDIRECT,
+ load_elements, (), selection=PipelineSelection.REDIRECT, track_selection=PipelineSelection.REDIRECT,
)
for e in loaded_elements:
@@ -1379,15 +1247,9 @@ class Stream:
if target_artifacts:
if not load_refs:
detail = "\n".join(target_artifacts)
- raise ArtifactElementError(
- "Cannot perform this operation with artifact refs:", detail=detail
- )
+ raise ArtifactElementError("Cannot perform this operation with artifact refs:", detail=detail)
if selection in (PipelineSelection.ALL, PipelineSelection.RUN):
- raise StreamError(
- "Error: '--deps {}' is not supported for artifact refs".format(
- selection
- )
- )
+ raise StreamError("Error: '--deps {}' is not supported for artifact refs".format(selection))
# Load rewritable if we have any tracking selection to make
rewritable = False
@@ -1402,12 +1264,7 @@ class Stream:
track_except_targets,
]
if any(loadable):
- (
- elements,
- except_elements,
- track_elements,
- track_except_elements,
- ) = self._pipeline.load(
+ (elements, except_elements, track_elements, track_except_elements,) = self._pipeline.load(
loadable, rewritable=rewritable, ignore_workspaces=ignore_workspaces
)
else:
@@ -1419,9 +1276,7 @@ class Stream:
)
# Load all target artifacts
- artifacts = (
- self._pipeline.load_artifacts(target_artifacts) if target_artifacts else []
- )
+ artifacts = self._pipeline.load_artifacts(target_artifacts) if target_artifacts else []
# Optionally filter out junction elements
if ignore_junction_targets:
@@ -1437,10 +1292,7 @@ class Stream:
# This can happen with `bst build --track`
#
if targets and not self._pipeline.targets_include(elements, track_elements):
- raise StreamError(
- "Specified tracking targets that are not "
- "within the scope of primary targets"
- )
+ raise StreamError("Specified tracking targets that are not " "within the scope of primary targets")
# First take care of marking tracking elements, this must be
# done before resolving element states.
@@ -1462,14 +1314,10 @@ class Stream:
for project, project_elements in track_projects.items():
selected = self._pipeline.get_selection(project_elements, track_selection)
- selected = self._pipeline.track_cross_junction_filter(
- project, selected, track_cross_junctions
- )
+ selected = self._pipeline.track_cross_junction_filter(project, selected, track_cross_junctions)
track_selected.extend(selected)
- track_selected = self._pipeline.except_elements(
- track_elements, track_selected, track_except_elements
- )
+ track_selected = self._pipeline.except_elements(track_elements, track_selected, track_except_elements)
for element in track_selected:
element._schedule_tracking()
@@ -1483,20 +1331,14 @@ class Stream:
project.ensure_fully_loaded()
# Connect to remote caches, this needs to be done before resolving element state
- self._artifacts.setup_remotes(
- use_config=use_artifact_config, remote_url=artifact_remote_url
- )
- self._sourcecache.setup_remotes(
- use_config=use_source_config, remote_url=source_remote_url
- )
+ self._artifacts.setup_remotes(use_config=use_artifact_config, remote_url=artifact_remote_url)
+ self._sourcecache.setup_remotes(use_config=use_source_config, remote_url=source_remote_url)
# Now move on to loading primary selection.
#
self._pipeline.resolve_elements(self.targets)
selected = self._pipeline.get_selection(self.targets, selection, silent=False)
- selected = self._pipeline.except_elements(
- self.targets, selected, except_elements
- )
+ selected = self._pipeline.except_elements(self.targets, selected, except_elements)
if selection == PipelineSelection.PLAN and dynamic_plan:
# We use a dynamic build plan, only request artifacts of top-level targets,
@@ -1557,9 +1399,7 @@ class Stream:
# unique_id (str): A unique_id to load an Element instance
#
def _failure_retry(self, action_name, unique_id):
- notification = Notification(
- NotificationType.RETRY, job_action=action_name, element=unique_id
- )
+ notification = Notification(NotificationType.RETRY, job_action=action_name, element=unique_id)
self._notify(notification)
# _run()
@@ -1576,9 +1416,7 @@ class Stream:
if self._session_start_callback is not None:
self._session_start_callback()
- status = self._scheduler.run(
- self.queues, self._context.get_cascache().get_casd_process()
- )
+ status = self._scheduler.run(self.queues, self._context.get_cascache().get_casd_process())
if status == SchedStatus.ERROR:
raise StreamError()
@@ -1643,17 +1481,11 @@ class Stream:
try:
os.makedirs(location, exist_ok=True)
except OSError as e:
- raise StreamError(
- "Failed to create destination directory: '{}'".format(e)
- ) from e
+ raise StreamError("Failed to create destination directory: '{}'".format(e)) from e
if not os.access(location, os.W_OK):
- raise StreamError(
- "Destination directory '{}' not writable".format(location)
- )
+ raise StreamError("Destination directory '{}' not writable".format(location))
if not force and os.listdir(location):
- raise StreamError(
- "Destination directory '{}' not empty".format(location)
- )
+ raise StreamError("Destination directory '{}' not empty".format(location))
elif os.path.exists(location) and location != "-":
if not os.access(location, os.W_OK):
raise StreamError("Output file '{}' not writable".format(location))
@@ -1666,9 +1498,7 @@ class Stream:
try:
utils.safe_remove(directory)
except OSError as e:
- raise StreamError(
- "Failed to remove checkout directory: {}".format(e)
- ) from e
+ raise StreamError("Failed to remove checkout directory: {}".format(e)) from e
sandbox_vroot.export_files(directory, can_link=True, can_destroy=True)
@@ -1698,9 +1528,7 @@ class Stream:
else:
self._move_directory(temp_source_dir.name, location, force)
except OSError as e:
- raise StreamError(
- "Failed to checkout sources to {}: {}".format(location, e)
- ) from e
+ raise StreamError("Failed to checkout sources to {}: {}".format(location, e)) from e
finally:
with suppress(FileNotFoundError):
temp_source_dir.cleanup()
@@ -1819,11 +1647,7 @@ class Stream:
for element in elements:
# Check if element is partially cached without its buildtree, as the element
# artifact may not be cached at all
- if (
- element._cached()
- and not element._cached_buildtree()
- and element._buildtree_exists()
- ):
+ if element._cached() and not element._cached_buildtree() and element._buildtree_exists():
required_list.append(element)
return required_list
@@ -1877,10 +1701,7 @@ class Stream:
artifact_refs.extend(self._artifacts.list_artifacts(glob=glob))
if not artifact_refs:
self._message(
- MessageType.WARN,
- "No artifacts found for globs: {}".format(
- ", ".join(artifact_globs)
- ),
+ MessageType.WARN, "No artifacts found for globs: {}".format(", ".join(artifact_globs)),
)
return element_targets, artifact_refs
@@ -1897,16 +1718,12 @@ class Stream:
elif notification.notification_type == NotificationType.TICK:
self._ticker_callback()
elif notification.notification_type == NotificationType.JOB_START:
- self._state.add_task(
- notification.job_action, notification.full_name, notification.time
- )
+ self._state.add_task(notification.job_action, notification.full_name, notification.time)
elif notification.notification_type == NotificationType.JOB_COMPLETE:
self._state.remove_task(notification.job_action, notification.full_name)
if notification.job_status == JobStatus.FAIL:
self._state.fail_task(
- notification.job_action,
- notification.full_name,
- notification.element,
+ notification.job_action, notification.full_name, notification.element,
)
elif notification.notification_type == NotificationType.SCHED_START_TIME:
self._starttime = notification.time
diff --git a/src/buildstream/_version.py b/src/buildstream/_version.py
index 12dde1df8..baeb00699 100644
--- a/src/buildstream/_version.py
+++ b/src/buildstream/_version.py
@@ -134,10 +134,7 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
root = os.path.dirname(root) # up a level
if verbose:
- print(
- "Tried directories %s but none started with prefix %s"
- % (str(rootdirs), parentdir_prefix)
- )
+ print("Tried directories %s but none started with prefix %s" % (str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@@ -254,15 +251,7 @@ def git_pieces_from_vcs(tag_prefix, tag_regex, root, verbose, run_command=run_co
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(
GITS,
- [
- "describe",
- "--tags",
- "--dirty",
- "--always",
- "--long",
- "--match",
- "%s%s" % (tag_prefix, tag_regex),
- ],
+ ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%s%s" % (tag_prefix, tag_regex),],
cwd=root,
)
# --long was added in git-1.5.5
@@ -305,10 +294,7 @@ def git_pieces_from_vcs(tag_prefix, tag_regex, root, verbose, run_command=run_co
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
- pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
- full_tag,
- tag_prefix,
- )
+ pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix,)
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix) :]
@@ -325,9 +311,7 @@ def git_pieces_from_vcs(tag_prefix, tag_regex, root, verbose, run_command=run_co
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
- date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[
- 0
- ].strip()
+ date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
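For readers unfamiliar with the versioneer output being reformatted above: "git describe --long" yields strings of the form TAG-DISTANCE-gHEX[-dirty], which the code splits into pieces. A self-contained sketch of that split (sample value and simplified regex are assumptions, not the versioneer implementation):
import re

def split_describe(describe_out):
    # Separate the optional -dirty suffix, then split TAG-DISTANCE-gHEX.
    dirty = describe_out.endswith("-dirty")
    if dirty:
        describe_out = describe_out[: -len("-dirty")]
    mo = re.match(r"^(.*)-(\d+)-g([0-9a-f]+)$", describe_out)
    return mo.group(1), int(mo.group(2)), mo.group(3), dirty

print(split_describe("1.4.1-23-g9f2c1e0-dirty"))  # ('1.4.1', 23, '9f2c1e0', True)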
diff --git a/src/buildstream/_workspaces.py b/src/buildstream/_workspaces.py
index 488630634..45ae87a87 100644
--- a/src/buildstream/_workspaces.py
+++ b/src/buildstream/_workspaces.py
@@ -92,9 +92,7 @@ class WorkspaceProject:
def from_dict(cls, directory, dictionary):
# Only know how to handle one format-version at the moment.
format_version = int(dictionary["format-version"])
- assert (
- format_version == BST_WORKSPACE_PROJECT_FORMAT_VERSION
- ), "Format version {} not found in {}".format(
+ assert format_version == BST_WORKSPACE_PROJECT_FORMAT_VERSION, "Format version {} not found in {}".format(
BST_WORKSPACE_PROJECT_FORMAT_VERSION, dictionary
)
@@ -149,9 +147,7 @@ class WorkspaceProject:
#
def add_project(self, project_path, element_name):
assert project_path and element_name
- self._projects.append(
- {"project-path": project_path, "element-name": element_name}
- )
+ self._projects.append({"project-path": project_path, "element-name": element_name})
# WorkspaceProjectCache()
@@ -222,8 +218,7 @@ class WorkspaceProjectCache:
workspace_project = self.get(directory)
if not workspace_project:
raise LoadError(
- "Failed to find a {} file to remove".format(WORKSPACE_PROJECT_FILE),
- LoadErrorReason.MISSING_FILE,
+ "Failed to find a {} file to remove".format(WORKSPACE_PROJECT_FILE), LoadErrorReason.MISSING_FILE,
)
path = workspace_project.get_filename()
try:
@@ -250,15 +245,7 @@ class WorkspaceProjectCache:
# made obsolete with failed build artifacts.
#
class Workspace:
- def __init__(
- self,
- toplevel_project,
- *,
- last_successful=None,
- path=None,
- prepared=False,
- running_files=None
- ):
+ def __init__(self, toplevel_project, *, last_successful=None, path=None, prepared=False, running_files=None):
self.prepared = prepared
self.last_successful = last_successful
self._path = path
@@ -336,9 +323,7 @@ class Workspace:
if os.path.isdir(fullpath):
utils.copy_files(fullpath, directory)
else:
- destfile = os.path.join(
- directory, os.path.basename(self.get_absolute_path())
- )
+ destfile = os.path.join(directory, os.path.basename(self.get_absolute_path()))
utils.safe_copy(fullpath, destfile)
# add_running_files()
@@ -381,8 +366,7 @@ class Workspace:
stat = os.lstat(filename)
except OSError as e:
raise LoadError(
- "Failed to stat file in workspace: {}".format(e),
- LoadErrorReason.MISSING_FILE,
+ "Failed to stat file in workspace: {}".format(e), LoadErrorReason.MISSING_FILE,
)
# Use the mtime of any file with sub second precision
@@ -397,16 +381,12 @@ class Workspace:
if os.path.isdir(fullpath):
filelist = utils.list_relative_paths(fullpath)
filelist = [
- (relpath, os.path.join(fullpath, relpath))
- for relpath in filelist
- if relpath not in excluded_files
+ (relpath, os.path.join(fullpath, relpath)) for relpath in filelist if relpath not in excluded_files
]
else:
filelist = [(self.get_absolute_path(), fullpath)]
- self._key = [
- (relpath, unique_key(fullpath)) for relpath, fullpath in filelist
- ]
+ self._key = [(relpath, unique_key(fullpath)) for relpath, fullpath in filelist]
return self._key
@@ -462,25 +442,17 @@ class Workspaces:
else:
workspace_path = path
- self._workspaces[element_name] = Workspace(
- self._toplevel_project, path=workspace_path
- )
+ self._workspaces[element_name] = Workspace(self._toplevel_project, path=workspace_path)
if checkout:
with target.timed_activity("Staging sources to {}".format(path)):
target._open_workspace()
- workspace_project = self._workspace_project_cache.add(
- path, project_dir, element_name
- )
+ workspace_project = self._workspace_project_cache.add(path, project_dir, element_name)
project_file_path = workspace_project.get_filename()
if os.path.exists(project_file_path):
- target.warn(
- "{} was staged from this element's sources".format(
- WORKSPACE_PROJECT_FILE
- )
- )
+ target.warn("{} was staged from this element's sources".format(WORKSPACE_PROJECT_FILE))
workspace_project.write()
self.save_config()
@@ -556,10 +528,7 @@ class Workspaces:
config = {
"format-version": BST_WORKSPACE_FORMAT_VERSION,
- "workspaces": {
- element: workspace.to_dict()
- for element, workspace in self._workspaces.items()
- },
+ "workspaces": {element: workspace.to_dict() for element, workspace in self._workspaces.items()},
}
os.makedirs(self._bst_directory, exist_ok=True)
_yaml.roundtrip_dump(config, self._get_filename())
@@ -605,8 +574,7 @@ class Workspaces:
version = workspaces.get_int("format-version", default=0)
except ValueError:
raise LoadError(
- "Format version is not an integer in workspace configuration",
- LoadErrorReason.INVALID_DATA,
+ "Format version is not an integer in workspace configuration", LoadErrorReason.INVALID_DATA,
)
if version == 0:
@@ -626,16 +594,14 @@ class Workspaces:
+ "Please remove this element from '{}'."
)
raise LoadError(
- detail.format(element, self._get_filename()),
- LoadErrorReason.INVALID_DATA,
+ detail.format(element, self._get_filename()), LoadErrorReason.INVALID_DATA,
)
workspaces[element] = sources[0]
else:
raise LoadError(
- "Workspace config is in unexpected format.",
- LoadErrorReason.INVALID_DATA,
+ "Workspace config is in unexpected format.", LoadErrorReason.INVALID_DATA,
)
res = {
@@ -645,10 +611,7 @@ class Workspaces:
elif 1 <= version <= BST_WORKSPACE_FORMAT_VERSION:
workspaces = workspaces.get_mapping("workspaces", default={})
- res = {
- element: self._load_workspace(node)
- for element, node in workspaces.items()
- }
+ res = {element: self._load_workspace(node) for element, node in workspaces.items()}
else:
raise LoadError(
diff --git a/src/buildstream/buildelement.py b/src/buildstream/buildelement.py
index 55f0dc0c3..4fa678932 100644
--- a/src/buildstream/buildelement.py
+++ b/src/buildstream/buildelement.py
@@ -243,9 +243,7 @@ class BuildElement(Element):
if not commands or command_name == "configure-commands":
continue
- with sandbox.batch(
- SandboxFlags.ROOT_READ_ONLY, label="Running {}".format(command_name)
- ):
+ with sandbox.batch(SandboxFlags.ROOT_READ_ONLY, label="Running {}".format(command_name)):
for cmd in commands:
self.__run_command(sandbox, cmd)
@@ -253,9 +251,7 @@ class BuildElement(Element):
# to - if an element later attempts to stage to a location
# that is not empty, we abort the build - in this case this
# will almost certainly happen.
- staged_build = os.path.join(
- self.get_variable("install-root"), self.get_variable("build-root")
- )
+ staged_build = os.path.join(self.get_variable("install-root"), self.get_variable("build-root"))
if os.path.isdir(staged_build) and os.listdir(staged_build):
self.warn(
@@ -272,9 +268,7 @@ class BuildElement(Element):
def prepare(self, sandbox):
commands = self.__commands["configure-commands"]
if commands:
- with sandbox.batch(
- SandboxFlags.ROOT_READ_ONLY, label="Running configure-commands"
- ):
+ with sandbox.batch(SandboxFlags.ROOT_READ_ONLY, label="Running configure-commands"):
for cmd in commands:
self.__run_command(sandbox, cmd)
@@ -299,6 +293,4 @@ class BuildElement(Element):
# Note the -e switch to 'sh' means to exit with an error
# if any untested command fails.
#
- sandbox.run(
- ["sh", "-c", "-e", cmd + "\n"], SandboxFlags.ROOT_READ_ONLY, label=cmd
- )
+ sandbox.run(["sh", "-c", "-e", cmd + "\n"], SandboxFlags.ROOT_READ_ONLY, label=cmd)
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index fe9993d41..fbde79d0e 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -145,20 +145,10 @@ class ElementError(BstError):
"""
def __init__(
- self,
- message: str,
- *,
- detail: str = None,
- reason: str = None,
- collect: str = None,
- temporary: bool = False
+ self, message: str, *, detail: str = None, reason: str = None, collect: str = None, temporary: bool = False
):
super().__init__(
- message,
- detail=detail,
- domain=ErrorDomain.ELEMENT,
- reason=reason,
- temporary=temporary,
+ message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason, temporary=temporary,
)
self.collect = collect
@@ -233,11 +223,7 @@ class Element(Plugin):
"""
def __init__(
- self,
- context: "Context",
- project: "Project",
- meta: "MetaElement",
- plugin_conf: Dict[str, Any],
+ self, context: "Context", project: "Project", meta: "MetaElement", plugin_conf: Dict[str, Any],
):
self.__cache_key_dict = None # Dict for cache key calculation
@@ -268,31 +254,15 @@ class Element(Plugin):
self.__reverse_build_deps = set() # type: Set[Element]
# Direct reverse runtime dependency Elements
self.__reverse_runtime_deps = set() # type: Set[Element]
- self.__build_deps_without_strict_cache_key = (
- None # Number of build dependencies without a strict key
- )
- self.__runtime_deps_without_strict_cache_key = (
- None # Number of runtime dependencies without a strict key
- )
- self.__build_deps_without_cache_key = (
- None # Number of build dependencies without a cache key
- )
- self.__runtime_deps_without_cache_key = (
- None # Number of runtime dependencies without a cache key
- )
+ self.__build_deps_without_strict_cache_key = None # Number of build dependencies without a strict key
+ self.__runtime_deps_without_strict_cache_key = None # Number of runtime dependencies without a strict key
+ self.__build_deps_without_cache_key = None # Number of build dependencies without a cache key
+ self.__runtime_deps_without_cache_key = None # Number of runtime dependencies without a cache key
self.__build_deps_uncached = None # Build dependencies which are not yet cached
- self.__runtime_deps_uncached = (
- None # Runtime dependencies which are not yet cached
- )
- self.__updated_strict_cache_keys_of_rdeps = (
- False # Whether we've updated strict cache keys of rdeps
- )
- self.__ready_for_runtime = (
- False # Whether the element and its runtime dependencies have cache keys
- )
- self.__ready_for_runtime_and_cached = (
- False # Whether all runtime deps are cached, as well as the element
- )
+ self.__runtime_deps_uncached = None # Runtime dependencies which are not yet cached
+ self.__updated_strict_cache_keys_of_rdeps = False # Whether we've updated strict cache keys of rdeps
+ self.__ready_for_runtime = False # Whether the element and its runtime dependencies have cache keys
+ self.__ready_for_runtime_and_cached = False # Whether all runtime deps are cached, as well as the element
self.__cached_remotely = None # Whether the element is cached remotely
# List of Sources
self.__sources = [] # type: List[Source]
@@ -300,37 +270,21 @@ class Element(Plugin):
self.__strict_cache_key = None # Our cached cache key for strict builds
self.__artifacts = context.artifactcache # Artifact cache
self.__sourcecache = context.sourcecache # Source cache
- self.__consistency = (
- Consistency.INCONSISTENT
- ) # Cached overall consistency state
+ self.__consistency = Consistency.INCONSISTENT # Cached overall consistency state
self.__assemble_scheduled = False # Element is scheduled to be assembled
self.__assemble_done = False # Element is assembled
self.__tracking_scheduled = False # Sources are scheduled to be tracked
self.__pull_done = False # Whether pull was attempted
- self.__cached_successfully = (
- None # If the Element is known to be successfully cached
- )
- self.__source_cached = (
- None # If the sources are known to be successfully cached
- )
+ self.__cached_successfully = None # If the Element is known to be successfully cached
+ self.__source_cached = None # If the sources are known to be successfully cached
self.__splits = None # Resolved regex objects for computing split domains
- self.__whitelist_regex = (
- None # Resolved regex object to check if file is allowed to overlap
- )
+ self.__whitelist_regex = None # Resolved regex object to check if file is allowed to overlap
# Location where Element.stage_sources() was called
self.__staged_sources_directory = None # type: Optional[str]
- self.__tainted = (
- None # Whether the artifact is tainted and should not be shared
- )
- self.__required = (
- False # Whether the artifact is required in the current session
- )
- self.__artifact_files_required = (
- False # Whether artifact files are required in the local cache
- )
- self.__build_result = (
- None # The result of assembling this Element (success, description, detail)
- )
+ self.__tainted = None # Whether the artifact is tainted and should not be shared
+ self.__required = False # Whether the artifact is required in the current session
+ self.__artifact_files_required = False # Whether artifact files are required in the local cache
+ self.__build_result = None # The result of assembling this Element (success, description, detail)
self._build_log_path = None # The path of the build log for this Element
# Artifact class for direct artifact composite interaction
self.__artifact = None # type: Optional[Artifact]
@@ -341,12 +295,8 @@ class Element(Plugin):
# sources for staging
self.__last_source_requires_previous_ix = None
- self.__batch_prepare_assemble = (
- False # Whether batching across prepare()/assemble() is configured
- )
- self.__batch_prepare_assemble_flags = (
- 0 # Sandbox flags for batching across prepare()/assemble()
- )
+ self.__batch_prepare_assemble = False # Whether batching across prepare()/assemble() is configured
+ self.__batch_prepare_assemble_flags = 0 # Sandbox flags for batching across prepare()/assemble()
# Collect dir for batching across prepare()/assemble()
self.__batch_prepare_assemble_collect = None # type: Optional[str]
@@ -356,9 +306,7 @@ class Element(Plugin):
self.__buildable_callback = None # Callback to BuildQueue
self._depth = None # Depth of Element in its current dependency graph
- self._resolved_initial_state = (
- False # Whether the initial state of the Element has been resolved
- )
+ self._resolved_initial_state = False # Whether the initial state of the Element has been resolved
# Ensure we have loaded this class's defaults
self.__init_defaults(project, plugin_conf, meta.kind, meta.is_junction)
@@ -421,11 +369,7 @@ class Element(Plugin):
Elements must implement this method to configure the sandbox object
for execution.
"""
- raise ImplError(
- "element plugin '{kind}' does not implement configure_sandbox()".format(
- kind=self.get_kind()
- )
- )
+ raise ImplError("element plugin '{kind}' does not implement configure_sandbox()".format(kind=self.get_kind()))
def stage(self, sandbox: "Sandbox") -> None:
"""Stage inputs into the sandbox directories
@@ -441,11 +385,7 @@ class Element(Plugin):
objects, by staging the artifacts of the elements this element depends
on, or both.
"""
- raise ImplError(
- "element plugin '{kind}' does not implement stage()".format(
- kind=self.get_kind()
- )
- )
+ raise ImplError("element plugin '{kind}' does not implement stage()".format(kind=self.get_kind()))
def prepare(self, sandbox: "Sandbox") -> None:
"""Run one-off preparation commands.
@@ -482,11 +422,7 @@ class Element(Plugin):
Elements must implement this method to create an output
artifact from its sources and dependencies.
"""
- raise ImplError(
- "element plugin '{kind}' does not implement assemble()".format(
- kind=self.get_kind()
- )
- )
+ raise ImplError("element plugin '{kind}' does not implement assemble()".format(kind=self.get_kind()))
def generate_script(self) -> str:
"""Generate a build (sh) script to build this element
@@ -507,11 +443,7 @@ class Element(Plugin):
If the script fails, it is expected to return with an exit
code != 0.
"""
- raise ImplError(
- "element plugin '{kind}' does not implement write_script()".format(
- kind=self.get_kind()
- )
- )
+ raise ImplError("element plugin '{kind}' does not implement write_script()".format(kind=self.get_kind()))
#############################################################
# Public Methods #
@@ -525,9 +457,7 @@ class Element(Plugin):
for source in self.__sources:
yield source
- def dependencies(
- self, scope: Scope, *, recurse: bool = True, visited=None
- ) -> Iterator["Element"]:
+ def dependencies(self, scope: Scope, *, recurse: bool = True, visited=None) -> Iterator["Element"]:
"""dependencies(scope, *, recurse=True)
A generator function which yields the dependencies of the given element.
@@ -560,13 +490,8 @@ class Element(Plugin):
visited[0].add(element._unique_id)
visited[1].add(element._unique_id)
- for dep in chain(
- element.__build_dependencies, element.__runtime_dependencies
- ):
- if (
- dep._unique_id not in visited[0]
- and dep._unique_id not in visited[1]
- ):
+ for dep in chain(element.__build_dependencies, element.__runtime_dependencies):
+ if dep._unique_id not in visited[0] and dep._unique_id not in visited[1]:
yield from visit(dep, Scope.ALL, visited)
yield element
@@ -640,9 +565,7 @@ class Element(Plugin):
return self.__variables.subst(node.as_str())
except LoadError as e:
provenance = node.get_provenance()
- raise LoadError(
- "{}: {}".format(provenance, e), e.reason, detail=e.detail
- ) from e
+ raise LoadError("{}: {}".format(provenance, e), e.reason, detail=e.detail) from e
def node_subst_sequence_vars(self, node: "SequenceNode[ScalarNode]") -> List[str]:
"""Substitute any variables in the given sequence
@@ -663,17 +586,11 @@ class Element(Plugin):
ret.append(self.__variables.subst(value.as_str()))
except LoadError as e:
provenance = value.get_provenance()
- raise LoadError(
- "{}: {}".format(provenance, e), e.reason, detail=e.detail
- ) from e
+ raise LoadError("{}: {}".format(provenance, e), e.reason, detail=e.detail) from e
return ret
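Both substitution helpers are typically used when reading configuration in configure(); a sketch, assuming hypothetical 'prefix' and 'commands' keys in the element's configuration node:

    def configure(self, node):
        # Substitute variables such as %{install-root} in a single scalar ...
        self.prefix = self.node_subst_vars(node.get_scalar("prefix"))
        # ... or across every entry of a string list
        self.commands = self.node_subst_sequence_vars(node.get_sequence("commands"))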
def compute_manifest(
- self,
- *,
- include: Optional[List[str]] = None,
- exclude: Optional[List[str]] = None,
- orphans: bool = True
+ self, *, include: Optional[List[str]] = None, exclude: Optional[List[str]] = None, orphans: bool = True
) -> str:
"""Compute and return this element's selective manifest
@@ -769,9 +686,7 @@ class Element(Plugin):
+ "Try building the element first with `bst build`\n"
)
raise ElementError(
- "No artifacts to stage",
- detail=detail,
- reason="uncached-checkout-attempt",
+ "No artifacts to stage", detail=detail, reason="uncached-checkout-attempt",
)
if update_mtimes is None:
@@ -780,9 +695,7 @@ class Element(Plugin):
# Time to use the artifact, check once more that it's there
self.__assert_cached()
- with self.timed_activity(
- "Staging {}/{}".format(self.name, self._get_brief_display_key())
- ):
+ with self.timed_activity("Staging {}/{}".format(self.name, self._get_brief_display_key())):
# Disable type checking since we can't easily tell mypy that
# `self.__artifact` can't be None at this stage.
files_vdir = self.__artifact.get_files() # type: ignore
@@ -790,11 +703,7 @@ class Element(Plugin):
# Hard link it into the staging area
#
vbasedir = sandbox.get_virtual_directory()
- vstagedir = (
- vbasedir
- if path is None
- else vbasedir.descend(*path.lstrip(os.sep).split(os.sep))
- )
+ vstagedir = vbasedir if path is None else vbasedir.descend(*path.lstrip(os.sep).split(os.sep))
split_filter = self.__split_filter_func(include, exclude, orphans)
@@ -802,31 +711,21 @@ class Element(Plugin):
if update_mtimes:
def link_filter(path):
- return (
- split_filter is None or split_filter(path)
- ) and path not in update_mtimes
+ return (split_filter is None or split_filter(path)) and path not in update_mtimes
def copy_filter(path):
- return (
- split_filter is None or split_filter(path)
- ) and path in update_mtimes
+ return (split_filter is None or split_filter(path)) and path in update_mtimes
else:
link_filter = split_filter
result = vstagedir.import_files(
- files_vdir,
- filter_callback=link_filter,
- report_written=True,
- can_link=True,
+ files_vdir, filter_callback=link_filter, report_written=True, can_link=True,
)
if update_mtimes:
copy_result = vstagedir.import_files(
- files_vdir,
- filter_callback=copy_filter,
- report_written=True,
- update_mtime=True,
+ files_vdir, filter_callback=copy_filter, report_written=True, update_mtime=True,
)
result = result.combine(copy_result)
@@ -875,9 +774,7 @@ class Element(Plugin):
# build is still in the artifact cache
#
if self.__artifacts.contains(self, workspace.last_successful):
- last_successful = Artifact(
- self, context, strong_key=workspace.last_successful
- )
+ last_successful = Artifact(self, context, strong_key=workspace.last_successful)
# Get a dict of dependency strong keys
old_dep_keys = last_successful.get_metadata_dependencies()
else:
@@ -886,9 +783,7 @@ class Element(Plugin):
workspace.prepared = False
workspace.last_successful = None
- self.info(
- "Resetting workspace state, last successful build is no longer in the cache"
- )
+ self.info("Resetting workspace state, last successful build is no longer in the cache")
# In case we are staging in the main process
if utils._is_main_process():
@@ -920,12 +815,7 @@ class Element(Plugin):
context.get_workspaces().save_config()
result = dep.stage_artifact(
- sandbox,
- path=path,
- include=include,
- exclude=exclude,
- orphans=orphans,
- update_mtimes=to_update,
+ sandbox, path=path, include=include, exclude=exclude, orphans=orphans, update_mtimes=to_update,
)
if result.overwritten:
for overwrite in result.overwritten:
@@ -956,15 +846,11 @@ class Element(Plugin):
overlap_warning_elements.append(elm)
overlap_warning = True
- warning_detail += _overlap_error_detail(
- f, overlap_warning_elements, elements
- )
+ warning_detail += _overlap_error_detail(f, overlap_warning_elements, elements)
if overlap_warning:
self.warn(
- "Non-whitelisted overlaps detected",
- detail=warning_detail,
- warning_token=CoreWarnings.OVERLAPS,
+ "Non-whitelisted overlaps detected", detail=warning_detail, warning_token=CoreWarnings.OVERLAPS,
)
if ignored:
@@ -995,9 +881,7 @@ class Element(Plugin):
for command in commands:
cmd = self.node_subst_vars(command)
- sandbox.run(
- ["sh", "-e", "-c", cmd], 0, env=environment, cwd="/", label=cmd
- )
+ sandbox.run(["sh", "-e", "-c", cmd], 0, env=environment, cwd="/", label=cmd)
def stage_sources(self, sandbox: "Sandbox", directory: str) -> None:
"""Stage this element's sources to a directory in the sandbox
@@ -1083,9 +967,7 @@ class Element(Plugin):
# Flat is not recognized correctly by Pylint as being a dictionary
return self.__variables.flat.get(varname) # pylint: disable=no-member
- def batch_prepare_assemble(
- self, flags: int, *, collect: Optional[str] = None
- ) -> None:
+ def batch_prepare_assemble(self, flags: int, *, collect: Optional[str] = None) -> None:
""" Configure command batching across prepare() and assemble()
Args:
@@ -1097,11 +979,7 @@ class Element(Plugin):
to enable batching of all sandbox commands issued in prepare() and assemble().
"""
if self.__batch_prepare_assemble:
- raise ElementError(
- "{}: Command batching for prepare/assemble is already configured".format(
- self
- )
- )
+ raise ElementError("{}: Command batching for prepare/assemble is already configured".format(self))
self.__batch_prepare_assemble = True
self.__batch_prepare_assemble_flags = flags
@@ -1500,8 +1378,7 @@ class Element(Plugin):
+ "To start using the new reference, please close the existing workspace."
)
source.warn(
- "Updated reference will be ignored as source has open workspace",
- detail=detail,
+ "Updated reference will be ignored as source has open workspace", detail=detail,
)
return refs
@@ -1512,16 +1389,11 @@ class Element(Plugin):
# is used to stage things by the `bst artifact checkout` codepath
#
@contextmanager
- def _prepare_sandbox(
- self, scope, directory, shell=False, integrate=True, usebuildtree=False
- ):
+ def _prepare_sandbox(self, scope, directory, shell=False, integrate=True, usebuildtree=False):
# bst shell and bst artifact checkout require a local sandbox.
bare_directory = bool(directory)
with self.__sandbox(
- directory,
- config=self.__sandbox_config,
- allow_remote=False,
- bare_directory=bare_directory,
+ directory, config=self.__sandbox_config, allow_remote=False, bare_directory=bare_directory,
) as sandbox:
sandbox._usebuildtree = usebuildtree
@@ -1534,9 +1406,7 @@ class Element(Plugin):
self.stage(sandbox)
else:
# Stage deps in the sandbox root
- with self.timed_activity(
- "Staging dependencies", silent_nested=True
- ):
+ with self.timed_activity("Staging dependencies", silent_nested=True):
self.stage_dependency_artifacts(sandbox, scope)
# Run any integration commands provided by the dependencies
@@ -1565,9 +1435,7 @@ class Element(Plugin):
# Stage all sources that need to be copied
sandbox_vroot = sandbox.get_virtual_directory()
- host_vdirectory = sandbox_vroot.descend(
- *directory.lstrip(os.sep).split(os.sep), create=True
- )
+ host_vdirectory = sandbox_vroot.descend(*directory.lstrip(os.sep).split(os.sep), create=True)
self._stage_sources_at(host_vdirectory, usebuildtree=sandbox._usebuildtree)
# _stage_sources_at():
@@ -1593,9 +1461,7 @@ class Element(Plugin):
if not isinstance(vdirectory, Directory):
vdirectory = FileBasedDirectory(vdirectory)
if not vdirectory.is_empty():
- raise ElementError(
- "Staging directory '{}' is not empty".format(vdirectory)
- )
+ raise ElementError("Staging directory '{}' is not empty".format(vdirectory))
# Check if we have a cached buildtree to use
if usebuildtree:
@@ -1603,10 +1469,7 @@ class Element(Plugin):
if import_dir.is_empty():
detail = "Element type either does not expect a buildtree or it was explictily cached without one."
self.warn(
- "WARNING: {} Artifact contains an empty buildtree".format(
- self.name
- ),
- detail=detail,
+ "WARNING: {} Artifact contains an empty buildtree".format(self.name), detail=detail,
)
# No cached buildtree, stage source from source cache
@@ -1628,16 +1491,10 @@ class Element(Plugin):
import_dir.import_files(source_dir)
except SourceCacheError as e:
- raise ElementError(
- "Error trying to export source for {}: {}".format(
- self.name, e
- )
- )
+ raise ElementError("Error trying to export source for {}: {}".format(self.name, e))
except VirtualDirectoryError as e:
raise ElementError(
- "Error trying to import sources together for {}: {}".format(
- self.name, e
- ),
+ "Error trying to import sources together for {}: {}".format(self.name, e),
reason="import-source-files-fail",
)
@@ -1747,9 +1604,7 @@ class Element(Plugin):
self._update_ready_for_runtime_and_cached()
if self._get_workspace() and self._cached_success():
- assert (
- utils._is_main_process()
- ), "Attempted to save workspace configuration from child process"
+ assert utils._is_main_process(), "Attempted to save workspace configuration from child process"
#
# Note that this block can only happen in the
# main process, since `self._cached_success()` cannot
@@ -1794,9 +1649,7 @@ class Element(Plugin):
# Explicitly clean it up, keep the build dir around if exceptions are raised
os.makedirs(context.builddir, exist_ok=True)
- rootdir = tempfile.mkdtemp(
- prefix="{}-".format(self.normal_name), dir=context.builddir
- )
+ rootdir = tempfile.mkdtemp(prefix="{}-".format(self.normal_name), dir=context.builddir)
# Cleanup the build directory on explicit SIGTERM
def cleanup_rootdir():
@@ -1812,12 +1665,8 @@ class Element(Plugin):
buildroot = self.get_variable("build-root")
cache_buildtrees = context.cache_buildtrees
if cache_buildtrees != _CacheBuildTrees.NEVER:
- always_cache_buildtrees = (
- cache_buildtrees == _CacheBuildTrees.ALWAYS
- )
- sandbox._set_build_directory(
- buildroot, always=always_cache_buildtrees
- )
+ always_cache_buildtrees = cache_buildtrees == _CacheBuildTrees.ALWAYS
+ sandbox._set_build_directory(buildroot, always=always_cache_buildtrees)
if not self.BST_RUN_COMMANDS:
# Element doesn't need to run any commands in the sandbox.
@@ -1839,8 +1688,7 @@ class Element(Plugin):
try:
if self.__batch_prepare_assemble:
cm = sandbox.batch(
- self.__batch_prepare_assemble_flags,
- collect=self.__batch_prepare_assemble_collect,
+ self.__batch_prepare_assemble_flags, collect=self.__batch_prepare_assemble_collect,
)
else:
cm = contextlib.suppress()
@@ -1849,18 +1697,14 @@ class Element(Plugin):
# Step 3 - Prepare
self.__prepare(sandbox)
# Step 4 - Assemble
- collect = self.assemble(
- sandbox
- ) # pylint: disable=assignment-from-no-return
+ collect = self.assemble(sandbox) # pylint: disable=assignment-from-no-return
self.__set_build_result(success=True, description="succeeded")
except (ElementError, SandboxCommandError) as e:
# Shelling into a sandbox is useful to debug this error
e.sandbox = True
- self.__set_build_result(
- success=False, description=str(e), detail=e.detail
- )
+ self.__set_build_result(success=False, description=str(e), detail=e.detail)
self._cache_artifact(rootdir, sandbox, e.collect)
raise
@@ -1903,9 +1747,7 @@ class Element(Plugin):
if collect is not None:
try:
- collectvdir = sandbox_vroot.descend(
- *collect.lstrip(os.sep).split(os.sep)
- )
+ collectvdir = sandbox_vroot.descend(*collect.lstrip(os.sep).split(os.sep))
sandbox._fetch_missing_blobs(collectvdir)
except VirtualDirectoryError:
pass
@@ -1914,9 +1756,7 @@ class Element(Plugin):
self._assemble_done()
with self.timed_activity("Caching artifact"):
- artifact_size = self.__artifact.cache(
- rootdir, sandbox_build_dir, collectvdir, buildresult, publicdata
- )
+ artifact_size = self.__artifact.cache(rootdir, sandbox_build_dir, collectvdir, buildresult, publicdata)
if collect is not None and collectvdir is None:
raise ElementError(
@@ -2023,9 +1863,7 @@ class Element(Plugin):
def _skip_source_push(self):
if not self.__sources or self._get_workspace():
return True
- return not (
- self.__sourcecache.has_push_remotes(plugin=self) and self._source_cached()
- )
+ return not (self.__sourcecache.has_push_remotes(plugin=self) and self._source_cached())
def _source_push(self):
# try and push sources if we've got them
@@ -2101,20 +1939,10 @@ class Element(Plugin):
#
# If directory is not specified, one will be staged using scope
def _shell(
- self,
- scope=None,
- directory=None,
- *,
- mounts=None,
- isolate=False,
- prompt=None,
- command=None,
- usebuildtree=False
+ self, scope=None, directory=None, *, mounts=None, isolate=False, prompt=None, command=None, usebuildtree=False
):
- with self._prepare_sandbox(
- scope, directory, shell=True, usebuildtree=usebuildtree
- ) as sandbox:
+ with self._prepare_sandbox(scope, directory, shell=True, usebuildtree=usebuildtree) as sandbox:
environment = self.get_environment()
environment = copy.copy(environment)
flags = SandboxFlags.INTERACTIVE | SandboxFlags.ROOT_READ_ONLY
@@ -2123,11 +1951,7 @@ class Element(Plugin):
# subproject, we want to use the rules defined by the main one.
context = self._get_context()
project = context.get_toplevel_project()
- (
- shell_command,
- shell_environment,
- shell_host_files,
- ) = project.get_shell_config()
+ (shell_command, shell_environment, shell_host_files,) = project.get_shell_config()
if prompt is not None:
environment["PS1"] = prompt
@@ -2150,11 +1974,7 @@ class Element(Plugin):
for mount in shell_host_files + mounts:
if not os.path.exists(mount.host_path):
if not mount.optional:
- self.warn(
- "Not mounting non-existing host file: {}".format(
- mount.host_path
- )
- )
+ self.warn("Not mounting non-existing host file: {}".format(mount.host_path))
else:
sandbox.mark_directory(mount.path)
sandbox._set_mount_source(mount.path, mount.host_path)
@@ -2190,9 +2010,7 @@ class Element(Plugin):
# additional support from Source implementations.
#
os.makedirs(context.builddir, exist_ok=True)
- with utils._tempdir(
- dir=context.builddir, prefix="workspace-{}".format(self.normal_name)
- ) as temp:
+ with utils._tempdir(dir=context.builddir, prefix="workspace-{}".format(self.normal_name)) as temp:
for source in self.sources():
source._init_workspace(temp)
@@ -2309,10 +2127,7 @@ class Element(Plugin):
continue
# try and fetch from source cache
- if (
- source._get_consistency() < Consistency.CACHED
- and self.__sourcecache.has_fetch_remotes()
- ):
+ if source._get_consistency() < Consistency.CACHED and self.__sourcecache.has_fetch_remotes():
if self.__sourcecache.pull(source):
continue
@@ -2345,11 +2160,7 @@ class Element(Plugin):
# Generate dict that is used as base for all cache keys
if self.__cache_key_dict is None:
# Filter out nocache variables from the element's environment
- cache_env = {
- key: value
- for key, value in self.__environment.items()
- if key not in self.__env_nocache
- }
+ cache_env = {key: value for key, value in self.__environment.items() if key not in self.__env_nocache}
project = self._get_project()
@@ -2491,11 +2302,7 @@ class Element(Plugin):
#
def _update_ready_for_runtime_and_cached(self):
if not self.__ready_for_runtime_and_cached:
- if (
- self.__runtime_deps_uncached == 0
- and self._cached_success()
- and self.__cache_key
- ):
+ if self.__runtime_deps_uncached == 0 and self._cached_success() and self.__cache_key:
self.__ready_for_runtime_and_cached = True
# Notify reverse dependencies
@@ -2661,40 +2468,30 @@ class Element(Plugin):
def __preflight(self):
if self.BST_FORBID_RDEPENDS and self.BST_FORBID_BDEPENDS:
- if any(self.dependencies(Scope.RUN, recurse=False)) or any(
- self.dependencies(Scope.BUILD, recurse=False)
- ):
+ if any(self.dependencies(Scope.RUN, recurse=False)) or any(self.dependencies(Scope.BUILD, recurse=False)):
raise ElementError(
- "{}: Dependencies are forbidden for '{}' elements".format(
- self, self.get_kind()
- ),
+ "{}: Dependencies are forbidden for '{}' elements".format(self, self.get_kind()),
reason="element-forbidden-depends",
)
if self.BST_FORBID_RDEPENDS:
if any(self.dependencies(Scope.RUN, recurse=False)):
raise ElementError(
- "{}: Runtime dependencies are forbidden for '{}' elements".format(
- self, self.get_kind()
- ),
+ "{}: Runtime dependencies are forbidden for '{}' elements".format(self, self.get_kind()),
reason="element-forbidden-rdepends",
)
if self.BST_FORBID_BDEPENDS:
if any(self.dependencies(Scope.BUILD, recurse=False)):
raise ElementError(
- "{}: Build dependencies are forbidden for '{}' elements".format(
- self, self.get_kind()
- ),
+ "{}: Build dependencies are forbidden for '{}' elements".format(self, self.get_kind()),
reason="element-forbidden-bdepends",
)
if self.BST_FORBID_SOURCES:
if any(self.sources()):
raise ElementError(
- "{}: Sources are forbidden for '{}' elements".format(
- self, self.get_kind()
- ),
+ "{}: Sources are forbidden for '{}' elements".format(self, self.get_kind()),
reason="element-forbidden-sources",
)
@@ -2702,17 +2499,13 @@ class Element(Plugin):
self.preflight()
except BstError as e:
# Prepend provenance to the error
- raise ElementError(
- "{}: {}".format(self, e), reason=e.reason, detail=e.detail
- ) from e
+ raise ElementError("{}: {}".format(self, e), reason=e.reason, detail=e.detail) from e
# Ensure that the first source does not need access to previous sources
if self.__sources and self.__sources[0]._requires_previous_sources():
raise ElementError(
"{}: {} cannot be the first source of an element "
- "as it requires access to previous sources".format(
- self, self.__sources[0]
- )
+ "as it requires access to previous sources".format(self, self.__sources[0])
)
# Preflight the sources
@@ -2724,9 +2517,7 @@ class Element(Plugin):
# Raises an error if the artifact is not cached.
#
def __assert_cached(self):
- assert self._cached(), "{}: Missing artifact {}".format(
- self, self._get_brief_display_key()
- )
+ assert self._cached(), "{}: Missing artifact {}".format(self, self._get_brief_display_key())
# __get_tainted():
#
@@ -2749,16 +2540,10 @@ class Element(Plugin):
workspaced = self.__artifact.get_metadata_workspaced()
# Whether this artifact's dependencies have workspaces
- workspaced_dependencies = (
- self.__artifact.get_metadata_workspaced_dependencies()
- )
+ workspaced_dependencies = self.__artifact.get_metadata_workspaced_dependencies()
# Other conditions should be or-ed
- self.__tainted = (
- workspaced
- or workspaced_dependencies
- or not self.__sandbox_config_supported
- )
+ self.__tainted = workspaced or workspaced_dependencies or not self.__sandbox_config_supported
return self.__tainted
@@ -2790,13 +2575,7 @@ class Element(Plugin):
#
@contextmanager
def __sandbox(
- self,
- directory,
- stdout=None,
- stderr=None,
- config=None,
- allow_remote=True,
- bare_directory=False,
+ self, directory, stdout=None, stderr=None, config=None, allow_remote=True, bare_directory=False,
):
context = self._get_context()
project = self._get_project()
@@ -2809,20 +2588,12 @@ class Element(Plugin):
"Element {} is configured to use remote execution but plugin does not support it.".format(
self.name
),
- detail="Plugin '{kind}' does not support virtual directories.".format(
- kind=self.get_kind()
- ),
+ detail="Plugin '{kind}' does not support virtual directories.".format(kind=self.get_kind()),
)
- self.info(
- "Using a remote sandbox for artifact {} with directory '{}'".format(
- self.name, directory
- )
- )
+ self.info("Using a remote sandbox for artifact {} with directory '{}'".format(self.name, directory))
- output_files_required = (
- context.require_artifact_files or self._artifact_files_required()
- )
+ output_files_required = context.require_artifact_files or self._artifact_files_required()
sandbox = SandboxRemote(
context,
@@ -2856,18 +2627,11 @@ class Element(Plugin):
else:
os.makedirs(context.builddir, exist_ok=True)
- rootdir = tempfile.mkdtemp(
- prefix="{}-".format(self.normal_name), dir=context.builddir
- )
+ rootdir = tempfile.mkdtemp(prefix="{}-".format(self.normal_name), dir=context.builddir)
# Recursive contextmanager...
with self.__sandbox(
- rootdir,
- stdout=stdout,
- stderr=stderr,
- config=config,
- allow_remote=allow_remote,
- bare_directory=False,
+ rootdir, stdout=stdout, stderr=stderr, config=config, allow_remote=allow_remote, bare_directory=False,
) as sandbox:
yield sandbox
@@ -2997,9 +2761,7 @@ class Element(Plugin):
provenance = node.get_provenance()
if not provenance._is_synthetic:
raise LoadError(
- "{}: invalid redefinition of protected variable '{}'".format(
- provenance, var
- ),
+ "{}: invalid redefinition of protected variable '{}'".format(provenance, var),
LoadErrorReason.PROTECTED_VARIABLE_REDEFINED,
)
@@ -3043,9 +2805,7 @@ class Element(Plugin):
sandbox_config._assert_fully_composited()
# Sandbox config, unlike others, has fixed members so we should validate them
- sandbox_config.validate_keys(
- ["build-uid", "build-gid", "build-os", "build-arch"]
- )
+ sandbox_config.validate_keys(["build-uid", "build-gid", "build-os", "build-arch"])
build_arch = sandbox_config.get_str("build-arch", default=None)
if build_arch:
@@ -3093,9 +2853,7 @@ class Element(Plugin):
# Resolve any variables in the public split rules directly
for domain, splits in element_splits.items():
- splits = [
- self.__variables.subst(split.strip()) for split in splits.as_str_list()
- ]
+ splits = [self.__variables.subst(split.strip()) for split in splits.as_str_list()]
element_splits[domain] = splits
return element_public
@@ -3104,11 +2862,7 @@ class Element(Plugin):
bstdata = self.get_public_data("bst")
splits = bstdata.get_mapping("split-rules")
self.__splits = {
- domain: re.compile(
- "^(?:"
- + "|".join([utils._glob2re(r) for r in rules.as_str_list()])
- + ")$"
- )
+ domain: re.compile("^(?:" + "|".join([utils._glob2re(r) for r in rules.as_str_list()]) + ")$")
for domain, rules in splits.items()
}
@@ -3188,9 +2942,7 @@ class Element(Plugin):
return partial(self.__split_filter, element_domains, include, exclude, orphans)
def __compute_splits(self, include=None, exclude=None, orphans=True):
- filter_func = self.__split_filter_func(
- include=include, exclude=exclude, orphans=orphans
- )
+ filter_func = self.__split_filter_func(include=include, exclude=exclude, orphans=orphans)
files_vdir = self.__artifact.get_files()
@@ -3213,9 +2965,7 @@ class Element(Plugin):
if not self.__whitelist_regex:
bstdata = self.get_public_data("bst")
whitelist = bstdata.get_str_list("overlap-whitelist", default=[])
- whitelist_expressions = [
- utils._glob2re(self.__variables.subst(exp.strip())) for exp in whitelist
- ]
+ whitelist_expressions = [utils._glob2re(self.__variables.subst(exp.strip())) for exp in whitelist]
expression = "^(?:" + "|".join(whitelist_expressions) + ")$"
self.__whitelist_regex = re.compile(expression)
return self.__whitelist_regex.match(os.path.join(os.sep, path))
@@ -3296,9 +3046,7 @@ class Element(Plugin):
# commit all other sources by themselves
for ix, source in enumerate(self.__sources):
if source.BST_REQUIRES_PREVIOUS_SOURCES_STAGE:
- self.__sourcecache.commit(
- source, self.__sources[last_requires_previous:ix]
- )
+ self.__sourcecache.commit(source, self.__sources[last_requires_previous:ix])
last_requires_previous = ix
else:
self.__sourcecache.commit(source, [])
@@ -3386,9 +3134,7 @@ class Element(Plugin):
if self.__strict_cache_key is None:
dependencies = [
- [e.project_name, e.name, e.__strict_cache_key]
- if e.__strict_cache_key is not None
- else None
+ [e.project_name, e.name, e.__strict_cache_key] if e.__strict_cache_key is not None else None
for e in self.dependencies(Scope.BUILD)
]
self.__strict_cache_key = self._calculate_cache_key(dependencies)
@@ -3404,10 +3150,7 @@ class Element(Plugin):
else:
self.__update_strict_cache_key_of_rdeps()
- if (
- self.__strict_cache_key is not None
- and self.__can_query_cache_callback is not None
- ):
+ if self.__strict_cache_key is not None and self.__can_query_cache_callback is not None:
self.__can_query_cache_callback(self)
self.__can_query_cache_callback = None
@@ -3435,10 +3178,7 @@ class Element(Plugin):
if not self.__strict_artifact:
self.__strict_artifact = Artifact(
- self,
- context,
- strong_key=self.__strict_cache_key,
- weak_key=self.__weak_cache_key,
+ self, context, strong_key=self.__strict_cache_key, weak_key=self.__weak_cache_key,
)
if context.get_strict():
@@ -3471,9 +3211,7 @@ class Element(Plugin):
self.__cache_key = strong_key
elif self.__assemble_scheduled or self.__assemble_done:
# Artifact will or has been built, not downloaded
- dependencies = [
- e._get_cache_key() for e in self.dependencies(Scope.BUILD)
- ]
+ dependencies = [e._get_cache_key() for e in self.dependencies(Scope.BUILD)]
self.__cache_key = self._calculate_cache_key(dependencies)
if self.__cache_key is None:
@@ -3495,10 +3233,7 @@ class Element(Plugin):
#
def __update_strict_cache_key_of_rdeps(self):
if not self.__updated_strict_cache_keys_of_rdeps:
- if (
- self.__runtime_deps_without_strict_cache_key == 0
- and self.__strict_cache_key is not None
- ):
+ if self.__runtime_deps_without_strict_cache_key == 0 and self.__strict_cache_key is not None:
self.__updated_strict_cache_keys_of_rdeps = True
# Notify reverse dependencies
@@ -3532,10 +3267,7 @@ class Element(Plugin):
#
def __update_ready_for_runtime(self):
if not self.__ready_for_runtime:
- if (
- self.__runtime_deps_without_cache_key == 0
- and self.__cache_key is not None
- ):
+ if self.__runtime_deps_without_cache_key == 0 and self.__cache_key is not None:
self.__ready_for_runtime = True
# Notify reverse dependencies
diff --git a/src/buildstream/plugin.py b/src/buildstream/plugin.py
index 2e34106de..6a7bd78e1 100644
--- a/src/buildstream/plugin.py
+++ b/src/buildstream/plugin.py
@@ -273,9 +273,7 @@ class Plugin:
# If this plugin has been deprecated, emit a warning.
if self.BST_PLUGIN_DEPRECATED and not self.__deprecation_warning_silenced():
- detail = "Using deprecated plugin {}: {}".format(
- self.__kind, self.BST_PLUGIN_DEPRECATION_MESSAGE
- )
+ detail = "Using deprecated plugin {}: {}".format(self.__kind, self.BST_PLUGIN_DEPRECATION_MESSAGE)
self.__message(MessageType.WARN, detail)
def __del__(self):
@@ -316,9 +314,7 @@ class Plugin:
method can be used.
"""
raise ImplError(
- "{tag} plugin '{kind}' does not implement configure()".format(
- tag=self.__type_tag, kind=self.get_kind()
- )
+ "{tag} plugin '{kind}' does not implement configure()".format(tag=self.__type_tag, kind=self.get_kind())
)
def preflight(self) -> None:
@@ -340,9 +336,7 @@ class Plugin:
will raise an error automatically informing the user that a host tool is needed.
"""
raise ImplError(
- "{tag} plugin '{kind}' does not implement preflight()".format(
- tag=self.__type_tag, kind=self.get_kind()
- )
+ "{tag} plugin '{kind}' does not implement preflight()".format(tag=self.__type_tag, kind=self.get_kind())
)
def get_unique_key(self) -> SourceRef:
@@ -419,9 +413,7 @@ class Plugin:
"""
- return self.__project.get_path_from_node(
- node, check_is_file=check_is_file, check_is_dir=check_is_dir
- )
+ return self.__project.get_path_from_node(node, check_is_file=check_is_file, check_is_dir=check_is_dir)
def debug(self, brief: str, *, detail: Optional[str] = None) -> None:
"""Print a debugging message
@@ -459,13 +451,7 @@ class Plugin:
"""
self.__message(MessageType.INFO, brief, detail=detail)
- def warn(
- self,
- brief: str,
- *,
- detail: Optional[str] = None,
- warning_token: Optional[str] = None
- ) -> None:
+ def warn(self, brief: str, *, detail: Optional[str] = None, warning_token: Optional[str] = None) -> None:
"""Print a warning message, checks warning_token against project configuration
Args:
@@ -485,9 +471,7 @@ class Plugin:
if project._warning_is_fatal(warning_token):
detail = detail if detail else ""
- raise PluginError(
- message="{}\n{}".format(brief, detail), reason=warning_token
- )
+ raise PluginError(message="{}\n{}".format(brief, detail), reason=warning_token)
self.__message(MessageType.WARN, brief=brief, detail=detail)
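Illustrative usage (a sketch; the CoreWarnings import path is assumed): without a token the warning is always non-fatal, while a token makes the warning fatal when the project lists it under fatal-warnings, as with the overlap warning emitted from element.py above.

    from buildstream.types import CoreWarnings  # import path assumed

    # Inside any Plugin subclass method:
    self.warn("Downloaded file has no checksum configured")  # never fatal
    self.warn(
        "Non-whitelisted overlaps detected",
        detail="...",
        warning_token=CoreWarnings.OVERLAPS,  # fatal if the project configures it so
    )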
@@ -505,11 +489,7 @@ class Plugin:
@contextmanager
def timed_activity(
- self,
- activity_name: str,
- *,
- detail: Optional[str] = None,
- silent_nested: bool = False
+ self, activity_name: str, *, detail: Optional[str] = None, silent_nested: bool = False
) -> Generator[None, None, None]:
"""Context manager for performing timed activities in plugins
@@ -533,20 +513,11 @@ class Plugin:
self.call(... command which takes time ...)
"""
with self.__context.messenger.timed_activity(
- activity_name,
- element_name=self._get_full_name(),
- detail=detail,
- silent_nested=silent_nested,
+ activity_name, element_name=self._get_full_name(), detail=detail, silent_nested=silent_nested,
):
yield
- def call(
- self,
- *popenargs,
- fail: Optional[str] = None,
- fail_temporarily: bool = False,
- **kwargs
- ) -> int:
+ def call(self, *popenargs, fail: Optional[str] = None, fail_temporarily: bool = False, **kwargs) -> int:
"""A wrapper for subprocess.call()
Args:
@@ -577,14 +548,10 @@ class Plugin:
"Failed to download ponies from {}".format(
self.mirror_directory))
"""
- exit_code, _ = self.__call(
- *popenargs, fail=fail, fail_temporarily=fail_temporarily, **kwargs
- )
+ exit_code, _ = self.__call(*popenargs, fail=fail, fail_temporarily=fail_temporarily, **kwargs)
return exit_code
- def check_output(
- self, *popenargs, fail=None, fail_temporarily=False, **kwargs
- ) -> Tuple[int, str]:
+ def check_output(self, *popenargs, fail=None, fail_temporarily=False, **kwargs) -> Tuple[int, str]:
"""A wrapper for subprocess.check_output()
Args:
@@ -630,13 +597,7 @@ class Plugin:
raise SourceError(
fmt.format(plugin=self, track=tracking)) from e
"""
- return self.__call(
- *popenargs,
- collect_stdout=True,
- fail=fail,
- fail_temporarily=fail_temporarily,
- **kwargs
- )
+ return self.__call(*popenargs, collect_stdout=True, fail=fail, fail_temporarily=fail_temporarily, **kwargs)
#############################################################
# Private Methods used in BuildStream #
@@ -773,14 +734,7 @@ class Plugin:
# Internal subprocess implementation for the call() and check_output() APIs
#
- def __call(
- self,
- *popenargs,
- collect_stdout=False,
- fail=None,
- fail_temporarily=False,
- **kwargs
- ):
+ def __call(self, *popenargs, collect_stdout=False, fail=None, fail_temporarily=False, **kwargs):
with self._output_file() as output_file:
if "stdout" not in kwargs:
@@ -796,16 +750,13 @@ class Plugin:
if fail and exit_code:
raise PluginError(
- "{plugin}: {message}".format(plugin=self, message=fail),
- temporary=fail_temporarily,
+ "{plugin}: {message}".format(plugin=self, message=fail), temporary=fail_temporarily,
)
return (exit_code, output)
def __message(self, message_type, brief, **kwargs):
- message = Message(
- message_type, brief, element_name=self._get_full_name(), **kwargs
- )
+ message = Message(message_type, brief, element_name=self._get_full_name(), **kwargs)
self.__context.messenger.message(message)
def __note_command(self, output, *popenargs, **kwargs):
@@ -834,9 +785,7 @@ class Plugin:
def __get_full_name(self):
project = self.__project
# Set the name, depending on element or source plugin type
- name = (
- self._element_name if self.__type_tag == "source" else self.name
- ) # pylint: disable=no-member
+ name = self._element_name if self.__type_tag == "source" else self.name # pylint: disable=no-member
if project.junction:
return "{}:{}".format(project.junction.name, name)
else:
@@ -845,9 +794,7 @@ class Plugin:
# A local table for _prefix_warning()
#
-__CORE_WARNINGS = [
- value for name, value in CoreWarnings.__dict__.items() if not name.startswith("__")
-]
+__CORE_WARNINGS = [value for name, value in CoreWarnings.__dict__.items() if not name.startswith("__")]
# _prefix_warning():
diff --git a/src/buildstream/plugins/elements/autotools.py b/src/buildstream/plugins/elements/autotools.py
index 71b2e5854..089c9bca0 100644
--- a/src/buildstream/plugins/elements/autotools.py
+++ b/src/buildstream/plugins/elements/autotools.py
@@ -66,9 +66,7 @@ class AutotoolsElement(BuildElement):
# Enable command batching across prepare() and assemble()
def configure_sandbox(self, sandbox):
super().configure_sandbox(sandbox)
- self.batch_prepare_assemble(
- SandboxFlags.ROOT_READ_ONLY, collect=self.get_variable("install-root")
- )
+ self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY, collect=self.get_variable("install-root"))
# Plugin entry point
diff --git a/src/buildstream/plugins/elements/compose.py b/src/buildstream/plugins/elements/compose.py
index 461320008..c54c317b0 100644
--- a/src/buildstream/plugins/elements/compose.py
+++ b/src/buildstream/plugins/elements/compose.py
@@ -102,9 +102,7 @@ class ComposeElement(Element):
with self.timed_activity("Computing split", silent_nested=True):
for dep in self.dependencies(Scope.BUILD):
files = dep.compute_manifest(
- include=self.include,
- exclude=self.exclude,
- orphans=self.include_orphans,
+ include=self.include, exclude=self.exclude, orphans=self.include_orphans,
)
manifest.update(files)
@@ -186,13 +184,9 @@ class ComposeElement(Element):
def import_filter(path):
return path in manifest
- with self.timed_activity(
- "Creating composition", detail=detail, silent_nested=True
- ):
+ with self.timed_activity("Creating composition", detail=detail, silent_nested=True):
self.info("Composing {} files".format(len(manifest)))
- installdir.import_files(
- vbasedir, filter_callback=import_filter, can_link=True
- )
+ installdir.import_files(vbasedir, filter_callback=import_filter, can_link=True)
# And we're done
return os.path.join(os.sep, "buildstream", "install")
diff --git a/src/buildstream/plugins/elements/filter.py b/src/buildstream/plugins/elements/filter.py
index 17e15c80c..49bebd5a9 100644
--- a/src/buildstream/plugins/elements/filter.py
+++ b/src/buildstream/plugins/elements/filter.py
@@ -167,9 +167,7 @@ class FilterElement(Element):
BST_RUN_COMMANDS = False
def configure(self, node):
- node.validate_keys(
- ["include", "exclude", "include-orphans", "pass-integration"]
- )
+ node.validate_keys(["include", "exclude", "include-orphans", "pass-integration"])
self.include_node = node.get_sequence("include")
self.exclude_node = node.get_sequence("exclude")
@@ -211,13 +209,9 @@ class FilterElement(Element):
# If a parent does not produce an artifact, fail and inform user that the dependency
# must produce artifacts
if not build_deps[0].BST_ELEMENT_HAS_ARTIFACT:
- detail = "{} does not produce an artifact, so there is nothing to filter".format(
- build_deps[0].name
- )
+ detail = "{} does not produce an artifact, so there is nothing to filter".format(build_deps[0].name)
raise ElementError(
- "{}: {} element's build dependency must produce an artifact".format(
- self, type(self).__name__
- ),
+ "{}: {} element's build dependency must produce an artifact".format(self, type(self).__name__),
detail=detail,
reason="filter-bdepend-no-artifact",
)
@@ -253,34 +247,19 @@ class FilterElement(Element):
detail = []
if unfound_includes:
- detail.append(
- "Unknown domains were used in {}".format(
- self.include_node.get_provenance()
- )
- )
- detail.extend(
- [" - {}".format(domain) for domain in unfound_includes]
- )
+ detail.append("Unknown domains were used in {}".format(self.include_node.get_provenance()))
+ detail.extend([" - {}".format(domain) for domain in unfound_includes])
if unfound_excludes:
- detail.append(
- "Unknown domains were used in {}".format(
- self.exclude_node.get_provenance()
- )
- )
- detail.extend(
- [" - {}".format(domain) for domain in unfound_excludes]
- )
+ detail.append("Unknown domains were used in {}".format(self.exclude_node.get_provenance()))
+ detail.extend([" - {}".format(domain) for domain in unfound_excludes])
if detail:
detail = "\n".join(detail)
raise ElementError("Unknown domains declared.", detail=detail)
dep.stage_artifact(
- sandbox,
- include=self.include,
- exclude=self.exclude,
- orphans=self.include_orphans,
+ sandbox, include=self.include, exclude=self.exclude, orphans=self.include_orphans,
)
return ""
diff --git a/src/buildstream/plugins/elements/import.py b/src/buildstream/plugins/elements/import.py
index b7318b131..2b68197a7 100644
--- a/src/buildstream/plugins/elements/import.py
+++ b/src/buildstream/plugins/elements/import.py
@@ -55,9 +55,7 @@ class ImportElement(Element):
sources = list(self.sources())
if not sources:
- raise ElementError(
- "{}: An import element must have at least one source.".format(self)
- )
+ raise ElementError("{}: An import element must have at least one source.".format(self))
def get_unique_key(self):
return {"source": self.source, "target": self.target}
@@ -81,16 +79,10 @@ class ImportElement(Element):
inputdir = inputdir.descend(*self.source.strip(os.sep).split(os.sep))
# The output target directory
- outputdir = outputdir.descend(
- *self.target.strip(os.sep).split(os.sep), create=True
- )
+ outputdir = outputdir.descend(*self.target.strip(os.sep).split(os.sep), create=True)
if inputdir.is_empty():
- raise ElementError(
- "{}: No files were found inside directory '{}'".format(
- self, self.source
- )
- )
+ raise ElementError("{}: No files were found inside directory '{}'".format(self, self.source))
# Move it over
outputdir.import_files(inputdir)
@@ -104,9 +96,7 @@ class ImportElement(Element):
commands = []
# The directory to grab
- inputdir = os.path.join(
- build_root, self.normal_name, self.source.lstrip(os.sep)
- )
+ inputdir = os.path.join(build_root, self.normal_name, self.source.lstrip(os.sep))
inputdir = inputdir.rstrip(os.sep)
# The output target directory
@@ -115,9 +105,7 @@ class ImportElement(Element):
# Ensure target directory parent exists but target directory doesn't
commands.append("mkdir -p {}".format(os.path.dirname(outputdir)))
- commands.append(
- "[ ! -e {outputdir} ] || rmdir {outputdir}".format(outputdir=outputdir)
- )
+ commands.append("[ ! -e {outputdir} ] || rmdir {outputdir}".format(outputdir=outputdir))
# Move it over
commands.append("mv {} {}".format(inputdir, outputdir))
diff --git a/src/buildstream/plugins/elements/junction.py b/src/buildstream/plugins/elements/junction.py
index f9327352e..42b9ef08e 100644
--- a/src/buildstream/plugins/elements/junction.py
+++ b/src/buildstream/plugins/elements/junction.py
@@ -192,12 +192,8 @@ class JunctionElement(Element):
self.target = node.get_str("target", default=None)
self.target_element = None
self.target_junction = None
- self.cache_junction_elements = node.get_bool(
- "cache-junction-elements", default=False
- )
- self.ignore_junction_remotes = node.get_bool(
- "ignore-junction-remotes", default=False
- )
+ self.cache_junction_elements = node.get_bool("cache-junction-elements", default=False)
+ self.ignore_junction_remotes = node.get_bool("ignore-junction-remotes", default=False)
def preflight(self):
# "target" cannot be used in conjunction with:
@@ -205,33 +201,23 @@ class JunctionElement(Element):
# 2. config['options']
# 3. config['path']
if self.target and any(self.sources()):
- raise ElementError(
- "junction elements cannot define both 'sources' and 'target' config option"
- )
+ raise ElementError("junction elements cannot define both 'sources' and 'target' config option")
if self.target and any(self.options.items()):
- raise ElementError(
- "junction elements cannot define both 'options' and 'target'"
- )
+ raise ElementError("junction elements cannot define both 'options' and 'target'")
if self.target and self.path:
- raise ElementError(
- "junction elements cannot define both 'path' and 'target'"
- )
+ raise ElementError("junction elements cannot define both 'path' and 'target'")
# Validate format of target, if defined
if self.target:
try:
self.target_junction, self.target_element = self.target.split(":")
except ValueError:
- raise ElementError(
- "'target' option must be in format '{junction-name}:{element-name}'"
- )
+ raise ElementError("'target' option must be in format '{junction-name}:{element-name}'")
# We cannot target a junction that has the same name as us, since that
# will cause an infinite recursion while trying to load it.
if self.name == self.target_element:
- raise ElementError(
- "junction elements cannot target an element with the same name"
- )
+ raise ElementError("junction elements cannot target an element with the same name")
def get_unique_key(self):
# Junctions do not produce artifacts. get_unique_key() implementation
diff --git a/src/buildstream/plugins/elements/manual.py b/src/buildstream/plugins/elements/manual.py
index 4e9fded17..97da41615 100644
--- a/src/buildstream/plugins/elements/manual.py
+++ b/src/buildstream/plugins/elements/manual.py
@@ -42,9 +42,7 @@ class ManualElement(BuildElement):
# Enable command batching across prepare() and assemble()
def configure_sandbox(self, sandbox):
super().configure_sandbox(sandbox)
- self.batch_prepare_assemble(
- SandboxFlags.ROOT_READ_ONLY, collect=self.get_variable("install-root")
- )
+ self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY, collect=self.get_variable("install-root"))
# Plugin entry point
diff --git a/src/buildstream/plugins/elements/pip.py b/src/buildstream/plugins/elements/pip.py
index 175568955..93303748d 100644
--- a/src/buildstream/plugins/elements/pip.py
+++ b/src/buildstream/plugins/elements/pip.py
@@ -42,9 +42,7 @@ class PipElement(BuildElement):
# Enable command batching across prepare() and assemble()
def configure_sandbox(self, sandbox):
super().configure_sandbox(sandbox)
- self.batch_prepare_assemble(
- SandboxFlags.ROOT_READ_ONLY, collect=self.get_variable("install-root")
- )
+ self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY, collect=self.get_variable("install-root"))
# Plugin entry point
diff --git a/src/buildstream/plugins/sources/_downloadablefilesource.py b/src/buildstream/plugins/sources/_downloadablefilesource.py
index 2db4274f2..6449bc7d5 100644
--- a/src/buildstream/plugins/sources/_downloadablefilesource.py
+++ b/src/buildstream/plugins/sources/_downloadablefilesource.py
@@ -79,9 +79,7 @@ class DownloadableFileSource(Source):
self.original_url = node.get_str("url")
self.ref = node.get_str("ref", None)
self.url = self.translate_url(self.original_url)
- self._mirror_dir = os.path.join(
- self.get_mirror_directory(), utils.url_directory_name(self.original_url)
- )
+ self._mirror_dir = os.path.join(self.get_mirror_directory(), utils.url_directory_name(self.original_url))
self._warn_deprecated_etag(node)
def preflight(self):
@@ -143,9 +141,7 @@ class DownloadableFileSource(Source):
sha256 = self._ensure_mirror()
if sha256 != self.ref:
raise SourceError(
- "File downloaded from {} has sha256sum '{}', not '{}'!".format(
- self.url, sha256, self.ref
- )
+ "File downloaded from {} has sha256sum '{}', not '{}'!".format(self.url, sha256, self.ref)
)
def _warn_deprecated_etag(self, node):
@@ -217,21 +213,12 @@ class DownloadableFileSource(Source):
# Because we use etag only for matching ref, currently specified ref is what
# we would have downloaded.
return self.ref
- raise SourceError(
- "{}: Error mirroring {}: {}".format(self, self.url, e), temporary=True
- ) from e
-
- except (
- urllib.error.URLError,
- urllib.error.ContentTooShortError,
- OSError,
- ValueError,
- ) as e:
+ raise SourceError("{}: Error mirroring {}: {}".format(self, self.url, e), temporary=True) from e
+
+ except (urllib.error.URLError, urllib.error.ContentTooShortError, OSError, ValueError,) as e:
# Note that urllib.request.Request in the try block may throw a
# ValueError for unknown url types, so we handle it here.
- raise SourceError(
- "{}: Error mirroring {}: {}".format(self, self.url, e), temporary=True
- ) from e
+ raise SourceError("{}: Error mirroring {}: {}".format(self, self.url, e), temporary=True) from e
def _get_mirror_file(self, sha=None):
if sha is not None:
@@ -261,7 +248,5 @@ class DownloadableFileSource(Source):
netrc_pw_mgr = _NetrcPasswordManager(netrc_config)
http_auth = urllib.request.HTTPBasicAuthHandler(netrc_pw_mgr)
ftp_handler = _NetrcFTPOpener(netrc_config)
- DownloadableFileSource.__urlopener = urllib.request.build_opener(
- http_auth, ftp_handler
- )
+ DownloadableFileSource.__urlopener = urllib.request.build_opener(http_auth, ftp_handler)
return DownloadableFileSource.__urlopener
diff --git a/src/buildstream/plugins/sources/bzr.py b/src/buildstream/plugins/sources/bzr.py
index 657a885c2..ec3317649 100644
--- a/src/buildstream/plugins/sources/bzr.py
+++ b/src/buildstream/plugins/sources/bzr.py
@@ -102,33 +102,19 @@ class BzrSource(Source):
node["ref"] = self.ref = ref
def track(self):
- with self.timed_activity(
- "Tracking {}".format(self.url), silent_nested=True
- ), self._locked():
+ with self.timed_activity("Tracking {}".format(self.url), silent_nested=True), self._locked():
self._ensure_mirror(skip_ref_check=True)
ret, out = self.check_output(
- [
- self.host_bzr,
- "version-info",
- "--custom",
- "--template={revno}",
- self._get_branch_dir(),
- ],
- fail="Failed to read the revision number at '{}'".format(
- self._get_branch_dir()
- ),
+ [self.host_bzr, "version-info", "--custom", "--template={revno}", self._get_branch_dir(),],
+ fail="Failed to read the revision number at '{}'".format(self._get_branch_dir()),
)
if ret != 0:
- raise SourceError(
- "{}: Failed to get ref for tracking {}".format(self, self.tracking)
- )
+ raise SourceError("{}: Failed to get ref for tracking {}".format(self, self.tracking))
return out
def fetch(self):
- with self.timed_activity(
- "Fetching {}".format(self.url), silent_nested=True
- ), self._locked():
+ with self.timed_activity("Fetching {}".format(self.url), silent_nested=True), self._locked():
self._ensure_mirror()
def stage(self, directory):
@@ -150,9 +136,7 @@ class BzrSource(Source):
def init_workspace(self, directory):
url = os.path.join(self.url, self.tracking)
- with self.timed_activity(
- 'Setting up workspace "{}"'.format(directory), silent_nested=True
- ):
+ with self.timed_activity('Setting up workspace "{}"'.format(directory), silent_nested=True):
# Checkout from the cache
self.call(
[
@@ -181,9 +165,7 @@ class BzrSource(Source):
@contextmanager
def _locked(self):
lockdir = os.path.join(self.get_mirror_directory(), "locks")
- lockfile = os.path.join(
- lockdir, utils.url_directory_name(self.original_url) + ".lock"
- )
+ lockfile = os.path.join(lockdir, utils.url_directory_name(self.original_url) + ".lock")
os.makedirs(lockdir, exist_ok=True)
with open(lockfile, "w") as lock:
fcntl.flock(lock, fcntl.LOCK_EX)
@@ -198,32 +180,21 @@ class BzrSource(Source):
return False
return (
- self.call(
- [
- self.host_bzr,
- "revno",
- "--revision=revno:{}".format(self.ref),
- self._get_branch_dir(),
- ]
- )
- == 0
+ self.call([self.host_bzr, "revno", "--revision=revno:{}".format(self.ref), self._get_branch_dir(),]) == 0
)
def _get_branch_dir(self):
return os.path.join(self._get_mirror_dir(), self.tracking)
def _get_mirror_dir(self):
- return os.path.join(
- self.get_mirror_directory(), utils.url_directory_name(self.original_url)
- )
+ return os.path.join(self.get_mirror_directory(), utils.url_directory_name(self.original_url))
def _ensure_mirror(self, skip_ref_check=False):
mirror_dir = self._get_mirror_dir()
bzr_metadata_dir = os.path.join(mirror_dir, ".bzr")
if not os.path.exists(bzr_metadata_dir):
self.call(
- [self.host_bzr, "init-repo", "--no-trees", mirror_dir],
- fail="Failed to initialize bzr repository",
+ [self.host_bzr, "init-repo", "--no-trees", mirror_dir], fail="Failed to initialize bzr repository",
)
branch_dir = os.path.join(mirror_dir, self.tracking)
@@ -240,19 +211,13 @@ class BzrSource(Source):
# `bzr pull` the branch if it does exist
# to get any changes to the upstream code
self.call(
- [
- self.host_bzr,
- "pull",
- "--directory={}".format(branch_dir),
- branch_url,
- ],
+ [self.host_bzr, "pull", "--directory={}".format(branch_dir), branch_url,],
fail="Failed to pull new changes for {}".format(branch_dir),
)
if not skip_ref_check and not self._check_ref():
raise SourceError(
- "Failed to ensure ref '{}' was mirrored".format(self.ref),
- reason="ref-not-mirrored",
+ "Failed to ensure ref '{}' was mirrored".format(self.ref), reason="ref-not-mirrored",
)
diff --git a/src/buildstream/plugins/sources/deb.py b/src/buildstream/plugins/sources/deb.py
index 407241689..a7437b150 100644
--- a/src/buildstream/plugins/sources/deb.py
+++ b/src/buildstream/plugins/sources/deb.py
@@ -71,9 +71,7 @@ class DebSource(TarSource):
with open(self._get_mirror_file(), "rb") as deb_file:
arpy_archive = arpy.Archive(fileobj=deb_file)
arpy_archive.read_all_headers()
- data_tar_arpy = [
- v for k, v in arpy_archive.archived_files.items() if b"data.tar" in k
- ][0]
+ data_tar_arpy = [v for k, v in arpy_archive.archived_files.items() if b"data.tar" in k][0]
# ArchiveFileData is not enough like a file object for tarfile to use.
# Monkey-patching a seekable method makes it close enough for TarFile to open.
data_tar_arpy.seekable = lambda *args: True
diff --git a/src/buildstream/plugins/sources/local.py b/src/buildstream/plugins/sources/local.py
index 471992af9..4e85f6659 100644
--- a/src/buildstream/plugins/sources/local.py
+++ b/src/buildstream/plugins/sources/local.py
@@ -90,8 +90,7 @@ class LocalSource(Source):
if result.overwritten or result.ignored:
raise SourceError(
- "Failed to stage source: files clash with existing directory",
- reason="ensure-stage-dir-fail",
+ "Failed to stage source: files clash with existing directory", reason="ensure-stage-dir-fail",
)
def _get_local_path(self):
diff --git a/src/buildstream/plugins/sources/patch.py b/src/buildstream/plugins/sources/patch.py
index 2be4ee2f7..e9c4ff050 100644
--- a/src/buildstream/plugins/sources/patch.py
+++ b/src/buildstream/plugins/sources/patch.py
@@ -56,9 +56,7 @@ class PatchSource(Source):
def configure(self, node):
node.validate_keys(["path", "strip-level", *Source.COMMON_CONFIG_KEYS])
- self.path = self.node_get_project_path(
- node.get_scalar("path"), check_is_file=True
- )
+ self.path = self.node_get_project_path(node.get_scalar("path"), check_is_file=True)
self.strip_level = node.get_int("strip-level", default=1)
self.fullpath = os.path.join(self.get_project_directory(), self.path)
@@ -91,20 +89,12 @@ class PatchSource(Source):
# Bail out with a comprehensive message if the target directory is empty
if not os.listdir(directory):
raise SourceError(
- "Nothing to patch in directory '{}'".format(directory),
- reason="patch-no-files",
+ "Nothing to patch in directory '{}'".format(directory), reason="patch-no-files",
)
strip_level_option = "-p{}".format(self.strip_level)
self.call(
- [
- self.host_patch,
- strip_level_option,
- "-i",
- self.fullpath,
- "-d",
- directory,
- ],
+ [self.host_patch, strip_level_option, "-i", self.fullpath, "-d", directory,],
fail="Failed to apply patch {}".format(self.path),
)
diff --git a/src/buildstream/plugins/sources/pip.py b/src/buildstream/plugins/sources/pip.py
index 41e414855..253ac2078 100644
--- a/src/buildstream/plugins/sources/pip.py
+++ b/src/buildstream/plugins/sources/pip.py
@@ -95,10 +95,7 @@ _PYTHON_VERSIONS = [
# https://docs.python.org/3/distutils/sourcedist.html.
# Names of source distribution archives must be of the form
# '%{package-name}-%{version}.%{extension}'.
-_SDIST_RE = re.compile(
- r"^([\w.-]+?)-((?:[\d.]+){2,})\.(?:tar|tar.bz2|tar.gz|tar.xz|tar.Z|zip)$",
- re.IGNORECASE,
-)
+_SDIST_RE = re.compile(r"^([\w.-]+?)-((?:[\d.]+){2,})\.(?:tar|tar.bz2|tar.gz|tar.xz|tar.Z|zip)$", re.IGNORECASE,)
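A quick, self-contained illustration of what the expression captures, using hypothetical archive names:

    import re

    _SDIST_RE = re.compile(
        r"^([\w.-]+?)-((?:[\d.]+){2,})\.(?:tar|tar.bz2|tar.gz|tar.xz|tar.Z|zip)$", re.IGNORECASE
    )

    assert _SDIST_RE.match("numpy-1.21.0.tar.gz").groups() == ("numpy", "1.21.0")
    assert _SDIST_RE.match("Flask-2.0.1.zip").groups() == ("Flask", "2.0.1")
    assert _SDIST_RE.match("not-an-sdist.txt") is None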
class PipSource(Source):
@@ -110,9 +107,7 @@ class PipSource(Source):
BST_REQUIRES_PREVIOUS_SOURCES_TRACK = True
def configure(self, node):
- node.validate_keys(
- ["url", "packages", "ref", "requirements-files"] + Source.COMMON_CONFIG_KEYS
- )
+ node.validate_keys(["url", "packages", "ref", "requirements-files"] + Source.COMMON_CONFIG_KEYS)
self.ref = node.get_str("ref", None)
self.original_url = node.get_str("url", _PYPI_INDEX_URL)
self.index_url = self.translate_url(self.original_url)
@@ -120,11 +115,7 @@ class PipSource(Source):
self.requirements_files = node.get_str_list("requirements-files", [])
if not (self.packages or self.requirements_files):
- raise SourceError(
- "{}: Either 'packages' or 'requirements-files' must be specified".format(
- self
- )
- )
+ raise SourceError("{}: Either 'packages' or 'requirements-files' must be specified".format(self))
def preflight(self):
# Try to find a pip version that supports the download command
diff --git a/src/buildstream/plugins/sources/remote.py b/src/buildstream/plugins/sources/remote.py
index 6705d20e5..af5b4f266 100644
--- a/src/buildstream/plugins/sources/remote.py
+++ b/src/buildstream/plugins/sources/remote.py
@@ -67,12 +67,9 @@ class RemoteSource(DownloadableFileSource):
if os.sep in self.filename:
raise SourceError(
- "{}: filename parameter cannot contain directories".format(self),
- reason="filename-contains-directory",
+ "{}: filename parameter cannot contain directories".format(self), reason="filename-contains-directory",
)
- node.validate_keys(
- DownloadableFileSource.COMMON_CONFIG_KEYS + ["filename", "executable"]
- )
+ node.validate_keys(DownloadableFileSource.COMMON_CONFIG_KEYS + ["filename", "executable"])
def get_unique_key(self):
return super().get_unique_key() + [self.filename, self.executable]
diff --git a/src/buildstream/plugins/sources/tar.py b/src/buildstream/plugins/sources/tar.py
index 7e5868baa..8bc0cc743 100644
--- a/src/buildstream/plugins/sources/tar.py
+++ b/src/buildstream/plugins/sources/tar.py
@@ -115,9 +115,7 @@ class TarSource(DownloadableFileSource):
def _get_tar(self):
if self.url.endswith(".lz"):
with self._run_lzip() as lzip_dec:
- with tarfile.open(
- fileobj=lzip_dec, mode="r:", tarinfo=ReadableTarInfo
- ) as tar:
+ with tarfile.open(fileobj=lzip_dec, mode="r:", tarinfo=ReadableTarInfo) as tar:
yield tar
else:
with tarfile.open(self._get_mirror_file(), tarinfo=ReadableTarInfo) as tar:
@@ -132,8 +130,7 @@ class TarSource(DownloadableFileSource):
if base_dir:
tar.extractall(
- path=directory,
- members=self._extract_members(tar, base_dir, directory),
+ path=directory, members=self._extract_members(tar, base_dir, directory),
)
else:
tar.extractall(path=directory)
@@ -244,11 +241,7 @@ class TarSource(DownloadableFileSource):
paths = self._list_tar_paths(tar)
matches = sorted(list(utils.glob(paths, pattern)))
if not matches:
- raise SourceError(
- "{}: Could not find base directory matching pattern: {}".format(
- self, pattern
- )
- )
+ raise SourceError("{}: Could not find base directory matching pattern: {}".format(self, pattern))
return matches[0]
diff --git a/src/buildstream/plugins/sources/workspace.py b/src/buildstream/plugins/sources/workspace.py
index 56b4db1a4..0db3a6ffa 100644
--- a/src/buildstream/plugins/sources/workspace.py
+++ b/src/buildstream/plugins/sources/workspace.py
@@ -98,8 +98,7 @@ class WorkspaceSource(Source):
if result.overwritten or result.ignored:
raise SourceError(
- "Failed to stage source: files clash with existing directory",
- reason="ensure-stage-dir-fail",
+ "Failed to stage source: files clash with existing directory", reason="ensure-stage-dir-fail",
)
def _get_local_path(self) -> str:
diff --git a/src/buildstream/plugins/sources/zip.py b/src/buildstream/plugins/sources/zip.py
index 69324b29d..47933c8eb 100644
--- a/src/buildstream/plugins/sources/zip.py
+++ b/src/buildstream/plugins/sources/zip.py
@@ -79,9 +79,7 @@ class ZipSource(DownloadableFileSource):
return super().get_unique_key() + [self.base_dir]
def stage(self, directory):
- exec_rights = (stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) & ~(
- stat.S_IWGRP | stat.S_IWOTH
- )
+ exec_rights = (stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) & ~(stat.S_IWGRP | stat.S_IWOTH)
noexec_rights = exec_rights & ~(stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
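For reference, these two masks work out to the familiar permission modes; a tiny self-contained check:

    import stat

    exec_rights = (stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) & ~(stat.S_IWGRP | stat.S_IWOTH)
    noexec_rights = exec_rights & ~(stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)

    assert exec_rights == 0o755    # rwxr-xr-x for files marked executable
    assert noexec_rights == 0o644  # rw-r--r-- for everything else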
try:
@@ -173,11 +171,7 @@ class ZipSource(DownloadableFileSource):
paths = self._list_archive_paths(archive)
matches = sorted(list(utils.glob(paths, pattern)))
if not matches:
- raise SourceError(
- "{}: Could not find base directory matching pattern: {}".format(
- self, pattern
- )
- )
+ raise SourceError("{}: Could not find base directory matching pattern: {}".format(self, pattern))
return matches[0]
diff --git a/src/buildstream/sandbox/_mount.py b/src/buildstream/sandbox/_mount.py
index b182a6adc..18751dde5 100644
--- a/src/buildstream/sandbox/_mount.py
+++ b/src/buildstream/sandbox/_mount.py
@@ -38,9 +38,7 @@ class Mount:
self.mount_point = mount_point
self.safe_hardlinks = safe_hardlinks
- self._fuse_mount_options = (
- {} if fuse_mount_options is None else fuse_mount_options
- )
+ self._fuse_mount_options = {} if fuse_mount_options is None else fuse_mount_options
# FIXME: When the criteria for mounting something and its parent
# mount is identical, then there is no need to mount an additional
@@ -55,9 +53,7 @@ class Mount:
scratch_directory = sandbox._get_scratch_directory()
# Redirected mount
self.mount_origin = os.path.join(root_directory, mount_point.lstrip(os.sep))
- self.mount_base = os.path.join(
- scratch_directory, utils.url_directory_name(mount_point)
- )
+ self.mount_base = os.path.join(scratch_directory, utils.url_directory_name(mount_point))
self.mount_source = os.path.join(self.mount_base, "mount")
self.mount_tempdir = os.path.join(self.mount_base, "temp")
os.makedirs(self.mount_origin, exist_ok=True)
@@ -87,9 +83,7 @@ class Mount:
@contextmanager
def mounted(self, sandbox):
if self.safe_hardlinks:
- mount = SafeHardlinks(
- self.mount_origin, self.mount_tempdir, self._fuse_mount_options
- )
+ mount = SafeHardlinks(self.mount_origin, self.mount_tempdir, self._fuse_mount_options)
with mount.mounted(self.mount_source):
yield
else:
@@ -122,9 +116,7 @@ class MountMap:
# We want safe hardlinks for any non-root directory where
# artifacts will be staged to
- self.mounts[directory] = Mount(
- sandbox, directory, artifact, fuse_mount_options
- )
+ self.mounts[directory] = Mount(sandbox, directory, artifact, fuse_mount_options)
# get_mount_source()
#
diff --git a/src/buildstream/sandbox/_mounter.py b/src/buildstream/sandbox/_mounter.py
index 57f35bdec..38b9b9c1c 100644
--- a/src/buildstream/sandbox/_mounter.py
+++ b/src/buildstream/sandbox/_mounter.py
@@ -28,14 +28,7 @@ from .. import utils, _signals
class Mounter:
@classmethod
def _mount(
- cls,
- dest,
- src=None,
- mount_type=None,
- stdout=None,
- stderr=None,
- options=None,
- flags=None,
+ cls, dest, src=None, mount_type=None, stdout=None, stderr=None, options=None, flags=None,
):
if stdout is None:
@@ -58,9 +51,7 @@ class Mounter:
status, _ = utils._call(argv, terminate=True, stdout=stdout, stderr=stderr)
if status != 0:
- raise SandboxError(
- "`{}` failed with exit code {}".format(" ".join(argv), status)
- )
+ raise SandboxError("`{}` failed with exit code {}".format(" ".join(argv), status))
return dest
@@ -75,9 +66,7 @@ class Mounter:
status, _ = utils._call(cmd, terminate=True, stdout=stdout, stderr=stderr)
if status != 0:
- raise SandboxError(
- "`{}` failed with exit code {}".format(" ".join(cmd), status)
- )
+ raise SandboxError("`{}` failed with exit code {}".format(" ".join(cmd), status))
# mount()
#
@@ -108,9 +97,7 @@ class Mounter:
options = ",".join([key for key, val in kwargs.items() if val])
- path = cls._mount(
- dest, src, mount_type, stdout=stdout, stderr=stderr, options=options
- )
+ path = cls._mount(dest, src, mount_type, stdout=stdout, stderr=stderr, options=options)
try:
with _signals.terminator(kill_proc):
yield path
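For reference, a small sketch of how the keyword arguments are folded into a mount option string before being handed to mount's -o flag above (the helper name is illustrative, not part of the patch):

def build_mount_options(**kwargs):
    # Truthy boolean keywords become comma-separated mount options.
    return ",".join(key for key, val in kwargs.items() if val)

assert build_mount_options(remount=True, ro=True, bind=True) == "remount,ro,bind"
assert build_mount_options(ro=False, bind=True) == "bind"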
diff --git a/src/buildstream/sandbox/_sandboxbuildbox.py b/src/buildstream/sandbox/_sandboxbuildbox.py
index 15e45a4df..6016fffe3 100644
--- a/src/buildstream/sandbox/_sandboxbuildbox.py
+++ b/src/buildstream/sandbox/_sandboxbuildbox.py
@@ -47,9 +47,7 @@ class SandboxBuildBox(Sandbox):
utils.get_host_tool("buildbox")
except utils.ProgramNotFoundError as Error:
cls._dummy_reasons += ["buildbox not found"]
- raise SandboxError(
- " and ".join(cls._dummy_reasons), reason="unavailable-local-sandbox"
- ) from Error
+ raise SandboxError(" and ".join(cls._dummy_reasons), reason="unavailable-local-sandbox") from Error
@classmethod
def check_sandbox_config(cls, platform, config):
@@ -74,8 +72,7 @@ class SandboxBuildBox(Sandbox):
if not self._has_command(command[0], env):
raise SandboxCommandError(
- "Staged artifacts do not provide command " "'{}'".format(command[0]),
- reason="missing-command",
+ "Staged artifacts do not provide command " "'{}'".format(command[0]), reason="missing-command",
)
# Grab the full path of the buildbox binary
@@ -83,10 +80,7 @@ class SandboxBuildBox(Sandbox):
buildbox_command = [utils.get_host_tool("buildbox")]
except ProgramNotFoundError as Err:
raise SandboxError(
- (
- "BuildBox not on path, you are using the BuildBox sandbox because "
- "BST_FORCE_SANDBOX=buildbox"
- )
+ ("BuildBox not on path, you are using the BuildBox sandbox because " "BST_FORCE_SANDBOX=buildbox")
) from Err
for mark in self._get_marked_directories():
@@ -109,9 +103,7 @@ class SandboxBuildBox(Sandbox):
if not flags & SandboxFlags.NETWORK_ENABLED:
# TODO
- self._issue_warning(
- "BuildBox sandbox does not have Networking yet", detail=common_details
- )
+ self._issue_warning("BuildBox sandbox does not have Networking yet", detail=common_details)
if cwd is not None:
buildbox_command += ["--chdir=" + cwd]
@@ -124,23 +116,20 @@ class SandboxBuildBox(Sandbox):
if flags & SandboxFlags.INTERACTIVE:
# TODO
self._issue_warning(
- "BuildBox sandbox does not fully support BuildStream shells yet",
- detail=common_details,
+ "BuildBox sandbox does not fully support BuildStream shells yet", detail=common_details,
)
if flags & SandboxFlags.ROOT_READ_ONLY:
# TODO
self._issue_warning(
- "BuildBox sandbox does not fully support BuildStream `Read only Root`",
- detail=common_details,
+ "BuildBox sandbox does not fully support BuildStream `Read only Root`", detail=common_details,
)
# Set UID and GID
if not flags & SandboxFlags.INHERIT_UID:
# TODO
self._issue_warning(
- "BuildBox sandbox does not fully support BuildStream Inherit UID",
- detail=common_details,
+ "BuildBox sandbox does not fully support BuildStream Inherit UID", detail=common_details,
)
os.makedirs(os.path.join(scratch_directory, "mnt"), exist_ok=True)
@@ -179,14 +168,10 @@ class SandboxBuildBox(Sandbox):
)
if exit_code == 0:
- with open(
- os.path.join(scratch_directory, "out"), "rb"
- ) as output_digest_file:
+ with open(os.path.join(scratch_directory, "out"), "rb") as output_digest_file:
output_digest = remote_execution_pb2.Digest()
output_digest.ParseFromString(output_digest_file.read())
- self._vdir = CasBasedDirectory(
- root_directory.cas_cache, digest=output_digest
- )
+ self._vdir = CasBasedDirectory(root_directory.cas_cache, digest=output_digest)
return exit_code
@@ -210,9 +195,7 @@ class SandboxBuildBox(Sandbox):
group_id = os.getpgid(process.pid)
os.killpg(group_id, signal.SIGCONT)
- with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(
- kill_proc
- ):
+ with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(kill_proc):
process = subprocess.Popen(
argv,
close_fds=True,
diff --git a/src/buildstream/sandbox/_sandboxbwrap.py b/src/buildstream/sandbox/_sandboxbwrap.py
index d17139293..0e114d9f1 100644
--- a/src/buildstream/sandbox/_sandboxbwrap.py
+++ b/src/buildstream/sandbox/_sandboxbwrap.py
@@ -68,9 +68,7 @@ class SandboxBwrap(Sandbox):
cls._die_with_parent_available = False
cls._json_status_available = False
cls._dummy_reasons += ["Bubblewrap not found"]
- raise SandboxError(
- " and ".join(cls._dummy_reasons), reason="unavailable-local-sandbox"
- ) from Error
+ raise SandboxError(" and ".join(cls._dummy_reasons), reason="unavailable-local-sandbox") from Error
bwrap_version = _site.get_bwrap_version()
@@ -97,18 +95,7 @@ class SandboxBwrap(Sandbox):
try:
whoami = utils.get_host_tool("whoami")
output = subprocess.check_output(
- [
- bwrap,
- "--ro-bind",
- "/",
- "/",
- "--unshare-user",
- "--uid",
- "0",
- "--gid",
- "0",
- whoami,
- ],
+ [bwrap, "--ro-bind", "/", "/", "--unshare-user", "--uid", "0", "--gid", "0", whoami,],
universal_newlines=True,
).strip()
except subprocess.CalledProcessError:
@@ -123,10 +110,7 @@ class SandboxBwrap(Sandbox):
if cls.user_ns_available:
# User namespace support allows arbitrary build UID/GID settings.
pass
- elif (
- config.build_uid != local_platform._uid
- or config.build_gid != local_platform._gid
- ):
+ elif config.build_uid != local_platform._uid or config.build_gid != local_platform._gid:
# Without user namespace support, the UID/GID in the sandbox
# will match the host UID/GID.
return False
@@ -136,9 +120,7 @@ class SandboxBwrap(Sandbox):
if config.build_os != host_os:
raise SandboxError("Configured and host OS don't match.")
if config.build_arch != host_arch and not local_platform.can_crossbuild(config):
- raise SandboxError(
- "Configured architecture and host architecture don't match."
- )
+ raise SandboxError("Configured architecture and host architecture don't match.")
return True
@@ -150,8 +132,7 @@ class SandboxBwrap(Sandbox):
if not self._has_command(command[0], env):
raise SandboxCommandError(
- "Staged artifacts do not provide command " "'{}'".format(command[0]),
- reason="missing-command",
+ "Staged artifacts do not provide command " "'{}'".format(command[0]), reason="missing-command",
)
# NOTE: MountMap transitively imports `_fuse/fuse.py` which raises an
@@ -218,9 +199,7 @@ class SandboxBwrap(Sandbox):
mount_source_overrides = self._get_mount_sources()
for mark in marked_directories:
mount_point = mark["directory"]
- if (
- mount_point in mount_source_overrides
- ): # pylint: disable=consider-using-get
+ if mount_point in mount_source_overrides: # pylint: disable=consider-using-get
mount_source = mount_source_overrides[mount_point]
else:
mount_source = mount_map.get_mount_source(mount_point)
@@ -287,12 +266,7 @@ class SandboxBwrap(Sandbox):
# Run bubblewrap !
exit_code = self.run_bwrap(
- bwrap_command,
- stdin,
- stdout,
- stderr,
- (flags & SandboxFlags.INTERACTIVE),
- pass_fds,
+ bwrap_command, stdin, stdout, stderr, (flags & SandboxFlags.INTERACTIVE), pass_fds,
)
# Cleanup things which bwrap might have left behind, while
@@ -355,9 +329,7 @@ class SandboxBwrap(Sandbox):
break
if child_exit_code is None:
raise SandboxError(
- "`bwrap' terminated during sandbox setup with exitcode {}".format(
- exit_code
- ),
+ "`bwrap' terminated during sandbox setup with exitcode {}".format(exit_code),
reason="bwrap-sandbox-fail",
)
exit_code = child_exit_code
diff --git a/src/buildstream/sandbox/_sandboxchroot.py b/src/buildstream/sandbox/_sandboxchroot.py
index ad76bf998..b26f468be 100644
--- a/src/buildstream/sandbox/_sandboxchroot.py
+++ b/src/buildstream/sandbox/_sandboxchroot.py
@@ -81,17 +81,14 @@ class SandboxChroot(Sandbox):
if not self._has_command(command[0], env):
raise SandboxCommandError(
- "Staged artifacts do not provide command " "'{}'".format(command[0]),
- reason="missing-command",
+ "Staged artifacts do not provide command " "'{}'".format(command[0]), reason="missing-command",
)
stdout, stderr = self._get_output()
# Create the mount map, this will tell us where
# each mount point needs to be mounted from and to
- self.mount_map = MountMap(
- self, flags & SandboxFlags.ROOT_READ_ONLY, self._FUSE_MOUNT_OPTIONS
- )
+ self.mount_map = MountMap(self, flags & SandboxFlags.ROOT_READ_ONLY, self._FUSE_MOUNT_OPTIONS)
# Create a sysroot and run the command inside it
with ExitStack() as stack:
@@ -121,9 +118,7 @@ class SandboxChroot(Sandbox):
if cwd is not None:
workdir = os.path.join(rootfs, cwd.lstrip(os.sep))
os.makedirs(workdir, exist_ok=True)
- status = self.chroot(
- rootfs, command, stdin, stdout, stderr, cwd, env, flags
- )
+ status = self.chroot(rootfs, command, stdin, stdout, stderr, cwd, env, flags)
self._vdir._mark_changed()
return status
@@ -166,9 +161,7 @@ class SandboxChroot(Sandbox):
os.killpg(group_id, signal.SIGCONT)
try:
- with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(
- kill_proc
- ):
+ with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(kill_proc):
process = subprocess.Popen( # pylint: disable=subprocess-popen-preexec-fn
command,
close_fds=True,
@@ -224,9 +217,7 @@ class SandboxChroot(Sandbox):
if str(e) == "Exception occurred in preexec_fn.":
raise SandboxError(
"Could not chroot into {} or chdir into {}. "
- "Ensure you are root and that the relevant directory exists.".format(
- rootfs, cwd
- )
+ "Ensure you are root and that the relevant directory exists.".format(rootfs, cwd)
) from e
# Otherwise, raise a more general error
@@ -262,9 +253,7 @@ class SandboxChroot(Sandbox):
except OSError as err:
if err.errno == 1:
raise SandboxError(
- "Permission denied while creating device node: {}.".format(
- err
- )
+ "Permission denied while creating device node: {}.".format(err)
+ "BuildStream reqiures root permissions for these setttings."
)
@@ -300,9 +289,7 @@ class SandboxChroot(Sandbox):
mount_source = self.mount_map.get_mount_source(point)
mount_point = os.path.join(rootfs, point.lstrip(os.sep))
- with Mounter.bind_mount(
- mount_point, src=mount_source, stdout=stdout, stderr=stderr, **kwargs
- ):
+ with Mounter.bind_mount(mount_point, src=mount_source, stdout=stdout, stderr=stderr, **kwargs):
yield
@contextmanager
@@ -310,9 +297,7 @@ class SandboxChroot(Sandbox):
mount_point = os.path.join(rootfs, src.lstrip(os.sep))
os.makedirs(mount_point, exist_ok=True)
- with Mounter.bind_mount(
- mount_point, src=src, stdout=stdout, stderr=stderr, **kwargs
- ):
+ with Mounter.bind_mount(mount_point, src=src, stdout=stdout, stderr=stderr, **kwargs):
yield
with ExitStack() as stack:
@@ -331,14 +316,7 @@ class SandboxChroot(Sandbox):
# Remount root RO if necessary
if flags & flags & SandboxFlags.ROOT_READ_ONLY:
- root_mount = Mounter.mount(
- rootfs,
- stdout=stdout,
- stderr=stderr,
- remount=True,
- ro=True,
- bind=True,
- )
+ root_mount = Mounter.mount(rootfs, stdout=stdout, stderr=stderr, remount=True, ro=True, bind=True,)
# Since the exit stack has already registered a mount
# for this path, we do not need to register another
# umount call.
@@ -368,13 +346,9 @@ class SandboxChroot(Sandbox):
os.mknod(target, mode=stat.S_IFCHR | dev.st_mode, device=target_dev)
except PermissionError as e:
- raise SandboxError(
- "Could not create device {}, ensure that you have root permissions: {}"
- )
+ raise SandboxError("Could not create device {}, ensure that you have root permissions: {}")
except OSError as e:
- raise SandboxError(
- "Could not create device {}: {}".format(target, e)
- ) from e
+ raise SandboxError("Could not create device {}: {}".format(target, e)) from e
return target
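For context, a minimal sketch (root privileges required, helper name illustrative) of the device-node creation whose error paths are reformatted above:

import os
import stat

def create_device_node(source, target):
    # Replicate the host device's major/minor numbers at the target path.
    dev = os.stat(source)
    target_dev = os.makedev(os.major(dev.st_rdev), os.minor(dev.st_rdev))
    os.mknod(target, mode=stat.S_IFCHR | dev.st_mode, device=target_dev)
    return target

# e.g. create_device_node("/dev/null", "/path/to/rootfs/dev/null")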
diff --git a/src/buildstream/sandbox/_sandboxdummy.py b/src/buildstream/sandbox/_sandboxdummy.py
index 78c08035d..f9272f007 100644
--- a/src/buildstream/sandbox/_sandboxdummy.py
+++ b/src/buildstream/sandbox/_sandboxdummy.py
@@ -29,11 +29,9 @@ class SandboxDummy(Sandbox):
if not self._has_command(command[0], env):
raise SandboxCommandError(
- "Staged artifacts do not provide command " "'{}'".format(command[0]),
- reason="missing-command",
+ "Staged artifacts do not provide command " "'{}'".format(command[0]), reason="missing-command",
)
raise SandboxError(
- "This platform does not support local builds: {}".format(self._reason),
- reason="unavailable-local-sandbox",
+ "This platform does not support local builds: {}".format(self._reason), reason="unavailable-local-sandbox",
)
diff --git a/src/buildstream/sandbox/_sandboxremote.py b/src/buildstream/sandbox/_sandboxremote.py
index 72b0f8f1a..308be2c3b 100644
--- a/src/buildstream/sandbox/_sandboxremote.py
+++ b/src/buildstream/sandbox/_sandboxremote.py
@@ -44,9 +44,7 @@ from .._cas import CASRemote
from .._remote import RemoteSpec
-class RemoteExecutionSpec(
- namedtuple("RemoteExecutionSpec", "exec_service storage_service action_service")
-):
+class RemoteExecutionSpec(namedtuple("RemoteExecutionSpec", "exec_service storage_service action_service")):
pass
@@ -126,9 +124,7 @@ class SandboxRemote(Sandbox):
provenance = remote_config.get_provenance()
raise _yaml.LoadError(
"{}: '{}' was not present in the remote "
- "execution configuration (remote-execution). ".format(
- str(provenance), keyname
- ),
+ "execution configuration (remote-execution). ".format(str(provenance), keyname),
_yaml.LoadErrorReason.INVALID_DATA,
)
return val
@@ -190,9 +186,7 @@ class SandboxRemote(Sandbox):
config[tls_key] = resolve_path(config.get_str(tls_key))
# TODO: we should probably not be stripping node info and rather load files the safe way
- return RemoteExecutionSpec(
- *[conf.strip_node_info() for conf in service_configs]
- )
+ return RemoteExecutionSpec(*[conf.strip_node_info() for conf in service_configs])
def run_remote_command(self, channel, action_digest):
# Sends an execution request to the remote execution server.
@@ -202,9 +196,7 @@ class SandboxRemote(Sandbox):
# Try to create a communication channel to the BuildGrid server.
stub = remote_execution_pb2_grpc.ExecutionStub(channel)
request = remote_execution_pb2.ExecuteRequest(
- instance_name=self.exec_instance,
- action_digest=action_digest,
- skip_cache_lookup=False,
+ instance_name=self.exec_instance, action_digest=action_digest, skip_cache_lookup=False,
)
def __run_remote_command(stub, execute_request=None, running_operation=None):
@@ -213,9 +205,7 @@ class SandboxRemote(Sandbox):
if execute_request is not None:
operation_iterator = stub.Execute(execute_request)
else:
- request = remote_execution_pb2.WaitExecutionRequest(
- name=running_operation.name
- )
+ request = remote_execution_pb2.WaitExecutionRequest(name=running_operation.name)
operation_iterator = stub.WaitExecution(request)
for operation in operation_iterator:
@@ -229,11 +219,7 @@ class SandboxRemote(Sandbox):
except grpc.RpcError as e:
status_code = e.code()
if status_code == grpc.StatusCode.UNAVAILABLE:
- raise SandboxError(
- "Failed contacting remote execution server at {}.".format(
- self.exec_url
- )
- )
+ raise SandboxError("Failed contacting remote execution server at {}.".format(self.exec_url))
if status_code in (
grpc.StatusCode.INVALID_ARGUMENT,
@@ -278,15 +264,11 @@ class SandboxRemote(Sandbox):
try:
stub.CancelOperation(request)
except grpc.RpcError as e:
- if (
- e.code() == grpc.StatusCode.UNIMPLEMENTED
- or e.code() == grpc.StatusCode.INVALID_ARGUMENT
- ):
+ if e.code() == grpc.StatusCode.UNIMPLEMENTED or e.code() == grpc.StatusCode.INVALID_ARGUMENT:
pass
else:
raise SandboxError(
- "Failed trying to send CancelOperation request: "
- "{} ({})".format(e.details(), e.code().name)
+ "Failed trying to send CancelOperation request: " "{} ({})".format(e.details(), e.code().name)
)
def process_job_output(self, output_directories, output_files, *, failure):
@@ -304,9 +286,7 @@ class SandboxRemote(Sandbox):
error_text = "No output directory was returned from the build server."
raise SandboxError(error_text)
if len(output_directories) > 1:
- error_text = (
- "More than one output directory was returned from the build server: {}."
- )
+ error_text = "More than one output directory was returned from the build server: {}."
raise SandboxError(error_text.format(output_directories))
tree_digest = output_directories[0].tree_digest
@@ -352,20 +332,14 @@ class SandboxRemote(Sandbox):
# however, artifact push remotes will need them.
# Only fetch blobs that are missing on one or multiple
# artifact servers.
- blobs_to_fetch = artifactcache.find_missing_blobs(
- project, local_missing_blobs
- )
+ blobs_to_fetch = artifactcache.find_missing_blobs(project, local_missing_blobs)
with CASRemote(self.storage_remote_spec, cascache) as casremote:
- remote_missing_blobs = cascache.fetch_blobs(
- casremote, blobs_to_fetch
- )
+ remote_missing_blobs = cascache.fetch_blobs(casremote, blobs_to_fetch)
if remote_missing_blobs:
raise SandboxError(
- "{} output files are missing on the CAS server".format(
- len(remote_missing_blobs)
- )
+ "{} output files are missing on the CAS server".format(len(remote_missing_blobs))
)
def _run(self, command, flags, *, cwd, env):
@@ -391,9 +365,7 @@ class SandboxRemote(Sandbox):
input_root_digest = upload_vdir._get_digest()
command_proto = self._create_command(command, cwd, env)
command_digest = utils._message_digest(command_proto.SerializeToString())
- action = remote_execution_pb2.Action(
- command_digest=command_digest, input_root_digest=input_root_digest
- )
+ action = remote_execution_pb2.Action(command_digest=command_digest, input_root_digest=input_root_digest)
action_digest = utils._message_digest(action.SerializeToString())
# check action cache download and download if there
@@ -405,20 +377,14 @@ class SandboxRemote(Sandbox):
casremote.init()
except grpc.RpcError as e:
raise SandboxError(
- "Failed to contact remote execution CAS endpoint at {}: {}".format(
- self.storage_url, e
- )
+ "Failed to contact remote execution CAS endpoint at {}: {}".format(self.storage_url, e)
) from e
# Determine blobs missing on remote
try:
- missing_blobs = cascache.remote_missing_blobs_for_directory(
- casremote, input_root_digest
- )
+ missing_blobs = cascache.remote_missing_blobs_for_directory(casremote, input_root_digest)
except grpc.RpcError as e:
- raise SandboxError(
- "Failed to determine missing blobs: {}".format(e)
- ) from e
+ raise SandboxError("Failed to determine missing blobs: {}".format(e)) from e
# Check if any blobs are also missing locally (partial artifact)
# and pull them from the artifact cache.
@@ -427,17 +393,13 @@ class SandboxRemote(Sandbox):
if local_missing_blobs:
artifactcache.fetch_missing_blobs(project, local_missing_blobs)
except (grpc.RpcError, BstError) as e:
- raise SandboxError(
- "Failed to pull missing blobs from artifact cache: {}".format(e)
- ) from e
+ raise SandboxError("Failed to pull missing blobs from artifact cache: {}".format(e)) from e
# Now, push the missing blobs to the remote.
try:
cascache.send_blobs(casremote, missing_blobs)
except grpc.RpcError as e:
- raise SandboxError(
- "Failed to push source directory to remote: {}".format(e)
- ) from e
+ raise SandboxError("Failed to push source directory to remote: {}".format(e)) from e
# Push command and action
try:
@@ -460,9 +422,7 @@ class SandboxRemote(Sandbox):
if url.scheme == "http":
channel = grpc.insecure_channel("{}:{}".format(url.hostname, url.port))
elif url.scheme == "https":
- channel = grpc.secure_channel(
- "{}:{}".format(url.hostname, url.port), self.exec_credentials
- )
+ channel = grpc.secure_channel("{}:{}".format(url.hostname, url.port), self.exec_credentials)
else:
raise SandboxError(
"Remote execution currently only supports the 'http' protocol "
@@ -476,9 +436,7 @@ class SandboxRemote(Sandbox):
# Get output of build
self.process_job_output(
- action_result.output_directories,
- action_result.output_files,
- failure=action_result.exit_code != 0,
+ action_result.output_directories, action_result.output_files, failure=action_result.exit_code != 0,
)
if stdout:
@@ -511,9 +469,7 @@ class SandboxRemote(Sandbox):
if url.scheme == "http":
channel = grpc.insecure_channel("{}:{}".format(url.hostname, url.port))
elif url.scheme == "https":
- channel = grpc.secure_channel(
- "{}:{}".format(url.hostname, url.port), self.action_credentials
- )
+ channel = grpc.secure_channel("{}:{}".format(url.hostname, url.port), self.action_credentials)
with channel:
request = remote_execution_pb2.GetActionResultRequest(
@@ -524,11 +480,7 @@ class SandboxRemote(Sandbox):
result = stub.GetActionResult(request)
except grpc.RpcError as e:
if e.code() != grpc.StatusCode.NOT_FOUND:
- raise SandboxError(
- "Failed to query action cache: {} ({})".format(
- e.code(), e.details()
- )
- )
+ raise SandboxError("Failed to query action cache: {} ({})".format(e.code(), e.details()))
return None
else:
self.info("Action result found in action cache")
@@ -537,8 +489,7 @@ class SandboxRemote(Sandbox):
def _create_command(self, command, working_directory, environment):
# Creates a command proto
environment_variables = [
- remote_execution_pb2.Command.EnvironmentVariable(name=k, value=v)
- for (k, v) in environment.items()
+ remote_execution_pb2.Command.EnvironmentVariable(name=k, value=v) for (k, v) in environment.items()
]
# Request the whole directory tree as output
@@ -604,16 +555,7 @@ class _SandboxRemoteBatch(_SandboxBatch):
self.main_group.execute(self)
first = self.first_command
- if (
- first
- and self.sandbox.run(
- ["sh", "-c", "-e", self.script],
- self.flags,
- cwd=first.cwd,
- env=first.env,
- )
- != 0
- ):
+ if first and self.sandbox.run(["sh", "-c", "-e", self.script], self.flags, cwd=first.cwd, env=first.env,) != 0:
raise SandboxCommandError("Command execution failed", collect=self.collect)
def execute_group(self, group):
@@ -650,11 +592,7 @@ class _SandboxRemoteBatch(_SandboxBatch):
# Error handling
label = command.label or cmdline
quoted_label = shlex.quote("'{}'".format(label))
- self.script += " || (echo Command {} failed with exitcode $? >&2 ; exit 1)\n".format(
- quoted_label
- )
+ self.script += " || (echo Command {} failed with exitcode $? >&2 ; exit 1)\n".format(quoted_label)
def execute_call(self, call):
- raise SandboxError(
- "SandboxRemote does not support callbacks in command batches"
- )
+ raise SandboxError("SandboxRemote does not support callbacks in command batches")
diff --git a/src/buildstream/sandbox/sandbox.py b/src/buildstream/sandbox/sandbox.py
index 2ce556ab8..e661cd31e 100644
--- a/src/buildstream/sandbox/sandbox.py
+++ b/src/buildstream/sandbox/sandbox.py
@@ -118,9 +118,7 @@ class Sandbox:
DEVICES = ["/dev/urandom", "/dev/random", "/dev/zero", "/dev/null"]
_dummy_reasons = [] # type: List[str]
- def __init__(
- self, context: "Context", project: "Project", directory: str, **kwargs
- ):
+ def __init__(self, context: "Context", project: "Project", directory: str, **kwargs):
self.__context = context
self.__project = project
self.__directories = [] # type: List[Dict[str, Union[int, str]]]
@@ -312,9 +310,7 @@ class Sandbox:
command = [command]
if self.__batch:
- assert (
- flags == self.__batch.flags
- ), "Inconsistent sandbox flags in single command batch"
+ assert flags == self.__batch.flags, "Inconsistent sandbox flags in single command batch"
batch_command = _SandboxBatchCommand(command, cwd=cwd, env=env, label=label)
@@ -325,9 +321,7 @@ class Sandbox:
return self._run(command, flags, cwd=cwd, env=env)
@contextmanager
- def batch(
- self, flags: int, *, label: str = None, collect: str = None
- ) -> Generator[None, None, None]:
+ def batch(self, flags: int, *, label: str = None, collect: str = None) -> Generator[None, None, None]:
"""Context manager for command batching
This provides a batch context that defers execution of commands until
@@ -353,9 +347,7 @@ class Sandbox:
if self.__batch:
# Nested batch
- assert (
- flags == self.__batch.flags
- ), "Inconsistent sandbox flags in single command batch"
+ assert flags == self.__batch.flags, "Inconsistent sandbox flags in single command batch"
parent_group = self.__batch.current_group
parent_group.append(group)
@@ -396,9 +388,7 @@ class Sandbox:
# (int): The program exit code.
#
def _run(self, command, flags, *, cwd, env):
- raise ImplError(
- "Sandbox of type '{}' does not implement _run()".format(type(self).__name__)
- )
+ raise ImplError("Sandbox of type '{}' does not implement _run()".format(type(self).__name__))
# _create_batch()
#
@@ -545,9 +535,7 @@ class Sandbox:
# Returns:
# (str): The sandbox scratch directory
def _get_scratch_directory(self):
- assert (
- not self.__bare_directory
- ), "Scratch is not going to work with bare directories"
+ assert not self.__bare_directory, "Scratch is not going to work with bare directories"
return self.__scratch
# _get_output()
@@ -654,9 +642,7 @@ class Sandbox:
# message (str): A message to issue
# detail (str): optional, more details
def _issue_warning(self, message, detail=None):
- self.__context.messenger.message(
- Message(MessageType.WARN, message, detail=detail)
- )
+ self.__context.messenger.message(Message(MessageType.WARN, message, detail=detail))
# _SandboxBatch()
@@ -677,9 +663,7 @@ class _SandboxBatch:
def execute_group(self, group):
if group.label:
context = self.sandbox._get_context()
- cm = context.messenger.timed_activity(
- group.label, element_name=self.sandbox._get_element_name()
- )
+ cm = context.messenger.timed_activity(group.label, element_name=self.sandbox._get_element_name())
else:
cm = contextlib.suppress()
@@ -697,16 +681,12 @@ class _SandboxBatch:
)
context.messenger.message(message)
- exitcode = self.sandbox._run(
- command.command, self.flags, cwd=command.cwd, env=command.env
- )
+ exitcode = self.sandbox._run(command.command, self.flags, cwd=command.cwd, env=command.env)
if exitcode != 0:
cmdline = " ".join(shlex.quote(cmd) for cmd in command.command)
label = command.label or cmdline
raise SandboxCommandError(
- "Command failed with exitcode {}".format(exitcode),
- detail=label,
- collect=self.collect,
+ "Command failed with exitcode {}".format(exitcode), detail=label, collect=self.collect,
)
def execute_call(self, call):
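As an aside, a rough usage sketch of the batch() API whose signature is joined onto one line above, as it might appear in an element plugin (the import path and the commands are assumptions, not taken from the patch):

from buildstream import SandboxFlags  # assumed plugin-facing import path

def assemble(self, sandbox):
    # Commands issued inside the batch are deferred and executed together,
    # all with the same sandbox flags (enforced by the assertions above).
    with sandbox.batch(SandboxFlags.NONE, label="Running build commands"):
        sandbox.run(["sh", "-c", "-e", "./configure"], SandboxFlags.NONE, label="configure")
        sandbox.run(["sh", "-c", "-e", "make"], SandboxFlags.NONE, label="make")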
diff --git a/src/buildstream/scriptelement.py b/src/buildstream/scriptelement.py
index d90e8b6ba..b93c36d1f 100644
--- a/src/buildstream/scriptelement.py
+++ b/src/buildstream/scriptelement.py
@@ -221,16 +221,12 @@ class ScriptElement(Element):
if not self.__layout:
# if no layout set, stage all dependencies into /
for build_dep in self.dependencies(Scope.BUILD, recurse=False):
- with self.timed_activity(
- "Staging {} at /".format(build_dep.name), silent_nested=True
- ):
+ with self.timed_activity("Staging {} at /".format(build_dep.name), silent_nested=True):
build_dep.stage_dependency_artifacts(sandbox, Scope.RUN, path="/")
with sandbox.batch(SandboxFlags.NONE):
for build_dep in self.dependencies(Scope.BUILD, recurse=False):
- with self.timed_activity(
- "Integrating {}".format(build_dep.name), silent_nested=True
- ):
+ with self.timed_activity("Integrating {}".format(build_dep.name), silent_nested=True):
for dep in build_dep.dependencies(Scope.RUN):
dep.integrate(sandbox)
else:
@@ -243,23 +239,15 @@ class ScriptElement(Element):
element = self.search(Scope.BUILD, item["element"])
if item["destination"] == "/":
- with self.timed_activity(
- "Staging {} at /".format(element.name), silent_nested=True
- ):
+ with self.timed_activity("Staging {} at /".format(element.name), silent_nested=True):
element.stage_dependency_artifacts(sandbox, Scope.RUN)
else:
with self.timed_activity(
- "Staging {} at {}".format(element.name, item["destination"]),
- silent_nested=True,
+ "Staging {} at {}".format(element.name, item["destination"]), silent_nested=True,
):
virtual_dstdir = sandbox.get_virtual_directory()
- virtual_dstdir.descend(
- *item["destination"].lstrip(os.sep).split(os.sep),
- create=True
- )
- element.stage_dependency_artifacts(
- sandbox, Scope.RUN, path=item["destination"]
- )
+ virtual_dstdir.descend(*item["destination"].lstrip(os.sep).split(os.sep), create=True)
+ element.stage_dependency_artifacts(sandbox, Scope.RUN, path=item["destination"])
with sandbox.batch(SandboxFlags.NONE):
for item in self.__layout:
@@ -272,16 +260,12 @@ class ScriptElement(Element):
# Integration commands can only be run for elements staged to /
if item["destination"] == "/":
- with self.timed_activity(
- "Integrating {}".format(element.name), silent_nested=True
- ):
+ with self.timed_activity("Integrating {}".format(element.name), silent_nested=True):
for dep in element.dependencies(Scope.RUN):
dep.integrate(sandbox)
install_root_path_components = self.__install_root.lstrip(os.sep).split(os.sep)
- sandbox.get_virtual_directory().descend(
- *install_root_path_components, create=True
- )
+ sandbox.get_virtual_directory().descend(*install_root_path_components, create=True)
def assemble(self, sandbox):
@@ -307,13 +291,9 @@ class ScriptElement(Element):
def __validate_layout(self):
if self.__layout:
# Cannot proceed if layout is used, but none are for "/"
- root_defined = any(
- [(entry["destination"] == "/") for entry in self.__layout]
- )
+ root_defined = any([(entry["destination"] == "/") for entry in self.__layout])
if not root_defined:
- raise ElementError(
- "{}: Using layout, but none are staged as '/'".format(self)
- )
+ raise ElementError("{}: Using layout, but none are staged as '/'".format(self))
# Cannot proceed if layout specifies an element that isn't part
# of the dependencies.
@@ -321,9 +301,7 @@ class ScriptElement(Element):
if item["element"]:
if not self.search(Scope.BUILD, item["element"]):
raise ElementError(
- "{}: '{}' in layout not found in dependencies".format(
- self, item["element"]
- )
+ "{}: '{}' in layout not found in dependencies".format(self, item["element"])
)
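For context, a minimal standalone restatement of the layout check compressed above: a layout is only valid when at least one entry is staged at "/".

def validate_layout(layout):
    # `layout` is a list of {"element": ..., "destination": ...} entries.
    if layout and not any(entry["destination"] == "/" for entry in layout):
        raise ValueError("Using layout, but none are staged as '/'")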
diff --git a/src/buildstream/source.py b/src/buildstream/source.py
index f8de12bc7..59d78ba6e 100644
--- a/src/buildstream/source.py
+++ b/src/buildstream/source.py
@@ -200,19 +200,10 @@ class SourceError(BstError):
"""
def __init__(
- self,
- message: str,
- *,
- detail: Optional[str] = None,
- reason: Optional[str] = None,
- temporary: bool = False
+ self, message: str, *, detail: Optional[str] = None, reason: Optional[str] = None, temporary: bool = False
):
super().__init__(
- message,
- detail=detail,
- domain=ErrorDomain.SOURCE,
- reason=reason,
- temporary=temporary,
+ message, detail=detail, domain=ErrorDomain.SOURCE, reason=reason, temporary=temporary,
)
@@ -254,9 +245,7 @@ class SourceFetcher:
Implementors should raise :class:`.SourceError` if the there is some
network error or if the source reference could not be matched.
"""
- raise ImplError(
- "SourceFetcher '{}' does not implement fetch()".format(type(self))
- )
+ raise ImplError("SourceFetcher '{}' does not implement fetch()".format(type(self)))
#############################################################
# Public Methods #
@@ -356,9 +345,7 @@ class Source(Plugin):
):
provenance = meta.config.get_provenance()
# Set element_name member before parent init, as needed for debug messaging
- self.__element_name = (
- meta.element_name
- ) # The name of the element owning this source
+ self.__element_name = meta.element_name # The name of the element owning this source
super().__init__(
"{}-{}".format(meta.element_name, meta.element_index),
context,
@@ -368,12 +355,8 @@ class Source(Plugin):
unique_id=unique_id,
)
- self.__element_index = (
- meta.element_index
- ) # The index of the source in the owning element's source list
- self.__element_kind = (
- meta.element_kind
- ) # The kind of the element owning this source
+ self.__element_index = meta.element_index # The index of the source in the owning element's source list
+ self.__element_kind = meta.element_kind # The kind of the element owning this source
self.__directory = meta.directory # Staging relative directory
self.__consistency = Consistency.INCONSISTENT # Cached consistency state
self.__meta_kind = meta.kind # The kind of this source, required for unpickling
@@ -381,9 +364,7 @@ class Source(Plugin):
self.__key = None # Cache key for source
# The alias_override is only set on a re-instantiated Source
- self.__alias_override = (
- alias_override # Tuple of alias and its override to use instead
- )
+ self.__alias_override = alias_override # Tuple of alias and its override to use instead
self.__expected_alias = None # The primary alias
# Set of marked download URLs
self.__marked_urls = set() # type: Set[str]
@@ -416,11 +397,7 @@ class Source(Plugin):
Returns:
(:class:`.Consistency`): The source consistency
"""
- raise ImplError(
- "Source plugin '{}' does not implement get_consistency()".format(
- self.get_kind()
- )
- )
+ raise ImplError("Source plugin '{}' does not implement get_consistency()".format(self.get_kind()))
def load_ref(self, node: MappingNode) -> None:
"""Loads the *ref* for this Source from the specified *node*.
@@ -438,9 +415,7 @@ class Source(Plugin):
*Since: 1.2*
"""
- raise ImplError(
- "Source plugin '{}' does not implement load_ref()".format(self.get_kind())
- )
+ raise ImplError("Source plugin '{}' does not implement load_ref()".format(self.get_kind()))
def get_ref(self) -> SourceRef:
"""Fetch the internal ref, however it is represented
@@ -458,9 +433,7 @@ class Source(Plugin):
Implementations *must* return a ``None`` value in the case that
the ref was not loaded. E.g. a ``(None, None)`` tuple is not acceptable.
"""
- raise ImplError(
- "Source plugin '{}' does not implement get_ref()".format(self.get_kind())
- )
+ raise ImplError("Source plugin '{}' does not implement get_ref()".format(self.get_kind()))
def set_ref(self, ref: SourceRef, node: MappingNode) -> None:
"""Applies the internal ref, however it is represented
@@ -478,9 +451,7 @@ class Source(Plugin):
Implementors must support the special ``None`` value here to
allow clearing any existing ref.
"""
- raise ImplError(
- "Source plugin '{}' does not implement set_ref()".format(self.get_kind())
- )
+ raise ImplError("Source plugin '{}' does not implement set_ref()".format(self.get_kind()))
def track(self, **kwargs) -> SourceRef:
"""Resolve a new ref from the plugin's track option
@@ -526,9 +497,7 @@ class Source(Plugin):
Implementors should raise :class:`.SourceError` if the there is some
network error or if the source reference could not be matched.
"""
- raise ImplError(
- "Source plugin '{}' does not implement fetch()".format(self.get_kind())
- )
+ raise ImplError("Source plugin '{}' does not implement fetch()".format(self.get_kind()))
def stage(self, directory: Union[str, Directory]) -> None:
"""Stage the sources to a directory
@@ -545,9 +514,7 @@ class Source(Plugin):
Implementors should raise :class:`.SourceError` when encountering
some system error.
"""
- raise ImplError(
- "Source plugin '{}' does not implement stage()".format(self.get_kind())
- )
+ raise ImplError("Source plugin '{}' does not implement stage()".format(self.get_kind()))
def init_workspace(self, directory: str) -> None:
"""Initialises a new workspace
@@ -622,9 +589,7 @@ class Source(Plugin):
return self.__mirror_directory
- def translate_url(
- self, url: str, *, alias_override: Optional[str] = None, primary: bool = True
- ) -> str:
+ def translate_url(self, url: str, *, alias_override: Optional[str] = None, primary: bool = True) -> str:
"""Translates the given url which may be specified with an alias
into a fully qualified url.
@@ -689,8 +654,7 @@ class Source(Plugin):
expected_alias = _extract_alias(url)
assert (
- self.__expected_alias is None
- or self.__expected_alias == expected_alias
+ self.__expected_alias is None or self.__expected_alias == expected_alias
), "Primary URL marked twice with different URLs"
self.__expected_alias = expected_alias
@@ -801,9 +765,7 @@ class Source(Plugin):
# Source consistency interrogations are silent.
context = self._get_context()
with context.messenger.silence():
- self.__consistency = (
- self.get_consistency()
- ) # pylint: disable=assignment-from-no-return
+ self.__consistency = self.get_consistency() # pylint: disable=assignment-from-no-return
# Give the Source an opportunity to validate the cached
# sources as soon as the Source becomes Consistency.CACHED.
@@ -826,9 +788,7 @@ class Source(Plugin):
if self.BST_REQUIRES_PREVIOUS_SOURCES_FETCH:
self.__ensure_previous_sources(previous_sources)
with self.__stage_previous_sources(previous_sources) as staging_directory:
- self.__do_fetch(
- previous_sources_dir=self.__ensure_directory(staging_directory)
- )
+ self.__do_fetch(previous_sources_dir=self.__ensure_directory(staging_directory))
else:
self.__do_fetch()
@@ -842,9 +802,7 @@ class Source(Plugin):
if self.BST_KEY_REQUIRES_STAGE:
# _get_unique_key should be called before _stage
assert self.__digest is not None
- cas_dir = CasBasedDirectory(
- self._get_context().get_cascache(), digest=self.__digest
- )
+ cas_dir = CasBasedDirectory(self._get_context().get_cascache(), digest=self.__digest)
directory.import_files(cas_dir)
else:
self.stage(directory)
@@ -868,9 +826,7 @@ class Source(Plugin):
if self.BST_KEY_REQUIRES_STAGE:
key["unique"] = self._stage_into_cas()
else:
- key[
- "unique"
- ] = self.get_unique_key() # pylint: disable=assignment-from-no-return
+ key["unique"] = self.get_unique_key() # pylint: disable=assignment-from-no-return
return key
# _project_refs():
@@ -919,17 +875,12 @@ class Source(Plugin):
self.load_ref(ref_node)
except ImplError as e:
raise SourceError(
- "{}: Storing refs in project.refs is not supported by '{}' sources".format(
- self, self.get_kind()
- ),
+ "{}: Storing refs in project.refs is not supported by '{}' sources".format(self, self.get_kind()),
reason="unsupported-load-ref",
) from e
# If the main project overrides the ref, use the override
- if (
- project is not toplevel
- and toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS
- ):
+ if project is not toplevel and toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS:
refs = self._project_refs(toplevel)
ref_node = refs.lookup_ref(project.name, element_name, element_idx)
if ref_node is not None:
@@ -987,35 +938,22 @@ class Source(Plugin):
#
node = {}
if toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS:
- node = toplevel_refs.lookup_ref(
- project.name, element_name, element_idx, write=True
- )
+ node = toplevel_refs.lookup_ref(project.name, element_name, element_idx, write=True)
if project is toplevel and not node:
node = provenance._node
# Ensure the node is not from a junction
- if (
- not toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS
- and provenance._project is not toplevel
- ):
+ if not toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS and provenance._project is not toplevel:
if provenance._project is project:
- self.warn(
- "{}: Not persisting new reference in junctioned project".format(
- self
- )
- )
+ self.warn("{}: Not persisting new reference in junctioned project".format(self))
elif provenance._project is None:
assert provenance._filename == ""
assert provenance._shortname == ""
- raise SourceError(
- "{}: Error saving source reference to synthetic node.".format(self)
- )
+ raise SourceError("{}: Error saving source reference to synthetic node.".format(self))
else:
raise SourceError(
- "{}: Cannot track source in a fragment from a junction".format(
- provenance._shortname
- ),
+ "{}: Cannot track source in a fragment from a junction".format(provenance._shortname),
reason="tracking-junction-fragment",
)
@@ -1061,9 +999,7 @@ class Source(Plugin):
if type(step) is str: # pylint: disable=unidiomatic-typecheck
# handle dict container
if step not in container:
- if (
- type(next_step) is str
- ): # pylint: disable=unidiomatic-typecheck
+ if type(next_step) is str: # pylint: disable=unidiomatic-typecheck
container[step] = {}
else:
container[step] = []
@@ -1107,9 +1043,9 @@ class Source(Plugin):
roundtrip_file = roundtrip_cache.get(provenance._filename)
if not roundtrip_file:
- roundtrip_file = roundtrip_cache[
- provenance._filename
- ] = _yaml.roundtrip_load(provenance._filename, allow_missing=True)
+ roundtrip_file = roundtrip_cache[provenance._filename] = _yaml.roundtrip_load(
+ provenance._filename, allow_missing=True
+ )
# Get the value of the round trip file that we need to change
process_value(action, roundtrip_file, path, key, to_modify.get(key))
@@ -1123,10 +1059,7 @@ class Source(Plugin):
_yaml.roundtrip_dump(data, filename)
except OSError as e:
raise SourceError(
- "{}: Error saving source reference to '{}': {}".format(
- self, filename, e
- ),
- reason="save-ref-error",
+ "{}: Error saving source reference to '{}': {}".format(self, filename, e), reason="save-ref-error",
) from e
return True
@@ -1145,9 +1078,7 @@ class Source(Plugin):
if self.BST_REQUIRES_PREVIOUS_SOURCES_TRACK:
self.__ensure_previous_sources(previous_sources)
with self.__stage_previous_sources(previous_sources) as staging_directory:
- new_ref = self.__do_track(
- previous_sources_dir=self.__ensure_directory(staging_directory)
- )
+ new_ref = self.__do_track(previous_sources_dir=self.__ensure_directory(staging_directory))
else:
new_ref = self.__do_track()
@@ -1174,10 +1105,7 @@ class Source(Plugin):
# (bool): Whether this source requires access to previous sources
#
def _requires_previous_sources(self):
- return (
- self.BST_REQUIRES_PREVIOUS_SOURCES_TRACK
- or self.BST_REQUIRES_PREVIOUS_SOURCES_FETCH
- )
+ return self.BST_REQUIRES_PREVIOUS_SOURCES_TRACK or self.BST_REQUIRES_PREVIOUS_SOURCES_FETCH
# Returns the alias if it's defined in the project
def _get_alias(self):
@@ -1283,13 +1211,7 @@ class Source(Plugin):
meta.first_pass = self.__first_pass
- clone = source_kind(
- context,
- project,
- meta,
- alias_override=(alias, uri),
- unique_id=self._unique_id,
- )
+ clone = source_kind(context, project, meta, alias_override=(alias, uri), unique_id=self._unique_id,)
# Do the necessary post instantiation routines here
#
@@ -1407,9 +1329,7 @@ class Source(Plugin):
# NOTE: We are assuming here that tracking only requires substituting the
# first alias used
- for uri in reversed(
- project.get_alias_uris(alias, first_pass=self.__first_pass)
- ):
+ for uri in reversed(project.get_alias_uris(alias, first_pass=self.__first_pass)):
new_source = self.__clone_for_uri(uri)
try:
ref = new_source.track(**kwargs) # pylint: disable=assignment-from-none
@@ -1432,20 +1352,16 @@ class Source(Plugin):
os.makedirs(directory, exist_ok=True)
except OSError as e:
raise SourceError(
- "Failed to create staging directory: {}".format(e),
- reason="ensure-stage-dir-fail",
+ "Failed to create staging directory: {}".format(e), reason="ensure-stage-dir-fail",
) from e
else:
if self.__directory is not None:
try:
- directory = directory.descend(
- *self.__directory.lstrip(os.sep).split(os.sep), create=True
- )
+ directory = directory.descend(*self.__directory.lstrip(os.sep).split(os.sep), create=True)
except VirtualDirectoryError as e:
raise SourceError(
- "Failed to descend into staging directory: {}".format(e),
- reason="ensure-stage-dir-fail",
+ "Failed to descend into staging directory: {}".format(e), reason="ensure-stage-dir-fail",
) from e
return directory
diff --git a/src/buildstream/storage/_casbaseddirectory.py b/src/buildstream/storage/_casbaseddirectory.py
index 3b248f3ae..9c5c179b0 100644
--- a/src/buildstream/storage/_casbaseddirectory.py
+++ b/src/buildstream/storage/_casbaseddirectory.py
@@ -99,15 +99,7 @@ class CasBasedDirectory(Directory):
_pb2_path_sep = "/"
_pb2_absolute_path_prefix = "/"
- def __init__(
- self,
- cas_cache,
- *,
- digest=None,
- parent=None,
- common_name="untitled",
- filename=None
- ):
+ def __init__(self, cas_cache, *, digest=None, parent=None, common_name="untitled", filename=None):
self.filename = filename
self.common_name = common_name
self.cas_cache = cas_cache
@@ -123,25 +115,16 @@ class CasBasedDirectory(Directory):
with open(self.cas_cache.objpath(digest), "rb") as f:
pb2_directory.ParseFromString(f.read())
except FileNotFoundError as e:
- raise VirtualDirectoryError(
- "Directory not found in local cache: {}".format(e)
- ) from e
+ raise VirtualDirectoryError("Directory not found in local cache: {}".format(e)) from e
for entry in pb2_directory.directories:
- self.index[entry.name] = IndexEntry(
- entry.name, _FileType.DIRECTORY, digest=entry.digest
- )
+ self.index[entry.name] = IndexEntry(entry.name, _FileType.DIRECTORY, digest=entry.digest)
for entry in pb2_directory.files:
self.index[entry.name] = IndexEntry(
- entry.name,
- _FileType.REGULAR_FILE,
- digest=entry.digest,
- is_executable=entry.is_executable,
+ entry.name, _FileType.REGULAR_FILE, digest=entry.digest, is_executable=entry.is_executable,
)
for entry in pb2_directory.symlinks:
- self.index[entry.name] = IndexEntry(
- entry.name, _FileType.SYMLINK, target=entry.target
- )
+ self.index[entry.name] = IndexEntry(entry.name, _FileType.SYMLINK, target=entry.target)
def _find_self_in_parent(self):
assert self.parent is not None
@@ -156,20 +139,14 @@ class CasBasedDirectory(Directory):
newdir = CasBasedDirectory(self.cas_cache, parent=self, filename=name)
- self.index[name] = IndexEntry(
- name, _FileType.DIRECTORY, buildstream_object=newdir
- )
+ self.index[name] = IndexEntry(name, _FileType.DIRECTORY, buildstream_object=newdir)
self.__invalidate_digest()
return newdir
def _add_file(self, basename, filename, modified=False, can_link=False):
- entry = IndexEntry(
- filename,
- _FileType.REGULAR_FILE,
- modified=modified or filename in self.index,
- )
+ entry = IndexEntry(filename, _FileType.REGULAR_FILE, modified=modified or filename in self.index,)
path = os.path.join(basename, filename)
entry.digest = self.cas_cache.add_object(path=path, link_directly=can_link)
entry.is_executable = os.access(path, os.X_OK)
@@ -178,14 +155,10 @@ class CasBasedDirectory(Directory):
self.__invalidate_digest()
def _copy_link_from_filesystem(self, basename, filename):
- self._add_new_link_direct(
- filename, os.readlink(os.path.join(basename, filename))
- )
+ self._add_new_link_direct(filename, os.readlink(os.path.join(basename, filename)))
def _add_new_link_direct(self, name, target):
- self.index[name] = IndexEntry(
- name, _FileType.SYMLINK, target=target, modified=name in self.index
- )
+ self.index[name] = IndexEntry(name, _FileType.SYMLINK, target=target, modified=name in self.index)
self.__invalidate_digest()
@@ -237,20 +210,13 @@ class CasBasedDirectory(Directory):
linklocation = entry.target
newpaths = linklocation.split(os.path.sep)
if os.path.isabs(linklocation):
- current_dir = current_dir.find_root().descend(
- *newpaths, follow_symlinks=True
- )
+ current_dir = current_dir.find_root().descend(*newpaths, follow_symlinks=True)
else:
- current_dir = current_dir.descend(
- *newpaths, follow_symlinks=True
- )
+ current_dir = current_dir.descend(*newpaths, follow_symlinks=True)
else:
- error = (
- "Cannot descend into {}, which is a '{}' in the directory {}"
- )
+ error = "Cannot descend into {}, which is a '{}' in the directory {}"
raise VirtualDirectoryError(
- error.format(path, current_dir.index[path].type, current_dir),
- reason="not-a-directory",
+ error.format(path, current_dir.index[path].type, current_dir), reason="not-a-directory",
)
else:
if path == ".":
@@ -265,8 +231,7 @@ class CasBasedDirectory(Directory):
else:
error = "'{}' not found in {}"
raise VirtualDirectoryError(
- error.format(path, str(current_dir)),
- reason="directory-not-found",
+ error.format(path, str(current_dir)), reason="directory-not-found",
)
return current_dir
@@ -297,9 +262,7 @@ class CasBasedDirectory(Directory):
fileListResult.overwritten.append(relative_pathname)
return True
- def _partial_import_cas_into_cas(
- self, source_directory, filter_callback, *, path_prefix="", origin=None, result
- ):
+ def _partial_import_cas_into_cas(self, source_directory, filter_callback, *, path_prefix="", origin=None, result):
""" Import files from a CAS-based directory. """
if origin is None:
origin = self
@@ -318,9 +281,7 @@ class CasBasedDirectory(Directory):
# we can import the whole source directory by digest instead
# of importing each directory entry individually.
subdir_digest = entry.get_digest()
- dest_entry = IndexEntry(
- name, _FileType.DIRECTORY, digest=subdir_digest
- )
+ dest_entry = IndexEntry(name, _FileType.DIRECTORY, digest=subdir_digest)
self.index[name] = dest_entry
self.__invalidate_digest()
@@ -337,9 +298,7 @@ class CasBasedDirectory(Directory):
else:
subdir = dest_entry.get_directory(self)
- subdir.__add_files_to_result(
- path_prefix=relative_pathname, result=result
- )
+ subdir.__add_files_to_result(path_prefix=relative_pathname, result=result)
else:
src_subdir = source_directory.descend(name)
if src_subdir == origin:
@@ -350,17 +309,11 @@ class CasBasedDirectory(Directory):
except VirtualDirectoryError:
filetype = self.index[name].type
raise VirtualDirectoryError(
- "Destination is a {}, not a directory: /{}".format(
- filetype, relative_pathname
- )
+ "Destination is a {}, not a directory: /{}".format(filetype, relative_pathname)
)
dest_subdir._partial_import_cas_into_cas(
- src_subdir,
- filter_callback,
- path_prefix=relative_pathname,
- origin=origin,
- result=result,
+ src_subdir, filter_callback, path_prefix=relative_pathname, origin=origin, result=result,
)
if filter_callback and not filter_callback(relative_pathname):
@@ -388,13 +341,7 @@ class CasBasedDirectory(Directory):
result.files_written.append(relative_pathname)
def import_files(
- self,
- external_pathspec,
- *,
- filter_callback=None,
- report_written=True,
- update_mtime=False,
- can_link=False
+ self, external_pathspec, *, filter_callback=None, report_written=True, update_mtime=False, can_link=False
):
""" See superclass Directory for arguments """
@@ -413,9 +360,7 @@ class CasBasedDirectory(Directory):
external_pathspec = CasBasedDirectory(self.cas_cache, digest=digest)
assert isinstance(external_pathspec, CasBasedDirectory)
- self._partial_import_cas_into_cas(
- external_pathspec, filter_callback, result=result
- )
+ self._partial_import_cas_into_cas(external_pathspec, filter_callback, result=result)
# TODO: No notice is taken of report_written or update_mtime.
# Current behaviour is to fully populate the report, which is inefficient,
@@ -425,11 +370,7 @@ class CasBasedDirectory(Directory):
def import_single_file(self, external_pathspec):
result = FileListResult()
- if self._check_replacement(
- os.path.basename(external_pathspec),
- os.path.dirname(external_pathspec),
- result,
- ):
+ if self._check_replacement(os.path.basename(external_pathspec), os.path.dirname(external_pathspec), result,):
self._add_file(
os.path.dirname(external_pathspec),
os.path.basename(external_pathspec),
@@ -495,9 +436,7 @@ class CasBasedDirectory(Directory):
f = StringIO(entry.target)
tarfile.addfile(tarinfo, f)
else:
- raise VirtualDirectoryError(
- "can not export file type {} to tar".format(entry.type)
- )
+ raise VirtualDirectoryError("can not export file type {} to tar".format(entry.type))
def _mark_changed(self):
""" It should not be possible to externally modify a CAS-based
@@ -588,12 +527,8 @@ class CasBasedDirectory(Directory):
"""
- file_list = list(
- filter(lambda i: i[1].type != _FileType.DIRECTORY, self.index.items())
- )
- directory_list = filter(
- lambda i: i[1].type == _FileType.DIRECTORY, self.index.items()
- )
+ file_list = list(filter(lambda i: i[1].type != _FileType.DIRECTORY, self.index.items()))
+ directory_list = filter(lambda i: i[1].type == _FileType.DIRECTORY, self.index.items())
if prefix != "":
yield prefix
@@ -603,9 +538,7 @@ class CasBasedDirectory(Directory):
for (k, v) in sorted(directory_list):
subdir = v.get_directory(self)
- yield from subdir._list_prefixed_relative_paths(
- prefix=os.path.join(prefix, k)
- )
+ yield from subdir._list_prefixed_relative_paths(prefix=os.path.join(prefix, k))
def walk(self):
"""Provide a list of dictionaries containing information about the files.
@@ -673,8 +606,7 @@ class CasBasedDirectory(Directory):
""" There is no underlying directory for a CAS-backed directory, so
throw an exception. """
raise VirtualDirectoryError(
- "_get_underlying_directory was called on a CAS-backed directory,"
- + " which has no underlying directory."
+ "_get_underlying_directory was called on a CAS-backed directory," + " which has no underlying directory."
)
# _get_digest():
@@ -712,9 +644,7 @@ class CasBasedDirectory(Directory):
symlinknode.name = name
symlinknode.target = entry.target
- self.__digest = self.cas_cache.add_object(
- buffer=pb2_directory.SerializeToString()
- )
+ self.__digest = self.cas_cache.add_object(buffer=pb2_directory.SerializeToString())
return self.__digest
@@ -729,9 +659,7 @@ class CasBasedDirectory(Directory):
linklocation = target.target
newpath = linklocation.split(os.path.sep)
if os.path.isabs(linklocation):
- return subdir.find_root()._exists(
- *newpath, follow_symlinks=True
- )
+ return subdir.find_root()._exists(*newpath, follow_symlinks=True)
return subdir._exists(*newpath, follow_symlinks=True)
return False
except VirtualDirectoryError:
@@ -750,8 +678,6 @@ class CasBasedDirectory(Directory):
if entry.type == _FileType.DIRECTORY:
subdir = self.descend(name)
- subdir.__add_files_to_result(
- path_prefix=relative_pathname, result=result
- )
+ subdir.__add_files_to_result(path_prefix=relative_pathname, result=result)
else:
result.files_written.append(relative_pathname)
diff --git a/src/buildstream/storage/_filebaseddirectory.py b/src/buildstream/storage/_filebaseddirectory.py
index 21515649d..0926c1b3a 100644
--- a/src/buildstream/storage/_filebaseddirectory.py
+++ b/src/buildstream/storage/_filebaseddirectory.py
@@ -58,9 +58,7 @@ class FileBasedDirectory(Directory):
""" See superclass Directory for arguments """
if follow_symlinks:
- ImplError(
- "FileBasedDirectory.Decend dose not implement follow_symlinks=True"
- )
+ raise ImplError("FileBasedDirectory.descend does not implement follow_symlinks=True")
current_dir = self
@@ -74,38 +72,24 @@ class FileBasedDirectory(Directory):
st = os.lstat(new_path)
if not stat.S_ISDIR(st.st_mode):
raise VirtualDirectoryError(
- "Cannot descend into '{}': '{}' is not a directory".format(
- path, new_path
- )
+ "Cannot descend into '{}': '{}' is not a directory".format(path, new_path)
)
except FileNotFoundError:
if create:
os.mkdir(new_path)
else:
- raise VirtualDirectoryError(
- "Cannot descend into '{}': '{}' does not exist".format(
- path, new_path
- )
- )
+ raise VirtualDirectoryError("Cannot descend into '{}': '{}' does not exist".format(path, new_path))
current_dir = FileBasedDirectory(new_path)
return current_dir
def import_files(
- self,
- external_pathspec,
- *,
- filter_callback=None,
- report_written=True,
- update_mtime=False,
- can_link=False
+ self, external_pathspec, *, filter_callback=None, report_written=True, update_mtime=False, can_link=False
):
""" See superclass Directory for arguments """
- from ._casbaseddirectory import (
- CasBasedDirectory,
- ) # pylint: disable=cyclic-import
+ from ._casbaseddirectory import CasBasedDirectory # pylint: disable=cyclic-import
if isinstance(external_pathspec, CasBasedDirectory):
if can_link and not update_mtime:
@@ -114,9 +98,7 @@ class FileBasedDirectory(Directory):
actionfunc = utils.safe_copy
import_result = FileListResult()
- self._import_files_from_cas(
- external_pathspec, actionfunc, filter_callback, result=import_result
- )
+ self._import_files_from_cas(external_pathspec, actionfunc, filter_callback, result=import_result)
else:
if isinstance(external_pathspec, Directory):
source_directory = external_pathspec.external_directory
@@ -144,15 +126,11 @@ class FileBasedDirectory(Directory):
cur_time = time.time()
for f in import_result.files_written:
- os.utime(
- os.path.join(self.external_directory, f), times=(cur_time, cur_time)
- )
+ os.utime(os.path.join(self.external_directory, f), times=(cur_time, cur_time))
return import_result
def import_single_file(self, external_pathspec):
- dstpath = os.path.join(
- self.external_directory, os.path.basename(external_pathspec)
- )
+ dstpath = os.path.join(self.external_directory, os.path.basename(external_pathspec))
result = FileListResult()
if os.path.exists(dstpath):
result.ignored.append(dstpath)
@@ -206,9 +184,7 @@ class FileBasedDirectory(Directory):
tarfile.addfile(tarinfo, f)
elif tarinfo.isdir():
tarfile.addfile(tarinfo)
- self.descend(*filename.split(os.path.sep)).export_to_tar(
- tarfile, arcname, mtime
- )
+ self.descend(*filename.split(os.path.sep)).export_to_tar(tarfile, arcname, mtime)
else:
tarfile.addfile(tarinfo)
@@ -230,8 +206,7 @@ class FileBasedDirectory(Directory):
return [
f
for f in list_relative_paths(self.external_directory)
- if _get_link_mtime(os.path.join(self.external_directory, f))
- != BST_ARBITRARY_TIMESTAMP
+ if _get_link_mtime(os.path.join(self.external_directory, f)) != BST_ARBITRARY_TIMESTAMP
]
def list_relative_paths(self):
@@ -272,9 +247,7 @@ class FileBasedDirectory(Directory):
else:
return _FileType.SPECIAL_FILE
- def _import_files_from_cas(
- self, source_directory, actionfunc, filter_callback, *, path_prefix="", result
- ):
+ def _import_files_from_cas(self, source_directory, actionfunc, filter_callback, *, path_prefix="", result):
""" Import files from a CAS-based directory. """
for name, entry in source_directory.index.items():
@@ -295,17 +268,11 @@ class FileBasedDirectory(Directory):
except VirtualDirectoryError:
filetype = self._get_filetype(name)
raise VirtualDirectoryError(
- "Destination is a {}, not a directory: /{}".format(
- filetype, relative_pathname
- )
+ "Destination is a {}, not a directory: /{}".format(filetype, relative_pathname)
)
dest_subdir._import_files_from_cas(
- src_subdir,
- actionfunc,
- filter_callback,
- path_prefix=relative_pathname,
- result=result,
+ src_subdir, actionfunc, filter_callback, path_prefix=relative_pathname, result=result,
)
if filter_callback and not filter_callback(relative_pathname):
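
FileBasedDirectory.descend, reformatted above, walks path components on the real filesystem, creating missing directories when asked and refusing to cross anything that is not a directory. A rough self-contained sketch of that behaviour follows, using plain exceptions in place of BuildStream's VirtualDirectoryError.

import os
import stat

def descend(base_dir, *paths, create=False):
    current = base_dir
    for path in paths:
        new_path = os.path.join(current, path)
        try:
            st = os.lstat(new_path)
            if not stat.S_ISDIR(st.st_mode):
                raise ValueError("Cannot descend into '{}': '{}' is not a directory".format(path, new_path))
        except FileNotFoundError:
            if create:
                os.mkdir(new_path)
            else:
                raise ValueError("Cannot descend into '{}': '{}' does not exist".format(path, new_path))
        current = new_path
    return current

# Usage: descend("/tmp", "a", "b", create=True) creates /tmp/a and /tmp/a/b if needed
# and returns the final path; without create=True a missing component raises.
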
diff --git a/src/buildstream/testing/__init__.py b/src/buildstream/testing/__init__.py
index 2fd882e18..67e96885a 100644
--- a/src/buildstream/testing/__init__.py
+++ b/src/buildstream/testing/__init__.py
@@ -32,10 +32,7 @@ try:
import pytest
except ImportError:
module_name = globals()["__name__"]
- msg = (
- "Could not import pytest:\n"
- "To use the {} module, you must have pytest installed.".format(module_name)
- )
+ msg = "Could not import pytest:\n" "To use the {} module, you must have pytest installed.".format(module_name)
raise ImportError(msg)
@@ -120,9 +117,7 @@ def sourcetests_collection_hook(session):
# Add the location of the source tests to the session's
# python_files config. Without this, pytest may filter out these
# tests during collection.
- session.config.addinivalue_line(
- "python_files", os.path.join(source_test_path, "*.py")
- )
+ session.config.addinivalue_line("python_files", os.path.join(source_test_path, "*.py"))
# If test invocation has specified specific tests, don't
# automatically collect templated tests.
if should_collect_tests(session.config):
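
The collection hook above appends the source-test location to pytest's python_files setting so that templated tests outside the default naming rules are still collected. Here is a minimal conftest-style sketch of the same idea; the pytest_sessionstart hook and the extra_tests directory are assumptions made for the example, not what BuildStream itself wires up.

# conftest.py (sketch)
import os

def pytest_sessionstart(session):
    extra_tests = os.path.join(os.path.dirname(__file__), "extra_tests")
    # Extend the python_files ini option so pytest collects modules
    # living outside the project's default test locations.
    session.config.addinivalue_line("python_files", os.path.join(extra_tests, "*.py"))
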
diff --git a/src/buildstream/testing/_sourcetests/build_checkout.py b/src/buildstream/testing/_sourcetests/build_checkout.py
index e673702e2..d0abb0345 100644
--- a/src/buildstream/testing/_sourcetests/build_checkout.py
+++ b/src/buildstream/testing/_sourcetests/build_checkout.py
@@ -64,10 +64,7 @@ def test_fetch_build_checkout(cli, tmpdir, datafiles, strict, kind):
# Now check it out
result = cli.run(
- project=project,
- args=strict_args(
- ["artifact", "checkout", element_name, "--directory", checkout], strict
- ),
+ project=project, args=strict_args(["artifact", "checkout", element_name, "--directory", checkout], strict),
)
result.assert_success()
diff --git a/src/buildstream/testing/_sourcetests/fetch.py b/src/buildstream/testing/_sourcetests/fetch.py
index fc95c6e5b..e07bf8824 100644
--- a/src/buildstream/testing/_sourcetests/fetch.py
+++ b/src/buildstream/testing/_sourcetests/fetch.py
@@ -85,19 +85,13 @@ def test_fetch_cross_junction(cli, tmpdir, datafiles, ref_storage, kind):
update_project_configuration(project, {"ref-storage": ref_storage})
- generate_junction(
- tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline")
- )
+ generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline"))
if ref_storage == "project.refs":
result = cli.run(project=project, args=["source", "track", "junction.bst"])
result.assert_success()
- result = cli.run(
- project=project, args=["source", "track", "junction.bst:import-etc.bst"]
- )
+ result = cli.run(project=project, args=["source", "track", "junction.bst:import-etc.bst"])
result.assert_success()
- result = cli.run(
- project=project, args=["source", "fetch", "junction.bst:import-etc.bst"]
- )
+ result = cli.run(project=project, args=["source", "fetch", "junction.bst:import-etc.bst"])
result.assert_success()
diff --git a/src/buildstream/testing/_sourcetests/mirror.py b/src/buildstream/testing/_sourcetests/mirror.py
index a28bf3c00..b907a6ee4 100644
--- a/src/buildstream/testing/_sourcetests/mirror.py
+++ b/src/buildstream/testing/_sourcetests/mirror.py
@@ -163,17 +163,11 @@ def test_mirror_from_includes(cli, tmpdir, datafiles, kind):
config_project_dir = str(tmpdir.join("config"))
os.makedirs(config_project_dir, exist_ok=True)
config_project = {"name": "config"}
- _yaml.roundtrip_dump(
- config_project, os.path.join(config_project_dir, "project.conf")
- )
- extra_mirrors = {
- "mirrors": [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],}}]
- }
+ _yaml.roundtrip_dump(config_project, os.path.join(config_project_dir, "project.conf"))
+ extra_mirrors = {"mirrors": [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],}}]}
_yaml.roundtrip_dump(extra_mirrors, os.path.join(config_project_dir, "mirrors.yml"))
generate_junction(
- str(tmpdir.join("config_repo")),
- config_project_dir,
- os.path.join(element_dir, "config.bst"),
+ str(tmpdir.join("config_repo")), config_project_dir, os.path.join(element_dir, "config.bst"),
)
_set_project_includes_and_aliases(
@@ -217,22 +211,14 @@ def test_mirror_junction_from_includes(cli, tmpdir, datafiles, kind):
config_project_dir = str(tmpdir.join("config"))
os.makedirs(config_project_dir, exist_ok=True)
config_project = {"name": "config"}
- _yaml.roundtrip_dump(
- config_project, os.path.join(config_project_dir, "project.conf")
- )
- extra_mirrors = {
- "mirrors": [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],}}]
- }
+ _yaml.roundtrip_dump(config_project, os.path.join(config_project_dir, "project.conf"))
+ extra_mirrors = {"mirrors": [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],}}]}
_yaml.roundtrip_dump(extra_mirrors, os.path.join(config_project_dir, "mirrors.yml"))
generate_junction(
- str(tmpdir.join("config_repo")),
- config_project_dir,
- os.path.join(element_dir, "config.bst"),
+ str(tmpdir.join("config_repo")), config_project_dir, os.path.join(element_dir, "config.bst"),
)
- _set_project_includes_and_aliases(
- project_dir, ["config.bst:mirrors.yml"], {alias: upstream_map + "/"}
- )
+ _set_project_includes_and_aliases(project_dir, ["config.bst:mirrors.yml"], {alias: upstream_map + "/"})
# Now make the upstream unavailable.
os.rename(upstream_repo.repo, "{}.bak".format(upstream_repo.repo))
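
For context on the mirror tests above: they generate a tiny "config" project carrying only a mirrors mapping, dump it with _yaml.roundtrip_dump, and junction it into the test project. A sketch of that configuration on its own, with a placeholder alias, directory and mirror URL:

import os
from buildstream import _yaml  # same helper the tests above use

alias = "example-alias"  # placeholder; the real alias comes from the source kind under test
extra_mirrors = {
    "mirrors": [
        {"name": "middle-earth", "aliases": {alias: ["file:///srv/mirror/"]}},
    ]
}

config_project_dir = "/tmp/config"
os.makedirs(config_project_dir, exist_ok=True)
_yaml.roundtrip_dump({"name": "config"}, os.path.join(config_project_dir, "project.conf"))
_yaml.roundtrip_dump(extra_mirrors, os.path.join(config_project_dir, "mirrors.yml"))

# mirrors.yml then reads roughly:
#   mirrors:
#   - name: middle-earth
#     aliases:
#       example-alias:
#       - file:///srv/mirror/
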
diff --git a/src/buildstream/testing/_sourcetests/source_determinism.py b/src/buildstream/testing/_sourcetests/source_determinism.py
index 465afa23b..d829984a8 100644
--- a/src/buildstream/testing/_sourcetests/source_determinism.py
+++ b/src/buildstream/testing/_sourcetests/source_determinism.py
@@ -49,12 +49,8 @@ def create_test_directory(*path, mode=0o644):
@pytest.mark.integration
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
-@pytest.mark.skipif(
- HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox, Must Fix"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
+@pytest.mark.skipif(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox, Must Fix")
def test_deterministic_source_umask(cli, tmpdir, datafiles, kind):
project = str(datafiles)
element_name = "list.bst"
@@ -96,10 +92,7 @@ def test_deterministic_source_umask(cli, tmpdir, datafiles, kind):
result = cli.run(project=project, args=["build", element_name])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkoutdir],)
result.assert_success()
with open(os.path.join(checkoutdir, "ls-l"), "r") as f:
diff --git a/src/buildstream/testing/_sourcetests/track.py b/src/buildstream/testing/_sourcetests/track.py
index c857e246d..8c54f6259 100644
--- a/src/buildstream/testing/_sourcetests/track.py
+++ b/src/buildstream/testing/_sourcetests/track.py
@@ -138,18 +138,14 @@ def test_track_recurse(cli, tmpdir, datafiles, kind, amount):
assert states[element_name] == "no reference"
# Now first try to track it
- result = cli.run(
- project=project, args=["source", "track", "--deps", "all", last_element_name]
- )
+ result = cli.run(project=project, args=["source", "track", "--deps", "all", last_element_name])
result.assert_success()
# And now fetch it: The Source has probably already cached the
# latest ref locally, but it is not required to have cached
# the associated content of the latest ref at track time, that
# is the job of fetch.
- result = cli.run(
- project=project, args=["source", "fetch", "--deps", "all", last_element_name]
- )
+ result = cli.run(project=project, args=["source", "fetch", "--deps", "all", last_element_name])
result.assert_success()
# Assert that the base is buildable and the rest are waiting
@@ -177,9 +173,7 @@ def test_track_recurse_except(cli, tmpdir, datafiles, kind):
# Write out our test targets
generate_element(repo, os.path.join(element_path, element_dep_name))
- generate_element(
- repo, os.path.join(element_path, element_target_name), dep_name=element_dep_name
- )
+ generate_element(repo, os.path.join(element_path, element_target_name), dep_name=element_dep_name)
# Assert that a fetch is needed
states = cli.get_element_states(project, [element_target_name])
@@ -188,16 +182,7 @@ def test_track_recurse_except(cli, tmpdir, datafiles, kind):
# Now first try to track it
result = cli.run(
- project=project,
- args=[
- "source",
- "track",
- "--deps",
- "all",
- "--except",
- element_dep_name,
- element_target_name,
- ],
+ project=project, args=["source", "track", "--deps", "all", "--except", element_dep_name, element_target_name,],
)
result.assert_success()
@@ -205,9 +190,7 @@ def test_track_recurse_except(cli, tmpdir, datafiles, kind):
# latest ref locally, but it is not required to have cached
# the associated content of the latest ref at track time, that
# is the job of fetch.
- result = cli.run(
- project=project, args=["source", "fetch", "--deps", "none", element_target_name]
- )
+ result = cli.run(project=project, args=["source", "fetch", "--deps", "none", element_target_name])
result.assert_success()
# Assert that the dependency is buildable and the target is waiting
@@ -233,25 +216,17 @@ def test_cross_junction(cli, tmpdir, datafiles, ref_storage, kind):
generate_element(repo, repo_element_path)
generate_junction(
- str(tmpdir.join("junction_repo")),
- subproject_path,
- junction_path,
- store_ref=False,
+ str(tmpdir.join("junction_repo")), subproject_path, junction_path, store_ref=False,
)
# Track the junction itself first.
result = cli.run(project=project, args=["source", "track", "junction.bst"])
result.assert_success()
- assert (
- cli.get_element_state(project, "junction.bst:import-etc-repo.bst")
- == "no reference"
- )
+ assert cli.get_element_state(project, "junction.bst:import-etc-repo.bst") == "no reference"
# Track the cross junction element. -J is not given, it is implied.
- result = cli.run(
- project=project, args=["source", "track", "junction.bst:import-etc-repo.bst"]
- )
+ result = cli.run(project=project, args=["source", "track", "junction.bst:import-etc-repo.bst"])
if ref_storage == "inline":
# This is not allowed to track cross junction without project.refs.
@@ -259,10 +234,7 @@ def test_cross_junction(cli, tmpdir, datafiles, ref_storage, kind):
else:
result.assert_success()
- assert (
- cli.get_element_state(project, "junction.bst:import-etc-repo.bst")
- == "buildable"
- )
+ assert cli.get_element_state(project, "junction.bst:import-etc-repo.bst") == "buildable"
assert os.path.exists(os.path.join(project, "project.refs"))
@@ -354,10 +326,7 @@ def test_track_include_junction(cli, tmpdir, datafiles, ref_storage, kind):
_yaml.roundtrip_dump(sources, os.path.join(sub_element_path, "sources.yml"))
generate_junction(
- str(tmpdir.join("junction_repo")),
- subproject_path,
- junction_path,
- store_ref=True,
+ str(tmpdir.join("junction_repo")), subproject_path, junction_path, store_ref=True,
)
result = cli.run(project=project, args=["source", "track", "junction.bst"])
@@ -374,10 +343,7 @@ def test_track_include_junction(cli, tmpdir, datafiles, ref_storage, kind):
# FIXME: We should expect an error. But only a warning is emitted
# result.assert_main_error(ErrorDomain.SOURCE, 'tracking-junction-fragment')
- assert (
- "junction.bst:elements/sources.yml: Cannot track source in a fragment from a junction"
- in result.stderr
- )
+ assert "junction.bst:elements/sources.yml: Cannot track source in a fragment from a junction" in result.stderr
else:
assert os.path.exists(os.path.join(project, "project.refs"))
@@ -401,15 +367,10 @@ def test_track_junction_included(cli, tmpdir, datafiles, ref_storage, kind):
subproject_path = os.path.join(project, "files", "sub-project")
junction_path = os.path.join(element_path, "junction.bst")
- update_project_configuration(
- project, {"ref-storage": ref_storage, "(@)": ["junction.bst:test.yml"]}
- )
+ update_project_configuration(project, {"ref-storage": ref_storage, "(@)": ["junction.bst:test.yml"]})
generate_junction(
- str(tmpdir.join("junction_repo")),
- subproject_path,
- junction_path,
- store_ref=False,
+ str(tmpdir.join("junction_repo")), subproject_path, junction_path, store_ref=False,
)
result = cli.run(project=project, args=["source", "track", "junction.bst"])
diff --git a/src/buildstream/testing/_sourcetests/track_cross_junction.py b/src/buildstream/testing/_sourcetests/track_cross_junction.py
index 470b67e8d..e69e25be9 100644
--- a/src/buildstream/testing/_sourcetests/track_cross_junction.py
+++ b/src/buildstream/testing/_sourcetests/track_cross_junction.py
@@ -115,15 +115,11 @@ def test_cross_junction_multiple_projects(cli, tmpdir, kind):
# Create junctions for projects a and b in main.
junction_a = "{}.bst".format(project_a)
junction_a_path = os.path.join(project, "elements", junction_a)
- generate_junction(
- tmpdir.join("repo_a"), project_a_path, junction_a_path, store_ref=False
- )
+ generate_junction(tmpdir.join("repo_a"), project_a_path, junction_a_path, store_ref=False)
junction_b = "{}.bst".format(project_b)
junction_b_path = os.path.join(project, "elements", junction_b)
- generate_junction(
- tmpdir.join("repo_b"), project_b_path, junction_b_path, store_ref=False
- )
+ generate_junction(tmpdir.join("repo_b"), project_b_path, junction_b_path, store_ref=False)
# Track the junctions.
result = cli.run(project=project, args=["source", "track", junction_a, junction_b])
@@ -138,15 +134,7 @@ def test_cross_junction_multiple_projects(cli, tmpdir, kind):
# Track without following junctions. But explicitly also track the elements in project a.
result = cli.run(
- project=project,
- args=[
- "source",
- "track",
- "--deps",
- "all",
- all_bst,
- "{}:{}".format(junction_a, stack_a),
- ],
+ project=project, args=["source", "track", "--deps", "all", all_bst, "{}:{}".format(junction_a, stack_a),],
)
result.assert_success()
@@ -169,9 +157,7 @@ def test_track_exceptions(cli, tmpdir, kind):
junction_a = "{}.bst".format(project_a)
junction_a_path = os.path.join(project, "elements", junction_a)
- generate_junction(
- tmpdir.join("repo_a"), project_a_path, junction_a_path, store_ref=False
- )
+ generate_junction(tmpdir.join("repo_a"), project_a_path, junction_a_path, store_ref=False)
result = cli.run(project=project, args=["source", "track", junction_a])
result.assert_success()
diff --git a/src/buildstream/testing/_sourcetests/utils.py b/src/buildstream/testing/_sourcetests/utils.py
index ca245a57d..1ceefa3ce 100644
--- a/src/buildstream/testing/_sourcetests/utils.py
+++ b/src/buildstream/testing/_sourcetests/utils.py
@@ -28,10 +28,7 @@ try:
import pytest
except ImportError:
module_name = globals()["__name__"]
- msg = (
- "Could not import pytest:\n"
- "To use the {} module, you must have pytest installed.".format(module_name)
- )
+ msg = "Could not import pytest:\n" "To use the {} module, you must have pytest installed.".format(module_name)
raise ImportError(msg)
from buildstream import _yaml
@@ -77,11 +74,7 @@ def add_plugins_conf(project, plugin_kind):
if plugin_package is not None:
project_conf["plugins"] = [
- {
- "origin": "pip",
- "package-name": plugin_package,
- "sources": {plugin_kind: 0,},
- },
+ {"origin": "pip", "package-name": plugin_package, "sources": {plugin_kind: 0,},},
]
_yaml.roundtrip_dump(project_conf, project_conf_file)
diff --git a/src/buildstream/testing/_sourcetests/workspace.py b/src/buildstream/testing/_sourcetests/workspace.py
index 7cc308006..149723069 100644
--- a/src/buildstream/testing/_sourcetests/workspace.py
+++ b/src/buildstream/testing/_sourcetests/workspace.py
@@ -49,9 +49,7 @@ class WorkspaceCreator:
self.workspace_cmd = os.path.join(self.project_path, "workspace_cmd")
- def create_workspace_element(
- self, kind, track, suffix="", workspace_dir=None, element_attrs=None
- ):
+ def create_workspace_element(self, kind, track, suffix="", workspace_dir=None, element_attrs=None):
element_name = "workspace-test-{}{}.bst".format(kind, suffix)
element_path = os.path.join(self.project_path, "elements")
if not workspace_dir:
@@ -73,9 +71,7 @@ class WorkspaceCreator:
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
return element_name, element_path, workspace_dir
- def create_workspace_elements(
- self, kinds, track, suffixs=None, workspace_dir_usr=None, element_attrs=None
- ):
+ def create_workspace_elements(self, kinds, track, suffixs=None, workspace_dir_usr=None, element_attrs=None):
element_tuples = []
@@ -92,9 +88,7 @@ class WorkspaceCreator:
element_tuples.append((element_name, workspace_dir))
# Assert that there is no reference, a track & fetch is needed
- states = self.cli.get_element_states(
- self.project_path, [e for e, _ in element_tuples]
- )
+ states = self.cli.get_element_states(self.project_path, [e for e, _ in element_tuples])
if track:
assert not any(states[e] != "no reference" for e, _ in element_tuples)
else:
@@ -103,18 +97,10 @@ class WorkspaceCreator:
return element_tuples
def open_workspaces(
- self,
- kinds,
- track,
- suffixs=None,
- workspace_dir=None,
- element_attrs=None,
- no_checkout=False,
+ self, kinds, track, suffixs=None, workspace_dir=None, element_attrs=None, no_checkout=False,
):
- element_tuples = self.create_workspace_elements(
- kinds, track, suffixs, workspace_dir, element_attrs
- )
+ element_tuples = self.create_workspace_elements(kinds, track, suffixs, workspace_dir, element_attrs)
os.makedirs(self.workspace_cmd, exist_ok=True)
# Now open the workspace, this should have the effect of automatically
@@ -129,20 +115,14 @@ class WorkspaceCreator:
_, workspace_dir = element_tuples[0]
args.extend(["--directory", workspace_dir])
- args.extend(
- [element_name for element_name, workspace_dir_suffix in element_tuples]
- )
- result = self.cli.run(
- cwd=self.workspace_cmd, project=self.project_path, args=args
- )
+ args.extend([element_name for element_name, workspace_dir_suffix in element_tuples])
+ result = self.cli.run(cwd=self.workspace_cmd, project=self.project_path, args=args)
result.assert_success()
if not no_checkout:
# Assert that we are now buildable because the source is now cached.
- states = self.cli.get_element_states(
- self.project_path, [e for e, _ in element_tuples]
- )
+ states = self.cli.get_element_states(self.project_path, [e for e, _ in element_tuples])
assert not any(states[e] != "buildable" for e, _ in element_tuples)
# Check that the executable hello file is found in each workspace
@@ -166,9 +146,7 @@ def open_workspace(
no_checkout=False,
):
workspace_object = WorkspaceCreator(cli, tmpdir, datafiles, project_path)
- workspaces = workspace_object.open_workspaces(
- (kind,), track, (suffix,), workspace_dir, element_attrs, no_checkout
- )
+ workspaces = workspace_object.open_workspaces((kind,), track, (suffix,), workspace_dir, element_attrs, no_checkout)
assert len(workspaces) == 1
element_name, workspace = workspaces[0]
return element_name, workspace_object.project_path, workspace
diff --git a/src/buildstream/testing/_utils/junction.py b/src/buildstream/testing/_utils/junction.py
index ddfbead55..cfc5898a9 100644
--- a/src/buildstream/testing/_utils/junction.py
+++ b/src/buildstream/testing/_utils/junction.py
@@ -49,9 +49,7 @@ class _SimpleGit(Repo):
return self.latest_commit()
def latest_commit(self):
- return self._run_git(
- "rev-parse", "HEAD", stdout=subprocess.PIPE, universal_newlines=True,
- ).stdout.strip()
+ return self._run_git("rev-parse", "HEAD", stdout=subprocess.PIPE, universal_newlines=True,).stdout.strip()
def source_config(self, ref=None):
return self.source_config_extra(ref)
diff --git a/src/buildstream/testing/integration.py b/src/buildstream/testing/integration.py
index 5734c6c82..584d7da1b 100644
--- a/src/buildstream/testing/integration.py
+++ b/src/buildstream/testing/integration.py
@@ -51,9 +51,7 @@ def assert_contains(directory, expected):
missing = set(expected)
missing.difference_update(walk_dir(directory))
if missing:
- raise AssertionError(
- "Missing {} expected elements from list: {}".format(len(missing), missing)
- )
+ raise AssertionError("Missing {} expected elements from list: {}".format(len(missing), missing))
class IntegrationCache:
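
assert_contains, reformatted above, walks a checkout directory and raises an AssertionError naming any expected relative paths that are missing. A short usage sketch as it might appear in an integration test; the element name and paths are illustrative only.

import os
from buildstream.testing.integration import assert_contains

def test_checkout_layout(cli, datafiles, tmp_path):
    project = str(datafiles)
    checkout = os.path.join(str(tmp_path), "checkout")

    result = cli.run(project=project, args=["build", "hello.bst"])
    result.assert_success()

    result = cli.run(project=project, args=["artifact", "checkout", "hello.bst", "--directory", checkout])
    result.assert_success()

    # Raises "Missing N expected elements from list: ..." if any path is absent.
    assert_contains(checkout, ["/usr", "/usr/bin", "/usr/bin/hello"])
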
diff --git a/src/buildstream/testing/runcli.py b/src/buildstream/testing/runcli.py
index 9cded9f9e..6c9197d0d 100644
--- a/src/buildstream/testing/runcli.py
+++ b/src/buildstream/testing/runcli.py
@@ -62,9 +62,7 @@ from buildstream._protos.buildstream.v2 import artifact_pb2
# Wrapper for the click.testing result
class Result:
- def __init__(
- self, exit_code=None, exception=None, exc_info=None, output=None, stderr=None
- ):
+ def __init__(self, exit_code=None, exception=None, exc_info=None, output=None, stderr=None):
self.exit_code = exit_code
self.exc = exception
self.exc_info = exc_info
@@ -126,9 +124,7 @@ class Result:
# Raises:
# (AssertionError): If any of the assertions fail
#
- def assert_main_error(
- self, error_domain, error_reason, fail_message="", *, debug=False
- ):
+ def assert_main_error(self, error_domain, error_reason, fail_message="", *, debug=False):
if debug:
print(
"""
@@ -137,10 +133,7 @@ class Result:
Domain: {}
Reason: {}
""".format(
- self.exit_code,
- self.exception,
- self.exception.domain,
- self.exception.reason,
+ self.exit_code, self.exception, self.exception.domain, self.exception.reason,
)
)
assert self.exit_code == -1, fail_message
@@ -203,9 +196,7 @@ class Result:
# (list): A list of element names in the order which they first appeared in the result
#
def get_start_order(self, activity):
- results = re.findall(
- r"\[\s*{}:(\S+)\s*\]\s*START\s*.*\.log".format(activity), self.stderr
- )
+ results = re.findall(r"\[\s*{}:(\S+)\s*\]\s*START\s*.*\.log".format(activity), self.stderr)
if results is None:
return []
return list(results)
@@ -228,18 +219,14 @@ class Result:
return list(tracked)
def get_pushed_elements(self):
- pushed = re.findall(
- r"\[\s*push:(\S+)\s*\]\s*INFO\s*Pushed artifact", self.stderr
- )
+ pushed = re.findall(r"\[\s*push:(\S+)\s*\]\s*INFO\s*Pushed artifact", self.stderr)
if pushed is None:
return []
return list(pushed)
def get_pulled_elements(self):
- pulled = re.findall(
- r"\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled artifact", self.stderr
- )
+ pulled = re.findall(r"\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled artifact", self.stderr)
if pulled is None:
return []
@@ -345,9 +332,7 @@ class Cli:
bst_args += ["--no-verbose"]
if configure:
- config_file = stack.enter_context(
- configured(self.directory, self.config)
- )
+ config_file = stack.enter_context(configured(self.directory, self.config))
bst_args += ["--config", config_file]
if project:
@@ -377,11 +362,7 @@ class Cli:
# Some informative stdout we can observe when anything fails
if self.verbose:
command = "bst " + " ".join(bst_args)
- print(
- "BuildStream exited with code {} for invocation:\n\t{}".format(
- result.exit_code, command
- )
- )
+ print("BuildStream exited with code {} for invocation:\n\t{}".format(result.exit_code, command))
if result.output:
print("Program output was:\n{}".format(result.output))
if result.stderr:
@@ -431,13 +412,7 @@ class Cli:
out, err = capture.readouterr()
capture.stop_capturing()
- return Result(
- exit_code=exit_code,
- exception=exception,
- exc_info=exc_info,
- output=out,
- stderr=err,
- )
+ return Result(exit_code=exit_code, exception=exception, exc_info=exc_info, output=out, stderr=err,)
# Fetch an element state by name by
# invoking bst show on the project with the CLI
@@ -447,9 +422,7 @@ class Cli:
#
def get_element_state(self, project, element_name):
result = self.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{state}", element_name],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{state}", element_name],
)
result.assert_success()
return result.output.strip()
@@ -460,9 +433,7 @@ class Cli:
#
def get_element_states(self, project, targets, deps="all"):
result = self.run(
- project=project,
- silent=True,
- args=["show", "--deps", deps, "--format", "%{name}||%{state}", *targets],
+ project=project, silent=True, args=["show", "--deps", deps, "--format", "%{name}||%{state}", *targets],
)
result.assert_success()
lines = result.output.splitlines()
@@ -477,9 +448,7 @@ class Cli:
#
def get_element_key(self, project, element_name):
result = self.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{full-key}", element_name],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{full-key}", element_name],
)
result.assert_success()
return result.output.strip()
@@ -488,9 +457,7 @@ class Cli:
#
def get_element_config(self, project, element_name):
result = self.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{config}", element_name],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{config}", element_name],
)
result.assert_success()
@@ -504,9 +471,7 @@ class Cli:
except_ = []
args = ["show", "--deps", scope, "--format", "%{name}"]
- args += list(
- itertools.chain.from_iterable(zip(itertools.repeat("--except"), except_))
- )
+ args += list(itertools.chain.from_iterable(zip(itertools.repeat("--except"), except_)))
result = self.run(project=project, silent=True, args=args + elements)
result.assert_success()
@@ -634,9 +599,7 @@ class CliRemote(CliIntegration):
#
# Returns a list of configured services (by names).
#
- def ensure_services(
- self, actions=True, execution=True, storage=True, artifacts=False, sources=False
- ):
+ def ensure_services(self, actions=True, execution=True, storage=True, artifacts=False, sources=False):
# Build a list of configured services by name:
configured_services = []
if not self.config:
@@ -712,9 +675,7 @@ class TestArtifact:
# cas = CASCache(str(cache_dir))
artifact_ref = element.get_artifact_name(element_key)
- return os.path.exists(
- os.path.join(cache_dir, "artifacts", "refs", artifact_ref)
- )
+ return os.path.exists(os.path.join(cache_dir, "artifacts", "refs", artifact_ref))
# get_digest():
#
@@ -815,10 +776,7 @@ def cli_integration(tmpdir, integration_cache):
# We want to cache sources for integration tests more permanently,
# to avoid downloading the huge base-sdk repeatedly
fixture.configure(
- {
- "cachedir": integration_cache.cachedir,
- "sourcedir": integration_cache.sources,
- }
+ {"cachedir": integration_cache.cachedir, "sourcedir": integration_cache.sources,}
)
yield fixture
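
The runcli helpers above drive bst through click's test runner and parse its output; get_element_states, for example, asks bst show for a "%{name}||%{state}" format and splits each line on the separator. The parsing step in isolation, with an invented sample output:

def parse_element_states(output):
    # Mirror of the splitting done in Cli.get_element_states() above.
    states = {}
    for line in output.splitlines():
        name, state = line.split("||")
        states[name] = state
    return states

sample = "base.bst||cached\napp.bst||buildable"
assert parse_element_states(sample) == {"base.bst": "cached", "app.bst": "buildable"}
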
diff --git a/src/buildstream/types.py b/src/buildstream/types.py
index 2a27891ba..180044dbd 100644
--- a/src/buildstream/types.py
+++ b/src/buildstream/types.py
@@ -68,18 +68,14 @@ class FastEnum(metaclass=MetaFastEnum):
def __eq__(self, other):
if self.__class__ is not other.__class__:
- raise ValueError(
- "Unexpected comparison between {} and {}".format(self, repr(other))
- )
+ raise ValueError("Unexpected comparison between {} and {}".format(self, repr(other)))
# Enum instances are unique, so creating an instance with the same value as another will just
# send back the other one, hence we can use an identity comparison, which is much faster than '=='
return self is other
def __ne__(self, other):
if self.__class__ is not other.__class__:
- raise ValueError(
- "Unexpected comparison between {} and {}".format(self, repr(other))
- )
+ raise ValueError("Unexpected comparison between {} and {}".format(self, repr(other)))
return self is not other
def __hash__(self):
@@ -146,16 +142,12 @@ class Consistency(FastEnum):
def __ge__(self, other):
if self.__class__ is not other.__class__:
- raise ValueError(
- "Unexpected comparison between {} and {}".format(self, repr(other))
- )
+ raise ValueError("Unexpected comparison between {} and {}".format(self, repr(other)))
return self.value >= other.value
def __lt__(self, other):
if self.__class__ is not other.__class__:
- raise ValueError(
- "Unexpected comparison between {} and {}".format(self, repr(other))
- )
+ raise ValueError("Unexpected comparison between {} and {}".format(self, repr(other)))
return self.value < other.value
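
The FastEnum comparisons above lean on the fact that enum members are singletons, so an identity check can replace value comparison. The standard library's Enum gives the same guarantee, which the snippet below uses to illustrate the point; the member names and values here are illustrative rather than copied from types.py.

from enum import Enum

class Consistency(Enum):
    INCONSISTENT = 0
    RESOLVED = 1
    CACHED = 2

# Constructing a member by value hands back the existing object,
# so "is" is equivalent to "==" here and cheaper to evaluate.
assert Consistency(1) is Consistency.RESOLVED
assert Consistency.CACHED is not Consistency.RESOLVED

def same_state(a, b):
    # Refuse cross-class comparisons, as FastEnum.__eq__ does above.
    if a.__class__ is not b.__class__:
        raise ValueError("Unexpected comparison between {} and {}".format(a, repr(b)))
    return a is b
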
diff --git a/src/buildstream/utils.py b/src/buildstream/utils.py
index ec57b7f7f..1f16837df 100644
--- a/src/buildstream/utils.py
+++ b/src/buildstream/utils.py
@@ -250,9 +250,7 @@ def sha256sum(filename: str) -> str:
h.update(chunk)
except OSError as e:
- raise UtilError(
- "Failed to get a checksum of file '{}': {}".format(filename, e)
- ) from e
+ raise UtilError("Failed to get a checksum of file '{}': {}".format(filename, e)) from e
return h.hexdigest()
@@ -277,9 +275,7 @@ def safe_copy(src: str, dest: str, *, result: Optional[FileListResult] = None) -
os.unlink(dest)
except OSError as e:
if e.errno != errno.ENOENT:
- raise UtilError(
- "Failed to remove destination file '{}': {}".format(dest, e)
- ) from e
+ raise UtilError("Failed to remove destination file '{}': {}".format(dest, e)) from e
shutil.copyfile(src, dest)
try:
@@ -298,9 +294,7 @@ def safe_copy(src: str, dest: str, *, result: Optional[FileListResult] = None) -
raise UtilError("Failed to copy '{} -> {}': {}".format(src, dest, e)) from e
-def safe_link(
- src: str, dest: str, *, result: Optional[FileListResult] = None, _unlink=False
-) -> None:
+def safe_link(src: str, dest: str, *, result: Optional[FileListResult] = None, _unlink=False) -> None:
"""Try to create a hardlink, but resort to copying in the case of cross device links.
Args:
@@ -318,9 +312,7 @@ def safe_link(
os.unlink(dest)
except OSError as e:
if e.errno != errno.ENOENT:
- raise UtilError(
- "Failed to remove destination file '{}': {}".format(dest, e)
- ) from e
+ raise UtilError("Failed to remove destination file '{}': {}".format(dest, e)) from e
# If we can't link it due to cross-device hardlink, copy
try:
@@ -493,9 +485,7 @@ def get_host_tool(name: str) -> str:
program_path = shutil.which(name, path=search_path)
if not program_path:
- raise ProgramNotFoundError(
- "Did not find '{}' in PATH: {}".format(name, search_path)
- )
+ raise ProgramNotFoundError("Did not find '{}' in PATH: {}".format(name, search_path))
return program_path
@@ -532,12 +522,7 @@ def get_bst_version() -> Tuple[int, int]:
)
-def move_atomic(
- source: Union[Path, str],
- destination: Union[Path, str],
- *,
- ensure_parents: bool = True
-) -> None:
+def move_atomic(source: Union[Path, str], destination: Union[Path, str], *, ensure_parents: bool = True) -> None:
"""Move the source to the destination using atomic primitives.
This uses `os.rename` to move a file or directory to a new destination.
@@ -613,9 +598,7 @@ def save_file_atomic(
# This feature has been proposed for upstream Python in the past, e.g.:
# https://bugs.python.org/issue8604
- assert os.path.isabs(
- filename
- ), "The utils.save_file_atomic() parameter ``filename`` must be an absolute path"
+ assert os.path.isabs(filename), "The utils.save_file_atomic() parameter ``filename`` must be an absolute path"
if tempdir is None:
tempdir = os.path.dirname(filename)
fd, tempname = tempfile.mkstemp(dir=tempdir)
@@ -639,9 +622,7 @@ def save_file_atomic(
except FileNotFoundError:
pass
except OSError as e:
- raise UtilError(
- "Failed to cleanup temporary file {}: {}".format(tempname, e)
- ) from e
+ raise UtilError("Failed to cleanup temporary file {}: {}".format(tempname, e)) from e
try:
with _signals.terminator(cleanup_tempfile):
@@ -702,9 +683,7 @@ def _get_volume_size(path):
try:
usage = shutil.disk_usage(path)
except OSError as e:
- raise UtilError(
- "Failed to retrieve stats on volume for path '{}': {}".format(path, e)
- ) from e
+ raise UtilError("Failed to retrieve stats on volume for path '{}': {}".format(path, e)) from e
return usage.total, usage.free
@@ -794,11 +773,7 @@ def _force_rmtree(rootpath, **kwargs):
try:
os.chmod(path, 0o755)
except OSError as e:
- raise UtilError(
- "Failed to ensure write permission on file '{}': {}".format(
- path, e
- )
- )
+ raise UtilError("Failed to ensure write permission on file '{}': {}".format(path, e))
try:
shutil.rmtree(rootpath, **kwargs)
@@ -824,10 +799,7 @@ def _copy_directories(srcdir, destdir, target):
os.makedirs(new_dir)
yield (new_dir, mode)
else:
- raise UtilError(
- "Source directory tree has file where "
- "directory expected: {}".format(old_dir)
- )
+ raise UtilError("Source directory tree has file where " "directory expected: {}".format(old_dir))
else:
if not os.access(new_dir, os.W_OK):
# If the destination directory is not writable, change permissions to make it
@@ -862,9 +834,7 @@ def _ensure_real_directory(root, path):
else:
filetype = "special file"
- raise UtilError(
- "Destination is a {}, not a directory: {}".format(filetype, relpath)
- )
+ raise UtilError("Destination is a {}, not a directory: {}".format(filetype, relpath))
except FileNotFoundError:
os.makedirs(destpath)
@@ -886,13 +856,7 @@ def _ensure_real_directory(root, path):
#
#
def _process_list(
- srcdir,
- destdir,
- actionfunc,
- result,
- filter_callback=None,
- ignore_missing=False,
- report_written=False,
+ srcdir, destdir, actionfunc, result, filter_callback=None, ignore_missing=False, report_written=False,
):
# Keep track of directory permissions, since these need to be set
@@ -976,9 +940,7 @@ def _process_list(
else:
# Unsupported type.
- raise UtilError(
- "Cannot extract {} into staging-area. Unsupported type.".format(srcpath)
- )
+ raise UtilError("Cannot extract {} into staging-area. Unsupported type.".format(srcpath))
# Write directory permissions now that all files have been written
for d, perms in permissions:
@@ -1085,9 +1047,7 @@ def _tempdir(suffix="", prefix="tmp", dir=None): # pylint: disable=redefined-bu
# on SIGTERM.
#
@contextmanager
-def _tempnamedfile(
- suffix="", prefix="tmp", dir=None
-): # pylint: disable=redefined-builtin
+def _tempnamedfile(suffix="", prefix="tmp", dir=None): # pylint: disable=redefined-builtin
temp = None
def close_tempfile():
@@ -1261,9 +1221,7 @@ def _call(*popenargs, terminate=False, **kwargs):
group_id = os.getpgid(process.pid)
os.killpg(group_id, signal.SIGCONT)
- with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(
- kill_proc
- ):
+ with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(kill_proc):
process = subprocess.Popen( # pylint: disable=subprocess-popen-preexec-fn
*popenargs, preexec_fn=preexec_fn, universal_newlines=True, **kwargs
)
@@ -1469,9 +1427,7 @@ def _get_compression(tar):
if suffix == ".tar":
raise UtilError(
"Expected compression with unknown file extension ('{}'), "
- "supported extensions are ('.tar'), ('.gz'), ('.xz'), ('.bz2')".format(
- ext
- )
+ "supported extensions are ('.tar'), ('.gz'), ('.xz'), ('.bz2')".format(ext)
)
# Assume just an unconventional name was provided, default to uncompressed
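
A number of the utils.py helpers reformatted above follow the same shape: try the cheap primitive, then fall back or re-raise with context. A standalone sketch of the hardlink-then-copy fallback that safe_link's docstring describes, using plain exceptions instead of UtilError:

import errno
import os
import shutil

def link_or_copy(src, dest):
    # Remove any existing destination first, as safe_link() does above.
    try:
        os.unlink(dest)
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise RuntimeError("Failed to remove destination file '{}': {}".format(dest, e)) from e

    try:
        os.link(src, dest)          # hardlink: no data copied
    except OSError as e:
        if e.errno == errno.EXDEV:  # cross-device link not possible: fall back to a copy
            shutil.copyfile(src, dest)
        else:
            raise
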